diff --git a/.core_files.yaml b/.core_files.yaml index e49ca62439356b..6fd3a74df925e9 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -79,6 +79,7 @@ components: &components - homeassistant/components/group/** - homeassistant/components/hassio/** - homeassistant/components/homeassistant/** + - homeassistant/components/homeassistant_hardware/** - homeassistant/components/http/** - homeassistant/components/image/** - homeassistant/components/input_boolean/** @@ -127,6 +128,7 @@ tests: &tests - tests/*.py - tests/auth/** - tests/backports/** + - tests/components/conftest.py - tests/components/diagnostics/** - tests/components/history/** - tests/components/logbook/** diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index d99bad9937b203..44c38afdec6303 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,7 +2,7 @@ "name": "Home Assistant Dev", "context": "..", "dockerFile": "../Dockerfile.dev", - "postCreateCommand": "script/setup", + "postCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder} && script/setup", "postStartCommand": "script/bootstrap", "containerEnv": { "PYTHONASYNCIODEBUG": "1" @@ -58,7 +58,13 @@ ], "[python]": { "editor.defaultFormatter": "charliermarsh.ruff" - } + }, + "json.schemas": [ + { + "fileMatch": ["homeassistant/components/*/manifest.json"], + "url": "./script/json_schemas/manifest_schema.json" + } + ] } } } diff --git a/.dockerignore b/.dockerignore index 7fde7f33fa5587..cf975f4215f28f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -7,6 +7,7 @@ docs # Development .devcontainer .vscode +.tool-versions # Test related files tests diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index ad3205c51c8fad..9deb34d20e9548 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,2 +1 @@ -custom: https://www.nabucasa.com -github: balloob +custom: https://www.openhomefoundation.org diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 5f10ed17b8f7cf..7c08df39000c8c 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -27,12 +27,12 @@ jobs: publish: ${{ steps.version.outputs.publish }} steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 with: fetch-depth: 0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: translations path: translations.tar.gz @@ -90,7 +90,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Download nightly wheels of frontend if: needs.init.outputs.channel == 'dev' @@ -116,7 +116,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} if: needs.init.outputs.channel == 'dev' - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -242,7 +242,7 @@ jobs: - green steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set build additional args run: | @@ -279,7 +279,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - 
uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Initialize git uses: home-assistant/actions/helpers/git-init@master @@ -321,7 +321,7 @@ jobs: registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"] steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Install Cosign uses: sigstore/cosign-installer@v3.7.0 @@ -451,10 +451,10 @@ jobs: if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true' steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -499,7 +499,7 @@ jobs: HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }} steps: - name: Checkout repository - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Login to GitHub Container Registry uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 @@ -531,7 +531,7 @@ jobs: - name: Generate artifact attestation if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' - uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3 + uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4 with: subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-digest: ${{ steps.push.outputs.digest }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8e899651a09145..fa05f6082a20ed 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -37,12 +37,12 @@ on: type: boolean env: - CACHE_VERSION: 10 + CACHE_VERSION: 11 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 9 - HA_SHORT_VERSION: "2024.11" + HA_SHORT_VERSION: "2024.12" DEFAULT_PYTHON: "3.12" - ALL_PYTHON_VERSIONS: "['3.12']" + ALL_PYTHON_VERSIONS: "['3.12', '3.13']" # 10.3 is the oldest supported version # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022) # 10.6 is the current long-term-support @@ -93,7 +93,7 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Generate partial Python venv restore key id: generate_python_cache_key run: | @@ -231,16 +231,16 @@ jobs: - info steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.2 with: path: venv key: >- @@ -256,7 +256,7 @@ jobs: uv pip install "$(cat requirements_test.txt | grep pre-commit)" - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.2 with: path: ${{ env.PRE_COMMIT_CACHE }} lookup-only: true @@ -277,16 +277,16 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 id: python with: python-version: ${{ 
env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -295,7 +295,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -317,16 +317,16 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -335,7 +335,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -357,16 +357,16 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -375,7 +375,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -447,7 +447,7 @@ jobs: - script/hassfest/docker/Dockerfile steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Register hadolint problem matcher run: | echo "::add-matcher::.github/workflows/matchers/hadolint.json" @@ -466,10 +466,10 @@ jobs: python-version: ${{ fromJSON(needs.info.outputs.python_versions) }} steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -482,7 +482,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.2 with: path: venv lookup-only: true @@ -491,7 +491,7 @@ jobs: needs.info.outputs.python_cache_key }} - name: Restore uv wheel cache if: steps.cache-venv.outputs.cache-hit != 'true' - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.2 with: path: ${{ env.UV_CACHE_DIR }} key: >- @@ -550,16 +550,16 @@ jobs: sudo apt-get -y install \ libturbojpeg - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + 
uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -583,16 +583,16 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -615,37 +615,41 @@ jobs: && github.event.inputs.mypy-only != 'true' || github.event.inputs.audit-licenses-only == 'true') && needs.info.outputs.requirements == 'true' + strategy: + fail-fast: false + matrix: + python-version: ${{ fromJson(needs.info.outputs.python_versions) }} steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} + uses: actions/checkout@v4.2.2 + - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: - python-version: ${{ env.DEFAULT_PYTHON }} + python-version: ${{ matrix.python-version }} check-latest: true - - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment + - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true key: >- ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - - name: Run pip-licenses + - name: Extract license data run: | . venv/bin/activate - pip-licenses --format=json --output-file=licenses.json + python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json - name: Upload licenses - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: - name: licenses - path: licenses.json - - name: Process licenses + name: licenses-${{ github.run_number }}-${{ matrix.python-version }} + path: licenses-${{ matrix.python-version }}.json + - name: Check licenses run: | . 
venv/bin/activate - python -m script.licenses licenses.json + python -m script.licenses check licenses-${{ matrix.python-version }}.json pylint: name: Check pylint @@ -660,16 +664,16 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -707,16 +711,16 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -752,10 +756,10 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -768,7 +772,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -776,7 +780,7 @@ jobs: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - name: Restore mypy cache - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.2 with: path: .mypy_cache key: >- @@ -815,11 +819,7 @@ jobs: needs: - info - base - strategy: - fail-fast: false - matrix: - python-version: ${{ fromJson(needs.info.outputs.python_versions) }} - name: Split tests for full run Python ${{ matrix.python-version }} + name: Split tests for full run steps: - name: Install additional OS dependencies run: | @@ -831,16 +831,16 @@ jobs: libturbojpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 - - name: Set up Python ${{ matrix.python-version }} + uses: actions/checkout@v4.2.2 + - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: - python-version: ${{ matrix.python-version }} + python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -852,7 +852,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: pytest_buckets path: pytest_buckets.txt @@ -895,16 +895,16 @@ jobs: libturbojpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -944,7 +944,8 @@ jobs: -qq \ --timeout=9 \ --durations=10 \ - -n auto \ + --numprocesses auto \ + --snapshot-details \ --dist=loadfile \ ${cov_params[@]} \ -o console_output_style=count \ @@ -953,14 +954,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1015,16 +1016,16 @@ jobs: libturbojpeg \ libmariadb-dev-compat - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -1066,7 +1067,8 @@ jobs: python3 -b -X dev -m pytest \ -qq \ --timeout=20 \ - -n 1 \ + --numprocesses 1 \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=10 \ @@ -1079,7 +1081,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1087,7 +1089,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1098,7 +1100,7 @@ jobs: ./script/check_dirty pytest-postgres: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 services: postgres: image: ${{ matrix.postgresql-group }} @@ -1138,19 +1140,21 @@ jobs: sudo apt-get -y install \ bluez \ ffmpeg \ - libturbojpeg \ + libturbojpeg + sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y + sudo apt-get -y install \ postgresql-server-dev-14 - name: Check out code from 
GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -1192,7 +1196,8 @@ jobs: python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ - -n 1 \ + --numprocesses 1 \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=0 \ @@ -1206,7 +1211,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1214,7 +1219,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1236,7 +1241,7 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Download all coverage artifacts uses: actions/download-artifact@v4.1.8 with: @@ -1287,16 +1292,16 @@ jobs: libturbojpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.1.0 + uses: actions/cache/restore@v4.1.2 with: path: venv fail-on-cache-miss: true @@ -1338,7 +1343,8 @@ jobs: python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ - -n auto \ + --numprocesses auto \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=0 \ @@ -1348,14 +1354,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1374,7 +1380,7 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Download all coverage artifacts uses: actions/download-artifact@v4.1.8 with: diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 9ea4a83c9eebf9..2c80c32245c062 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -21,14 +21,14 @@ jobs: steps: - name: Check out code from GitHub - uses: 
actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.26.11 + uses: github/codeql-action/init@v3.27.1 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.26.11 + uses: github/codeql-action/analyze@v3.27.1 with: category: "/language:python" diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index db89819822b3ff..3fffc41e60c6d6 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -19,10 +19,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 6f086210a6d6a4..b9f54bba081447 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -32,11 +32,11 @@ jobs: architectures: ${{ steps.info.outputs.architectures }} steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.2.0 + uses: actions/setup-python@v5.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -79,7 +79,7 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: env_file path: ./.env_file @@ -87,7 +87,7 @@ jobs: overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: requirements_diff path: ./requirements_diff.txt @@ -99,7 +99,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.4.0 + uses: actions/upload-artifact@v4.4.3 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt @@ -112,11 +112,11 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312"] + abi: ["cp312", "cp313"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Download env_file uses: actions/download-artifact@v4.1.8 @@ -135,14 +135,14 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "libffi-dev;openssl-dev;yaml-dev;nasm" + apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev" skip-binary: aiohttp;multidict;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" @@ -156,11 +156,11 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312"] + abi: ["cp312", "cp313"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.2.0 + uses: actions/checkout@v4.2.2 - name: Download env_file uses: actions/download-artifact@v4.1.8 @@ -198,6 +198,7 @@ jobs: split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - name: Create requirements for cython<3 + if: matrix.abi == 'cp312' run: | # Some dependencies still require 
'cython<3' # and don't yet use isolated build environments. @@ -208,7 +209,8 @@ jobs: cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt - name: Build wheels (old cython) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 + if: matrix.abi == 'cp312' with: abi: ${{ matrix.abi }} tag: musllinux_1_2 @@ -223,43 +225,43 @@ jobs: pip: "'cython<3'" - name: Build wheels (part 1) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" - name: Build wheels (part 2) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" - name: Build wheels (part 3) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: 
"bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" diff --git a/.gitignore b/.gitignore index 9bbf5bb81d4412..241255253c5c4a 100644 --- a/.gitignore +++ b/.gitignore @@ -79,6 +79,7 @@ pytest-*.txt .pydevproject .python-version +.tool-versions # emacs auto backups *~ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af0fbd0af7f987..519674b9894eca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.9 + rev: v0.7.3 hooks: - id: ruff args: diff --git a/.strict-typing b/.strict-typing index 8837c55a5840b1..b0fd74bce54fa0 100644 --- a/.strict-typing +++ b/.strict-typing @@ -124,6 +124,7 @@ homeassistant.components.bryant_evolution.* homeassistant.components.bthome.* homeassistant.components.button.* homeassistant.components.calendar.* +homeassistant.components.cambridge_audio.* homeassistant.components.camera.* homeassistant.components.canary.* homeassistant.components.cert_expiry.* @@ -208,12 +209,14 @@ homeassistant.components.geo_location.* homeassistant.components.geocaching.* homeassistant.components.gios.* homeassistant.components.glances.* +homeassistant.components.go2rtc.* homeassistant.components.goalzero.* homeassistant.components.google.* homeassistant.components.google_assistant_sdk.* homeassistant.components.google_cloud.* homeassistant.components.google_photos.* homeassistant.components.google_sheets.* +homeassistant.components.govee_ble.* homeassistant.components.gpsd.* homeassistant.components.greeneye_monitor.* homeassistant.components.group.* @@ -301,7 +304,6 @@ homeassistant.components.lookin.* homeassistant.components.luftdaten.* homeassistant.components.madvr.* homeassistant.components.manual.* -homeassistant.components.map.* homeassistant.components.mastodon.* homeassistant.components.matrix.* homeassistant.components.matter.* @@ -322,11 +324,13 @@ homeassistant.components.moon.* homeassistant.components.mopeka.* homeassistant.components.motionmount.* homeassistant.components.mqtt.* +homeassistant.components.music_assistant.* homeassistant.components.my.* homeassistant.components.mysensors.* homeassistant.components.myuplink.* homeassistant.components.nam.* homeassistant.components.nanoleaf.* +homeassistant.components.nasweb.* homeassistant.components.neato.* homeassistant.components.nest.* homeassistant.components.netatmo.* @@ -336,6 +340,7 @@ homeassistant.components.nfandroidtv.* homeassistant.components.nightscout.* 
homeassistant.components.nissan_leaf.* homeassistant.components.no_ip.* +homeassistant.components.nordpool.* homeassistant.components.notify.* homeassistant.components.notion.* homeassistant.components.number.* @@ -345,6 +350,7 @@ homeassistant.components.oncue.* homeassistant.components.onewire.* homeassistant.components.onkyo.* homeassistant.components.open_meteo.* +homeassistant.components.openai_conversation.* homeassistant.components.openexchangerates.* homeassistant.components.opensky.* homeassistant.components.openuv.* @@ -352,6 +358,7 @@ homeassistant.components.oralb.* homeassistant.components.otbr.* homeassistant.components.overkiz.* homeassistant.components.p1_monitor.* +homeassistant.components.panel_custom.* homeassistant.components.peco.* homeassistant.components.persistent_notification.* homeassistant.components.pi_hole.* @@ -407,6 +414,7 @@ homeassistant.components.sensor.* homeassistant.components.sensoterra.* homeassistant.components.senz.* homeassistant.components.sfr_box.* +homeassistant.components.shell_command.* homeassistant.components.shelly.* homeassistant.components.shopping_list.* homeassistant.components.simplepush.* @@ -421,6 +429,7 @@ homeassistant.components.snooz.* homeassistant.components.solarlog.* homeassistant.components.sonarr.* homeassistant.components.speedtestdotnet.* +homeassistant.components.spotify.* homeassistant.components.sql.* homeassistant.components.squeezebox.* homeassistant.components.ssdp.* @@ -435,6 +444,7 @@ homeassistant.components.suez_water.* homeassistant.components.sun.* homeassistant.components.surepetcare.* homeassistant.components.switch.* +homeassistant.components.switch_as_x.* homeassistant.components.switchbee.* homeassistant.components.switchbot_cloud.* homeassistant.components.switcher_kis.* diff --git a/.vscode/settings.default.json b/.vscode/settings.default.json index 681698d08b375e..ace0a988bf5e64 100644 --- a/.vscode/settings.default.json +++ b/.vscode/settings.default.json @@ -6,5 +6,13 @@ // https://code.visualstudio.com/docs/python/testing#_pytest-configuration-settings "python.testing.pytestEnabled": false, // https://code.visualstudio.com/docs/python/linting#_general-settings - "pylint.importStrategy": "fromEnvironment" + "pylint.importStrategy": "fromEnvironment", + "json.schemas": [ + { + "fileMatch": [ + "homeassistant/components/*/manifest.json" + ], + "url": "./script/json_schemas/manifest_schema.json" + } + ] } diff --git a/CODEOWNERS b/CODEOWNERS index 9a4379fc3429ab..022eda001233e8 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -496,8 +496,8 @@ build.json @home-assistant/supervisor /tests/components/freebox/ @hacf-fr @Quentame /homeassistant/components/freedompro/ @stefano055415 /tests/components/freedompro/ @stefano055415 -/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185 -/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185 +/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185 +/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185 /homeassistant/components/fritzbox/ @mib1185 @flabbamann /tests/components/fritzbox/ @mib1185 @flabbamann /homeassistant/components/fritzbox_callmonitor/ @cdce8p @@ -617,8 +617,8 @@ build.json @home-assistant/supervisor /tests/components/hlk_sw16/ @jameshilliard /homeassistant/components/holiday/ @jrieger @gjohansson-ST /tests/components/holiday/ @jrieger @gjohansson-ST -/homeassistant/components/home_connect/ @DavidMStraub -/tests/components/home_connect/ @DavidMStraub 
+/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98 +/tests/components/home_connect/ @DavidMStraub @Diegorro98 /homeassistant/components/homeassistant/ @home-assistant/core /tests/components/homeassistant/ @home-assistant/core /homeassistant/components/homeassistant_alerts/ @home-assistant/core @@ -659,6 +659,8 @@ build.json @home-assistant/supervisor /tests/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock /homeassistant/components/husqvarna_automower/ @Thomas55555 /tests/components/husqvarna_automower/ @Thomas55555 +/homeassistant/components/husqvarna_automower_ble/ @alistair23 +/tests/components/husqvarna_automower_ble/ @alistair23 /homeassistant/components/huum/ @frwickst /tests/components/huum/ @frwickst /homeassistant/components/hvv_departures/ @vigonotion @@ -819,6 +821,8 @@ build.json @home-assistant/supervisor /tests/components/lektrico/ @lektrico /homeassistant/components/lg_netcast/ @Drafteed @splinter98 /tests/components/lg_netcast/ @Drafteed @splinter98 +/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration +/tests/components/lg_thinq/ @LG-ThinQ-Integration /homeassistant/components/lidarr/ @tkdrob /tests/components/lidarr/ @tkdrob /homeassistant/components/lifx/ @Djelibeybi @@ -950,6 +954,8 @@ build.json @home-assistant/supervisor /homeassistant/components/msteams/ @peroyvind /homeassistant/components/mullvad/ @meichthys /tests/components/mullvad/ @meichthys +/homeassistant/components/music_assistant/ @music-assistant +/tests/components/music_assistant/ @music-assistant /homeassistant/components/mutesync/ @currentoor /tests/components/mutesync/ @currentoor /homeassistant/components/my/ @home-assistant/core @@ -964,6 +970,8 @@ build.json @home-assistant/supervisor /tests/components/nam/ @bieniu /homeassistant/components/nanoleaf/ @milanmeu @joostlek /tests/components/nanoleaf/ @milanmeu @joostlek +/homeassistant/components/nasweb/ @nasWebio +/tests/components/nasweb/ @nasWebio /homeassistant/components/neato/ @Santobert /tests/components/neato/ @Santobert /homeassistant/components/nederlandse_spoorwegen/ @YarmoM @@ -1004,6 +1012,8 @@ build.json @home-assistant/supervisor /homeassistant/components/noaa_tides/ @jdelaney72 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe /tests/components/nobo_hub/ @echoromeo @oyvindwe +/homeassistant/components/nordpool/ @gjohansson-ST +/tests/components/nordpool/ @gjohansson-ST /homeassistant/components/notify/ @home-assistant/core /tests/components/notify/ @home-assistant/core /homeassistant/components/notify_events/ @matrozov @papajojo @@ -1047,6 +1057,7 @@ build.json @home-assistant/supervisor /homeassistant/components/onewire/ @garbled1 @epenet /tests/components/onewire/ @garbled1 @epenet /homeassistant/components/onkyo/ @arturpragacz +/tests/components/onkyo/ @arturpragacz /homeassistant/components/onvif/ @hunterjm /tests/components/onvif/ @hunterjm /homeassistant/components/open_meteo/ @frenck @@ -1088,10 +1099,10 @@ build.json @home-assistant/supervisor /tests/components/ovo_energy/ @timmo001 /homeassistant/components/p1_monitor/ @klaasnicolaas /tests/components/p1_monitor/ @klaasnicolaas +/homeassistant/components/palazzetti/ @dotvav +/tests/components/palazzetti/ @dotvav /homeassistant/components/panel_custom/ @home-assistant/frontend /tests/components/panel_custom/ @home-assistant/frontend -/homeassistant/components/panel_iframe/ @home-assistant/frontend -/tests/components/panel_iframe/ @home-assistant/frontend /homeassistant/components/peco/ @IceBotYT /tests/components/peco/ @IceBotYT 
/homeassistant/components/pegel_online/ @mib1185 @@ -1239,8 +1250,8 @@ build.json @home-assistant/supervisor /tests/components/roku/ @ctalkington /homeassistant/components/romy/ @xeniter /tests/components/romy/ @xeniter -/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous -/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous +/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous +/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous /homeassistant/components/roon/ @pavoni /tests/components/roon/ @pavoni /homeassistant/components/rpi_power/ @shenxn @swetoast @@ -1351,6 +1362,7 @@ build.json @home-assistant/supervisor /homeassistant/components/smarttub/ @mdz /tests/components/smarttub/ @mdz /homeassistant/components/smarty/ @z0mbieprocess +/tests/components/smarty/ @z0mbieprocess /homeassistant/components/smhi/ @gjohansson-ST /tests/components/smhi/ @gjohansson-ST /homeassistant/components/smlight/ @tl-sl @@ -1414,8 +1426,8 @@ build.json @home-assistant/supervisor /tests/components/stt/ @home-assistant/core /homeassistant/components/subaru/ @G-Two /tests/components/subaru/ @G-Two -/homeassistant/components/suez_water/ @ooii -/tests/components/suez_water/ @ooii +/homeassistant/components/suez_water/ @ooii @jb101010-2 +/tests/components/suez_water/ @ooii @jb101010-2 /homeassistant/components/sun/ @Swamp-Ig /tests/components/sun/ @Swamp-Ig /homeassistant/components/sunweg/ @rokam diff --git a/Dockerfile b/Dockerfile index 44edbdf8e3e38f..903a121c032d43 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,12 +7,13 @@ FROM ${BUILD_FROM} # Synchronize with homeassistant/core.py:async_stop ENV \ S6_SERVICES_GRACETIME=240000 \ - UV_SYSTEM_PYTHON=true + UV_SYSTEM_PYTHON=true \ + UV_NO_CACHE=true ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.4.17 +RUN pip3 install uv==0.5.0 WORKDIR /usr/src @@ -54,7 +55,7 @@ RUN \ "armv7") go2rtc_suffix='arm' ;; \ *) go2rtc_suffix=${BUILD_ARCH} ;; \ esac \ - && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.4/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ + && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ && chmod +x /bin/go2rtc \ # Verify go2rtc can be executed && go2rtc --version diff --git a/Dockerfile.dev b/Dockerfile.dev index d05c6df425cf5b..48f582a15810bf 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -35,6 +35,9 @@ RUN \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* +# Add go2rtc binary +COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc + # Install uv RUN pip3 install uv diff --git a/homeassistant/__main__.py b/homeassistant/__main__.py index 4c870e94b24e00..b9d9883270503c 100644 --- a/homeassistant/__main__.py +++ b/homeassistant/__main__.py @@ -9,6 +9,7 @@ import sys import threading +from .backup_restore import restore_backup from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__ FAULT_LOG_FILENAME = "home-assistant.log.fault" @@ -182,6 +183,9 @@ def main() -> int: return scripts.run(args.script) config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config)) + if restore_backup(config_dir): + return RESTART_EXIT_CODE + ensure_config_path(config_dir) # pylint: disable-next=import-outside-toplevel diff --git a/homeassistant/auth/__init__.py b/homeassistant/auth/__init__.py index 19045406a1536d..21a4b6113d0da8 100644 --- a/homeassistant/auth/__init__.py +++ b/homeassistant/auth/__init__.py @@ -12,7 +12,6 @@ import jwt -from 
homeassistant import data_entry_flow from homeassistant.core import ( CALLBACK_TYPE, HassJob, @@ -20,13 +19,14 @@ HomeAssistant, callback, ) +from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util from . import auth_store, jwt_wrapper, models from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRATION from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config -from .models import AuthFlowResult +from .models import AuthFlowContext, AuthFlowResult from .providers import AuthProvider, LoginFlow, auth_provider_from_config from .providers.homeassistant import HassAuthProvider @@ -98,7 +98,7 @@ async def auth_manager_from_config( class AuthManagerFlowManager( - data_entry_flow.FlowManager[AuthFlowResult, tuple[str, str]] + FlowManager[AuthFlowContext, AuthFlowResult, tuple[str, str]] ): """Manage authentication flows.""" @@ -113,7 +113,7 @@ async def async_create_flow( self, handler_key: tuple[str, str], *, - context: dict[str, Any] | None = None, + context: AuthFlowContext | None = None, data: dict[str, Any] | None = None, ) -> LoginFlow: """Create a login flow.""" @@ -124,7 +124,7 @@ async def async_create_flow( async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]], + flow: FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]], result: AuthFlowResult, ) -> AuthFlowResult: """Return a user as result of login flow. @@ -134,7 +134,7 @@ async def async_finish_flow( """ flow = cast(LoginFlow, flow) - if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: + if result["type"] != FlowResultType.CREATE_ENTRY: return result # we got final result diff --git a/homeassistant/auth/models.py b/homeassistant/auth/models.py index 0b6515ed9a5ea4..6f45dab2b36adc 100644 --- a/homeassistant/auth/models.py +++ b/homeassistant/auth/models.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import datetime, timedelta +from ipaddress import IPv4Address, IPv6Address import secrets from typing import Any, NamedTuple import uuid @@ -13,7 +14,7 @@ from propcache import cached_property from homeassistant.const import __version__ -from homeassistant.data_entry_flow import FlowResult +from homeassistant.data_entry_flow import FlowContext, FlowResult from homeassistant.util import dt as dt_util from . 
import permissions as perm_mdl @@ -23,7 +24,16 @@ TOKEN_TYPE_SYSTEM = "system" TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token" -AuthFlowResult = FlowResult[tuple[str, str]] + +class AuthFlowContext(FlowContext, total=False): + """Typed context dict for auth flow.""" + + credential_only: bool + ip_address: IPv4Address | IPv6Address + redirect_uri: str + + +AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]] @attr.s(slots=True) diff --git a/homeassistant/auth/providers/__init__.py b/homeassistant/auth/providers/__init__.py index debdd0b1a051d6..34278c47df7dc6 100644 --- a/homeassistant/auth/providers/__init__.py +++ b/homeassistant/auth/providers/__init__.py @@ -10,9 +10,10 @@ import voluptuous as vol from voluptuous.humanize import humanize_error -from homeassistant import data_entry_flow, requirements +from homeassistant import requirements from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import FlowHandler from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.importlib import async_import_module from homeassistant.util import dt as dt_util @@ -21,7 +22,14 @@ from ..auth_store import AuthStore from ..const import MFA_SESSION_EXPIRATION -from ..models import AuthFlowResult, Credentials, RefreshToken, User, UserMeta +from ..models import ( + AuthFlowContext, + AuthFlowResult, + Credentials, + RefreshToken, + User, + UserMeta, +) _LOGGER = logging.getLogger(__name__) DATA_REQS: HassKey[set[str]] = HassKey("auth_prov_reqs_processed") @@ -97,7 +105,7 @@ def async_create_credentials(self, data: dict[str, str]) -> Credentials: # Implement by extending class - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: """Return the data flow for logging in with auth provider. Auth provider should extend LoginFlow and return an instance. @@ -184,7 +192,7 @@ async def load_auth_provider_module( return module -class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]): +class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]): """Handler for the login flow.""" _flow_result = AuthFlowResult diff --git a/homeassistant/auth/providers/command_line.py b/homeassistant/auth/providers/command_line.py index 43cde284a25560..12447bc8c18b04 100644 --- a/homeassistant/auth/providers/command_line.py +++ b/homeassistant/auth/providers/command_line.py @@ -13,7 +13,7 @@ from homeassistant.const import CONF_COMMAND from homeassistant.exceptions import HomeAssistantError -from ..models import AuthFlowResult, Credentials, UserMeta +from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta from . 
import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow CONF_ARGS = "args" @@ -59,7 +59,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) self._user_meta: dict[str, dict[str, Any]] = {} - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: """Return a flow to login.""" return CommandLineLoginFlow(self) diff --git a/homeassistant/auth/providers/homeassistant.py b/homeassistant/auth/providers/homeassistant.py index ec39bdbdcdc5b3..e5dded74762195 100644 --- a/homeassistant/auth/providers/homeassistant.py +++ b/homeassistant/auth/providers/homeassistant.py @@ -17,7 +17,7 @@ from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.storage import Store -from ..models import AuthFlowResult, Credentials, UserMeta +from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow STORAGE_VERSION = 1 @@ -305,7 +305,7 @@ async def async_initialize(self) -> None: await data.async_load() self.data = data - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: """Return a flow to login.""" return HassLoginFlow(self) diff --git a/homeassistant/auth/providers/insecure_example.py b/homeassistant/auth/providers/insecure_example.py index 8bcf7569f5a9c3..a7dced851a301c 100644 --- a/homeassistant/auth/providers/insecure_example.py +++ b/homeassistant/auth/providers/insecure_example.py @@ -4,14 +4,14 @@ from collections.abc import Mapping import hmac -from typing import Any, cast +from typing import cast import voluptuous as vol from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError -from ..models import AuthFlowResult, Credentials, UserMeta +from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow USER_SCHEMA = vol.Schema( @@ -36,7 +36,7 @@ class InvalidAuthError(HomeAssistantError): class ExampleAuthProvider(AuthProvider): """Example auth provider based on hardcoded usernames and passwords.""" - async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: """Return a flow to login.""" return ExampleLoginFlow(self) diff --git a/homeassistant/auth/providers/trusted_networks.py b/homeassistant/auth/providers/trusted_networks.py index 564633073fc895..f32c35d4bd554f 100644 --- a/homeassistant/auth/providers/trusted_networks.py +++ b/homeassistant/auth/providers/trusted_networks.py @@ -25,7 +25,13 @@ from homeassistant.helpers.network import is_cloud_connection from .. import InvalidAuthError -from ..models import AuthFlowResult, Credentials, RefreshToken, UserMeta +from ..models import ( + AuthFlowContext, + AuthFlowResult, + Credentials, + RefreshToken, + UserMeta, +) from . 
import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
 
 type IPAddress = IPv4Address | IPv6Address
@@ -98,7 +104,7 @@ def support_mfa(self) -> bool:
         """Trusted Networks auth provider does not support MFA."""
         return False
 
-    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
         """Return a flow to login."""
         assert context is not None
         ip_addr = cast(IPAddress, context.get("ip_address"))
diff --git a/homeassistant/backup_restore.py b/homeassistant/backup_restore.py
new file mode 100644
index 00000000000000..32991dfb2d3b18
--- /dev/null
+++ b/homeassistant/backup_restore.py
@@ -0,0 +1,126 @@
+"""Home Assistant module to handle restoring backups."""
+
+from dataclasses import dataclass
+import json
+import logging
+from pathlib import Path
+import shutil
+import sys
+from tempfile import TemporaryDirectory
+
+from awesomeversion import AwesomeVersion
+import securetar
+
+from .const import __version__ as HA_VERSION
+
+RESTORE_BACKUP_FILE = ".HA_RESTORE"
+KEEP_PATHS = ("backups",)
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@dataclass
+class RestoreBackupFileContent:
+    """Definition for restore backup file content."""
+
+    backup_file_path: Path
+
+
+def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
+    """Return the contents of the restore backup file."""
+    instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE)
+    try:
+        instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
+        return RestoreBackupFileContent(
+            backup_file_path=Path(instruction_content["path"])
+        )
+    except (FileNotFoundError, json.JSONDecodeError):
+        return None
+
+
+def _clear_configuration_directory(config_dir: Path) -> None:
+    """Delete all files and directories in the config directory except for the backups directory."""
+    keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
+    config_contents = sorted(
+        [entry for entry in config_dir.iterdir() if entry not in keep_paths]
+    )
+
+    for entry in config_contents:
+        entrypath = config_dir.joinpath(entry)
+
+        if entrypath.is_file():
+            entrypath.unlink()
+        elif entrypath.is_dir():
+            shutil.rmtree(entrypath)
+
+
+def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
+    """Extract the backup file to the config directory."""
+    with (
+        TemporaryDirectory() as tempdir,
+        securetar.SecureTarFile(
+            backup_file_path,
+            gzip=False,
+            mode="r",
+        ) as ostf,
+    ):
+        ostf.extractall(
+            path=Path(tempdir, "extracted"),
+            members=securetar.secure_path(ostf),
+            filter="fully_trusted",
+        )
+        backup_meta_file = Path(tempdir, "extracted", "backup.json")
+        backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8"))
+
+        if (
+            backup_meta_version := AwesomeVersion(
+                backup_meta["homeassistant"]["version"]
+            )
+        ) > HA_VERSION:
+            raise ValueError(
+                f"You need at least Home Assistant version {backup_meta_version} to restore this backup"
+            )
+
+        with securetar.SecureTarFile(
+            Path(
+                tempdir,
+                "extracted",
+                f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
+            ),
+            gzip=backup_meta["compressed"],
+            mode="r",
+        ) as istf:
+            for member in istf.getmembers():
+                if member.name == "data":
+                    continue
+                member.name = member.name.replace("data/", "")
+            _clear_configuration_directory(config_dir)
+            istf.extractall(
+                path=config_dir,
+                members=[
+                    member
+                    for member in securetar.secure_path(istf)
+                    if member.name != "data"
+                ],
+                filter="fully_trusted",
+            )
+
+
+def restore_backup(config_dir_path: str) -> bool:
+    """Restore the backup file if any.
+
+    Returns True if a restore backup file was found and restored, False otherwise.
+    """
+    config_dir = Path(config_dir_path)
+    if not (restore_content := restore_backup_file_content(config_dir)):
+        return False
+
+    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
+    backup_file_path = restore_content.backup_file_path
+    _LOGGER.info("Restoring %s", backup_file_path)
+    try:
+        _extract_backup(config_dir, backup_file_path)
+    except FileNotFoundError as err:
+        raise ValueError(f"Backup file {backup_file_path} does not exist") from err
+    _LOGGER.info("Restore complete, restarting")
+    return True
diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py
index 742a293e4c456a..dcfb668562702d 100644
--- a/homeassistant/bootstrap.py
+++ b/homeassistant/bootstrap.py
@@ -70,6 +70,7 @@
     REQUIRED_NEXT_PYTHON_VER,
     SIGNAL_BOOTSTRAP_INTEGRATIONS,
 )
+from .core_config import async_process_ha_core_config
 from .exceptions import HomeAssistantError
 from .helpers import (
     area_registry,
@@ -479,7 +480,7 @@
     core_config = config.get(core.DOMAIN, {})
 
     try:
-        await conf_util.async_process_ha_core_config(hass, core_config)
+        await async_process_ha_core_config(hass, core_config)
     except vol.Invalid as config_err:
         conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
         async_notify_setup_error(hass, core.DOMAIN)
diff --git a/homeassistant/brands/husqvarna.json b/homeassistant/brands/husqvarna.json
new file mode 100644
index 00000000000000..a01eba752324bc
--- /dev/null
+++ b/homeassistant/brands/husqvarna.json
@@ -0,0 +1,5 @@
+{
+  "domain": "husqvarna",
+  "name": "Husqvarna",
+  "integrations": ["husqvarna_automower", "husqvarna_automower_ble"]
+}
diff --git a/homeassistant/brands/lg.json b/homeassistant/brands/lg.json
index 350db80b5f3df5..02bd58c0d1cfe0 100644
--- a/homeassistant/brands/lg.json
+++ b/homeassistant/brands/lg.json
@@ -1,5 +1,5 @@
 {
   "domain": "lg",
   "name": "LG",
-  "integrations": ["lg_netcast", "lg_soundbar", "webostv"]
+  "integrations": ["lg_netcast", "lg_soundbar", "lg_thinq", "webostv"]
 }
diff --git a/homeassistant/components/abode/alarm_control_panel.py b/homeassistant/components/abode/alarm_control_panel.py
index b58a4757785808..4ec59ca4c39450 100644
--- a/homeassistant/components/abode/alarm_control_panel.py
+++ b/homeassistant/components/abode/alarm_control_panel.py
@@ -7,13 +7,9 @@
 from homeassistant.components.alarm_control_panel import (
     AlarmControlPanelEntity,
     AlarmControlPanelEntityFeature,
+    AlarmControlPanelState,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-    STATE_ALARM_ARMED_AWAY,
-    STATE_ALARM_ARMED_HOME,
-    STATE_ALARM_DISARMED,
-)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
@@ -44,14 +40,14 @@ class AbodeAlarm(AbodeDevice, AlarmControlPanelEntity):
     _device: Alarm
 
     @property
-    def state(self) -> str | None:
+    def alarm_state(self) -> AlarmControlPanelState | None:
         """Return the state of the device."""
         if self._device.is_standby:
-            return STATE_ALARM_DISARMED
+            return AlarmControlPanelState.DISARMED
         if self._device.is_away:
-            return STATE_ALARM_ARMED_AWAY
+            return AlarmControlPanelState.ARMED_AWAY
         if self._device.is_home:
-            return STATE_ALARM_ARMED_HOME
+            return AlarmControlPanelState.ARMED_HOME
         return None
 
     def alarm_disarm(self, code: str | None = None) -> None:
diff --git a/homeassistant/components/accuweather/__init__.py b/homeassistant/components/accuweather/__init__.py
index 3d52df765e6ffb..c046933d5d5d21 100644
--- a/homeassistant/components/accuweather/__init__.py
+++ b/homeassistant/components/accuweather/__init__.py
@@ -2,13 +2,11 @@
 
 from __future__ import annotations
 
-from dataclasses import dataclass
 import logging
 
 from accuweather import AccuWeather
 
 from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
@@ -16,7 +14,9 @@
 
 from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
 from .coordinator import (
+    AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
+    AccuWeatherData,
     AccuWeatherObservationDataUpdateCoordinator,
 )
 
@@ -25,17 +25,6 @@
 PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
 
 
-@dataclass
-class AccuWeatherData:
-    """Data for AccuWeather integration."""
-
-    coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
-    coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
-
-
-type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
-
-
 async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
     """Set up AccuWeather as config entry."""
     api_key: str = entry.data[CONF_API_KEY]
@@ -50,6 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
 
     coordinator_observation = AccuWeatherObservationDataUpdateCoordinator(
         hass,
+        entry,
         accuweather,
         name,
         "observation",
@@ -58,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
 
     coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
         hass,
+        entry,
         accuweather,
         name,
         "daily forecast",
diff --git a/homeassistant/components/accuweather/coordinator.py b/homeassistant/components/accuweather/coordinator.py
index 26fadd6806cd6d..40ff3ad2c87da6 100644
--- a/homeassistant/components/accuweather/coordinator.py
+++ b/homeassistant/components/accuweather/coordinator.py
@@ -1,6 +1,9 @@
 """The AccuWeather coordinator."""
 
+from __future__ import annotations
+
 from asyncio import timeout
+from dataclasses import dataclass
 from datetime import timedelta
 import logging
 from typing import TYPE_CHECKING, Any
@@ -8,6 +11,7 @@
 from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
 from aiohttp.client_exceptions import ClientConnectorError
 
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import (
@@ -23,6 +27,17 @@
 _LOGGER = logging.getLogger(__name__)
 
 
+@dataclass
+class AccuWeatherData:
+    """Data for AccuWeather integration."""
+
+    coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
+    coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
+
+
+type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
+
+
 class AccuWeatherObservationDataUpdateCoordinator(
     DataUpdateCoordinator[dict[str, Any]]
 ):
@@ -31,6 +46,7 @@ class AccuWeatherObservationDataUpdateCoordinator(
     def __init__(
         self,
         hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
         name: str,
         coordinator_type: str,
@@ -48,6 +64,7 @@ def __init__(
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=f"{name} ({coordinator_type})",
             update_interval=update_interval,
         )
@@ -73,6 +90,7 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
     def __init__(
         self,
         hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
        accuweather: AccuWeather,
         name: str,
         coordinator_type: str,
@@ -90,6 +108,7 @@ def __init__(
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=f"{name} ({coordinator_type})",
             update_interval=update_interval,
         )
diff --git a/homeassistant/components/accuweather/diagnostics.py b/homeassistant/components/accuweather/diagnostics.py
index 85c06a6140a97f..9f35c47b88675e 100644
--- a/homeassistant/components/accuweather/diagnostics.py
+++ b/homeassistant/components/accuweather/diagnostics.py
@@ -8,7 +8,7 @@
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
 from homeassistant.core import HomeAssistant
 
-from . import AccuWeatherConfigEntry, AccuWeatherData
+from .coordinator import AccuWeatherConfigEntry, AccuWeatherData
 
 TO_REDACT = {CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE}
 
diff --git a/homeassistant/components/accuweather/sensor.py b/homeassistant/components/accuweather/sensor.py
index 2f6b10b296f2a4..001edc5f1976c0 100644
--- a/homeassistant/components/accuweather/sensor.py
+++ b/homeassistant/components/accuweather/sensor.py
@@ -28,7 +28,6 @@
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
-from . import AccuWeatherConfigEntry
 from .const import (
     API_METRIC,
     ATTR_CATEGORY,
@@ -41,6 +40,7 @@
     MAX_FORECAST_DAYS,
 )
 from .coordinator import (
+    AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )
diff --git a/homeassistant/components/accuweather/system_health.py b/homeassistant/components/accuweather/system_health.py
index eab16498248ef0..f5efaf3079fdae 100644
--- a/homeassistant/components/accuweather/system_health.py
+++ b/homeassistant/components/accuweather/system_health.py
@@ -9,8 +9,8 @@
 from homeassistant.components import system_health
 from homeassistant.core import HomeAssistant, callback
 
-from . import AccuWeatherConfigEntry
 from .const import DOMAIN
+from .coordinator import AccuWeatherConfigEntry
 
 
 @callback
diff --git a/homeassistant/components/accuweather/weather.py b/homeassistant/components/accuweather/weather.py
index 72d717f2703ce4..7d754278d91d5c 100644
--- a/homeassistant/components/accuweather/weather.py
+++ b/homeassistant/components/accuweather/weather.py
@@ -33,7 +33,6 @@
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.util.dt import utc_from_timestamp
 
-from .
import AccuWeatherConfigEntry, AccuWeatherData from .const import ( API_METRIC, ATTR_DIRECTION, @@ -43,7 +42,9 @@ CONDITION_MAP, ) from .coordinator import ( + AccuWeatherConfigEntry, AccuWeatherDailyForecastDataUpdateCoordinator, + AccuWeatherData, AccuWeatherObservationDataUpdateCoordinator, ) diff --git a/homeassistant/components/adguard/config_flow.py b/homeassistant/components/adguard/config_flow.py index c07967ec2c5e90..6fd50967c22aba 100644 --- a/homeassistant/components/adguard/config_flow.py +++ b/homeassistant/components/adguard/config_flow.py @@ -7,7 +7,6 @@ from adguardhome import AdGuardHome, AdGuardHomeConnectionError import voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, @@ -18,6 +17,7 @@ CONF_VERIFY_SSL, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DOMAIN diff --git a/homeassistant/components/advantage_air/__init__.py b/homeassistant/components/advantage_air/__init__.py index 752c1ec26fc684..8be1b719993c23 100644 --- a/homeassistant/components/advantage_air/__init__.py +++ b/homeassistant/components/advantage_air/__init__.py @@ -55,6 +55,7 @@ async def async_get(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="Advantage Air", update_method=async_get, update_interval=timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL), diff --git a/homeassistant/components/aemet/__init__.py b/homeassistant/components/aemet/__init__.py index e242d62a58060c..29bc044c67d26b 100644 --- a/homeassistant/components/aemet/__init__.py +++ b/homeassistant/components/aemet/__init__.py @@ -1,6 +1,5 @@ """The AEMET OpenData component.""" -from dataclasses import dataclass import logging from aemet_opendata.exceptions import AemetError, TownNotFound @@ -13,20 +12,10 @@ from homeassistant.helpers import aiohttp_client from .const import CONF_STATION_UPDATES, PLATFORMS -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator _LOGGER = logging.getLogger(__name__) -type AemetConfigEntry = ConfigEntry[AemetData] - - -@dataclass -class AemetData: - """Aemet runtime data.""" - - name: str - coordinator: WeatherUpdateCoordinator - async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool: """Set up AEMET OpenData as config entry.""" @@ -46,7 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo except AemetError as err: raise ConfigEntryNotReady(err) from err - weather_coordinator = WeatherUpdateCoordinator(hass, aemet) + weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet) await weather_coordinator.async_config_entry_first_refresh() entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator) diff --git a/homeassistant/components/aemet/coordinator.py b/homeassistant/components/aemet/coordinator.py index 8d179ccdb02b13..2e8534c746630f 100644 --- a/homeassistant/components/aemet/coordinator.py +++ b/homeassistant/components/aemet/coordinator.py @@ -3,6 +3,7 @@ from __future__ import annotations from asyncio import timeout +from dataclasses import dataclass from datetime import timedelta import logging from typing import Any, Final, cast @@ -19,6 +20,7 @@ from aemet_opendata.interface import AEMET from homeassistant.components.weather import Forecast +from 
homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -29,6 +31,16 @@ API_TIMEOUT: Final[int] = 120 WEATHER_UPDATE_INTERVAL = timedelta(minutes=10) +type AemetConfigEntry = ConfigEntry[AemetData] + + +@dataclass +class AemetData: + """Aemet runtime data.""" + + name: str + coordinator: WeatherUpdateCoordinator + class WeatherUpdateCoordinator(DataUpdateCoordinator): """Weather data update coordinator.""" @@ -36,6 +48,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): def __init__( self, hass: HomeAssistant, + entry: AemetConfigEntry, aemet: AEMET, ) -> None: """Initialize coordinator.""" @@ -44,6 +57,7 @@ def __init__( super().__init__( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_interval=WEATHER_UPDATE_INTERVAL, ) diff --git a/homeassistant/components/aemet/diagnostics.py b/homeassistant/components/aemet/diagnostics.py index 2379bd34bc0800..bc366fc6d4475f 100644 --- a/homeassistant/components/aemet/diagnostics.py +++ b/homeassistant/components/aemet/diagnostics.py @@ -15,7 +15,7 @@ ) from homeassistant.core import HomeAssistant -from . import AemetConfigEntry +from .coordinator import AemetConfigEntry TO_REDACT_CONFIG = [ CONF_API_KEY, diff --git a/homeassistant/components/aemet/sensor.py b/homeassistant/components/aemet/sensor.py index 83d490f7fe2d67..88eb34b6f84453 100644 --- a/homeassistant/components/aemet/sensor.py +++ b/homeassistant/components/aemet/sensor.py @@ -55,7 +55,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import AemetConfigEntry from .const import ( ATTR_API_CONDITION, ATTR_API_FORECAST_CONDITION, @@ -87,7 +86,7 @@ ATTR_API_WIND_SPEED, CONDITIONS_MAP, ) -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator from .entity import AemetEntity @@ -249,6 +248,7 @@ class AemetSensorEntityDescription(SensorEntityDescription): name="Rain", native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, + state_class=SensorStateClass.MEASUREMENT, ), AemetSensorEntityDescription( key=ATTR_API_RAIN_PROB, @@ -263,6 +263,7 @@ class AemetSensorEntityDescription(SensorEntityDescription): name="Snow", native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, + state_class=SensorStateClass.MEASUREMENT, ), AemetSensorEntityDescription( key=ATTR_API_SNOW_PROB, diff --git a/homeassistant/components/aemet/weather.py b/homeassistant/components/aemet/weather.py index 341b81d71c4c27..a156652eadd675 100644 --- a/homeassistant/components/aemet/weather.py +++ b/homeassistant/components/aemet/weather.py @@ -27,9 +27,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import AemetConfigEntry from .const import CONDITIONS_MAP -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator from .entity import AemetEntity diff --git a/homeassistant/components/agent_dvr/alarm_control_panel.py b/homeassistant/components/agent_dvr/alarm_control_panel.py index f098184321f78f..23328315e42617 100644 --- a/homeassistant/components/agent_dvr/alarm_control_panel.py +++ b/homeassistant/components/agent_dvr/alarm_control_panel.py @@ -5,12 +5,7 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo @@ -65,37 +60,37 @@ async def async_update(self) -> None: self._attr_available = self._client.is_available armed = self._client.is_armed if armed is None: - self._attr_state = None + self._attr_alarm_state = None return if armed: prof = (await self._client.get_active_profile()).lower() - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY if prof == CONF_HOME_MODE_NAME: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME elif prof == CONF_NIGHT_MODE_NAME: - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT else: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" await self._client.disarm() - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command. Uses custom mode.""" await self._client.arm() await self._client.set_active_profile(CONF_AWAY_MODE_NAME) - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command. Uses custom mode.""" await self._client.arm() await self._client.set_active_profile(CONF_HOME_MODE_NAME) - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command. 
Uses custom mode.""" await self._client.arm() await self._client.set_active_profile(CONF_NIGHT_MODE_NAME) - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT diff --git a/homeassistant/components/agent_dvr/manifest.json b/homeassistant/components/agent_dvr/manifest.json index 9a6c528c33665d..4ec142963637e8 100644 --- a/homeassistant/components/agent_dvr/manifest.json +++ b/homeassistant/components/agent_dvr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/agent_dvr", "iot_class": "local_polling", "loggers": ["agent"], - "requirements": ["agent-py==0.0.23"] + "requirements": ["agent-py==0.0.24"] } diff --git a/homeassistant/components/airnow/config_flow.py b/homeassistant/components/airnow/config_flow.py index e839acdcb7b310..d0ab16e9758c49 100644 --- a/homeassistant/components/airnow/config_flow.py +++ b/homeassistant/components/airnow/config_flow.py @@ -1,5 +1,7 @@ """Config flow for AirNow integration.""" +from __future__ import annotations + import logging from typing import Any @@ -12,7 +14,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.core import HomeAssistant, callback @@ -120,12 +121,12 @@ async def async_step_user( @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> AirNowOptionsFlowHandler: """Return the options flow.""" - return AirNowOptionsFlowHandler(config_entry) + return AirNowOptionsFlowHandler() -class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AirNowOptionsFlowHandler(OptionsFlow): """Handle an options flow for AirNow.""" async def async_step_init( @@ -136,12 +137,7 @@ async def async_step_init( return self.async_create_entry(data=user_input) options_schema = vol.Schema( - { - vol.Optional(CONF_RADIUS): vol.All( - int, - vol.Range(min=5), - ), - } + {vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))} ) return self.async_show_form( diff --git a/homeassistant/components/airthings/__init__.py b/homeassistant/components/airthings/__init__.py index 22138c7d4fc6d5..14e2f28370fe16 100644 --- a/homeassistant/components/airthings/__init__.py +++ b/homeassistant/components/airthings/__init__.py @@ -42,6 +42,7 @@ async def _update_method() -> dict[str, AirthingsDevice]: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=_update_method, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/airthings_ble/__init__.py b/homeassistant/components/airthings_ble/__init__.py index 79384eed4efb32..1c3c608473967b 100644 --- a/homeassistant/components/airthings_ble/__init__.py +++ b/homeassistant/components/airthings_ble/__init__.py @@ -2,75 +2,27 @@ from __future__ import annotations -from datetime import timedelta -import logging - -from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice -from bleak_retry_connector import close_stale_connections_by_address - -from homeassistant.components import bluetooth -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from homeassistant.util.unit_system import METRIC_SYSTEM -from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, 
MAX_RETRIES_AFTER_STARTUP +from .const import MAX_RETRIES_AFTER_STARTUP +from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] -_LOGGER = logging.getLogger(__name__) - -AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator[AirthingsDevice] -AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator] - async def async_setup_entry( hass: HomeAssistant, entry: AirthingsBLEConfigEntry ) -> bool: """Set up Airthings BLE device from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - address = entry.unique_id - - is_metric = hass.config.units is METRIC_SYSTEM - assert address is not None - - await close_stale_connections_by_address(address) - - ble_device = bluetooth.async_ble_device_from_address(hass, address) - - if not ble_device: - raise ConfigEntryNotReady( - f"Could not find Airthings device with address {address}" - ) - - airthings = AirthingsBluetoothDeviceData(_LOGGER, is_metric) - - async def _async_update_method() -> AirthingsDevice: - """Get data from Airthings BLE.""" - try: - data = await airthings.update_device(ble_device) - except Exception as err: - raise UpdateFailed(f"Unable to fetch data: {err}") from err - - return data - - coordinator: AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator( - hass, - _LOGGER, - name=DOMAIN, - update_method=_async_update_method, - update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), - ) - + coordinator = AirthingsBLEDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() # Once its setup and we know we are not going to delay # the startup of Home Assistant, we can set the max attempts # to a higher value. If the first connection attempt fails, # Home Assistant's built-in retry logic will take over. 
- airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP) + coordinator.airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP) entry.runtime_data = coordinator diff --git a/homeassistant/components/airthings_ble/coordinator.py b/homeassistant/components/airthings_ble/coordinator.py new file mode 100644 index 00000000000000..81009dcea812c2 --- /dev/null +++ b/homeassistant/components/airthings_ble/coordinator.py @@ -0,0 +1,68 @@ +"""The Airthings BLE integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice +from bleak.backends.device import BLEDevice +from bleak_retry_connector import close_stale_connections_by_address + +from homeassistant.components import bluetooth +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.unit_system import METRIC_SYSTEM + +from .const import DEFAULT_SCAN_INTERVAL, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +type AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator] + + +class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]): + """Class to manage fetching Airthings BLE data.""" + + ble_device: BLEDevice + config_entry: AirthingsBLEConfigEntry + + def __init__(self, hass: HomeAssistant, entry: AirthingsBLEConfigEntry) -> None: + """Initialize the coordinator.""" + self.airthings = AirthingsBluetoothDeviceData( + _LOGGER, hass.config.units is METRIC_SYSTEM + ) + super().__init__( + hass, + _LOGGER, + config_entry=entry, + name=DOMAIN, + update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), + ) + + async def _async_setup(self) -> None: + """Set up the coordinator.""" + address = self.config_entry.unique_id + + assert address is not None + + await close_stale_connections_by_address(address) + + ble_device = bluetooth.async_ble_device_from_address(self.hass, address) + + if not ble_device: + raise ConfigEntryNotReady( + f"Could not find Airthings device with address {address}" + ) + self.ble_device = ble_device + + async def _async_update_data(self) -> AirthingsDevice: + """Get data from Airthings BLE.""" + try: + data = await self.airthings.update_device(self.ble_device) + except Exception as err: + raise UpdateFailed(f"Unable to fetch data: {err}") from err + + return data diff --git a/homeassistant/components/airthings_ble/manifest.json b/homeassistant/components/airthings_ble/manifest.json index 6c00fe79e7bd3a..fe2cc0eeb36b0e 100644 --- a/homeassistant/components/airthings_ble/manifest.json +++ b/homeassistant/components/airthings_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/airthings_ble", "iot_class": "local_polling", - "requirements": ["airthings-ble==0.9.1"] + "requirements": ["airthings-ble==0.9.2"] } diff --git a/homeassistant/components/airthings_ble/sensor.py b/homeassistant/components/airthings_ble/sensor.py index b1ae7d533d8127..0dfd82a38c422d 100644 --- a/homeassistant/components/airthings_ble/sensor.py +++ b/homeassistant/components/airthings_ble/sensor.py @@ -34,8 +34,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.unit_system import METRIC_SYSTEM -from . 
import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator from .const import DOMAIN, VOLUME_BECQUEREL, VOLUME_PICOCURIE +from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/airtouch5/__init__.py b/homeassistant/components/airtouch5/__init__.py index 8aab41d72cb216..f0c7ba8123c96e 100644 --- a/homeassistant/components/airtouch5/__init__.py +++ b/homeassistant/components/airtouch5/__init__.py @@ -9,8 +9,6 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN - PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.COVER] type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient] @@ -19,8 +17,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: Airtouch5ConfigEntry) -> bool: """Set up Airtouch 5 from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - # Create API instance host = entry.data[CONF_HOST] client = Airtouch5SimpleClient(host) diff --git a/homeassistant/components/airvisual/__init__.py b/homeassistant/components/airvisual/__init__.py index dac34b170c9e95..d2e5e7169b92a9 100644 --- a/homeassistant/components/airvisual/__init__.py +++ b/homeassistant/components/airvisual/__init__.py @@ -204,6 +204,7 @@ async def async_update_data() -> dict[str, Any]: coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=async_get_geography_id(entry.data), # We give a placeholder update interval in order to create the coordinator; # then, below, we use the coordinator's presence (along with any other diff --git a/homeassistant/components/airvisual/config_flow.py b/homeassistant/components/airvisual/config_flow.py index 8c012aca93d678..7643d541070d67 100644 --- a/homeassistant/components/airvisual/config_flow.py +++ b/homeassistant/components/airvisual/config_flow.py @@ -16,7 +16,12 @@ from pyairvisual.errors import AirVisualError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY, @@ -140,8 +145,11 @@ async def _async_finish_geography( valid_keys.add(user_input[CONF_API_KEY]) - if existing_entry := await self.async_set_unique_id(self._geo_id): - return self.async_update_reload_and_abort(existing_entry, data=user_input) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_API_KEY: user_input[CONF_API_KEY]}, + ) return self.async_create_entry( title=f"Cloud API ({self._geo_id})", diff --git a/homeassistant/components/airvisual_pro/__init__.py b/homeassistant/components/airvisual_pro/__init__.py index b95d0597bab86c..3b3ac6df232518 100644 --- a/homeassistant/components/airvisual_pro/__init__.py +++ b/homeassistant/components/airvisual_pro/__init__.py @@ -81,6 +81,7 @@ async def async_get_data() -> dict[str, Any]: coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name="Node/Pro data", update_interval=UPDATE_INTERVAL, update_method=async_get_data, diff --git a/homeassistant/components/airvisual_pro/config_flow.py b/homeassistant/components/airvisual_pro/config_flow.py index d1ac60abcace2f..c2d136f3102600 100644 --- a/homeassistant/components/airvisual_pro/config_flow.py +++ b/homeassistant/components/airvisual_pro/config_flow.py @@ -14,7 +14,7 @@ ) 
import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from .const import DOMAIN, LOGGER @@ -76,7 +76,7 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry + _reauth_entry_data: Mapping[str, Any] async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from `airvisual` integration (see #83882).""" @@ -86,7 +86,7 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self._get_reauth_entry() + self._reauth_entry_data = entry_data return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -99,7 +99,7 @@ async def async_step_reauth_confirm( ) validation_result = await async_validate_credentials( - self._reauth_entry.data[CONF_IP_ADDRESS], user_input[CONF_PASSWORD] + self._reauth_entry_data[CONF_IP_ADDRESS], user_input[CONF_PASSWORD] ) if validation_result.errors: @@ -110,7 +110,7 @@ async def async_step_reauth_confirm( ) return self.async_update_reload_and_abort( - self._reauth_entry, data=self._reauth_entry.data | user_input + self._get_reauth_entry(), data_updates=user_input ) async def async_step_user( diff --git a/homeassistant/components/airzone/__init__.py b/homeassistant/components/airzone/__init__.py index 754dfe90dceb18..5d1f9f051a36a8 100644 --- a/homeassistant/components/airzone/__init__.py +++ b/homeassistant/components/airzone/__init__.py @@ -24,6 +24,7 @@ Platform.CLIMATE, Platform.SELECT, Platform.SENSOR, + Platform.SWITCH, Platform.WATER_HEATER, ] diff --git a/homeassistant/components/airzone/climate.py b/homeassistant/components/airzone/climate.py index 5e5e1c126dedfc..6be7416bbb03e2 100644 --- a/homeassistant/components/airzone/climate.py +++ b/homeassistant/components/airzone/climate.py @@ -85,6 +85,7 @@ OperationMode.HEATING: HVACMode.HEAT, OperationMode.FAN: HVACMode.FAN_ONLY, OperationMode.DRY: HVACMode.DRY, + OperationMode.AUX_HEATING: HVACMode.HEAT, OperationMode.AUTO: HVACMode.HEAT_COOL, } HVAC_MODE_HASS_TO_LIB: Final[dict[HVACMode, OperationMode]] = { @@ -157,9 +158,10 @@ def __init__( self._attr_temperature_unit = TEMP_UNIT_LIB_TO_HASS[ self.get_airzone_value(AZD_TEMP_UNIT) ] - self._attr_hvac_modes = [ + _attr_hvac_modes = [ HVAC_MODE_LIB_TO_HASS[mode] for mode in self.get_airzone_value(AZD_MODES) ] + self._attr_hvac_modes = list(dict.fromkeys(_attr_hvac_modes)) if ( self.get_airzone_value(AZD_SPEED) is not None and self.get_airzone_value(AZD_SPEEDS) is not None @@ -273,12 +275,18 @@ def _async_update_attrs(self) -> None: self._attr_min_temp = self.get_airzone_value(AZD_TEMP_MIN) if self.supported_features & ClimateEntityFeature.FAN_MODE: self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED)) - if self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE: + if ( + self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + and self._attr_hvac_mode == HVACMode.HEAT_COOL + ): self._attr_target_temperature_high = self.get_airzone_value( AZD_COOL_TEMP_SET ) self._attr_target_temperature_low = self.get_airzone_value( AZD_HEAT_TEMP_SET ) + self._attr_target_temperature = None else: + self._attr_target_temperature_high = None + self._attr_target_temperature_low = None self._attr_target_temperature = 
self.get_airzone_value(AZD_TEMP_SET) diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index c40f4138b0ad3e..10fb20bb2ce9f5 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.9.3"] + "requirements": ["aioairzone==0.9.5"] } diff --git a/homeassistant/components/airzone/switch.py b/homeassistant/components/airzone/switch.py new file mode 100644 index 00000000000000..93136810604977 --- /dev/null +++ b/homeassistant/components/airzone/switch.py @@ -0,0 +1,122 @@ +"""Support for the Airzone switch.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any, Final + +from aioairzone.const import API_ON, AZD_ON, AZD_ZONES + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import AirzoneConfigEntry +from .coordinator import AirzoneUpdateCoordinator +from .entity import AirzoneEntity, AirzoneZoneEntity + + +@dataclass(frozen=True, kw_only=True) +class AirzoneSwitchDescription(SwitchEntityDescription): + """Class to describe an Airzone switch entity.""" + + api_param: str + + +ZONE_SWITCH_TYPES: Final[tuple[AirzoneSwitchDescription, ...]] = ( + AirzoneSwitchDescription( + api_param=API_ON, + device_class=SwitchDeviceClass.SWITCH, + key=AZD_ON, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AirzoneConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Add Airzone switch from a config_entry.""" + coordinator = entry.runtime_data + + added_zones: set[str] = set() + + def _async_entity_listener() -> None: + """Handle additions of switch.""" + + zones_data = coordinator.data.get(AZD_ZONES, {}) + received_zones = set(zones_data) + new_zones = received_zones - added_zones + if new_zones: + async_add_entities( + AirzoneZoneSwitch( + coordinator, + description, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + for description in ZONE_SWITCH_TYPES + if description.key in zones_data.get(system_zone_id) + ) + added_zones.update(new_zones) + + entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) + _async_entity_listener() + + +class AirzoneBaseSwitch(AirzoneEntity, SwitchEntity): + """Define an Airzone switch.""" + + entity_description: AirzoneSwitchDescription + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() + + @callback + def _async_update_attrs(self) -> None: + """Update switch attributes.""" + self._attr_is_on = self.get_airzone_value(self.entity_description.key) + + +class AirzoneZoneSwitch(AirzoneZoneEntity, AirzoneBaseSwitch): + """Define an Airzone Zone switch.""" + + def __init__( + self, + coordinator: AirzoneUpdateCoordinator, + description: AirzoneSwitchDescription, + entry: ConfigEntry, + system_zone_id: str, + zone_data: dict[str, Any], + ) -> None: + """Initialize.""" + super().__init__(coordinator, entry, system_zone_id, zone_data) + + self._attr_name = 
None + self._attr_unique_id = ( + f"{self._attr_unique_id}_{system_zone_id}_{description.key}" + ) + self.entity_description = description + + self._async_update_attrs() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + param = self.entity_description.api_param + await self._async_update_hvac_params({param: True}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + param = self.entity_description.api_param + await self._async_update_hvac_params({param: False}) diff --git a/homeassistant/components/airzone_cloud/__init__.py b/homeassistant/components/airzone_cloud/__init__.py index b1d7900f2e8a6a..5baa0bcea1085d 100644 --- a/homeassistant/components/airzone_cloud/__init__.py +++ b/homeassistant/components/airzone_cloud/__init__.py @@ -17,6 +17,7 @@ Platform.CLIMATE, Platform.SELECT, Platform.SENSOR, + Platform.SWITCH, Platform.WATER_HEATER, ] diff --git a/homeassistant/components/airzone_cloud/climate.py b/homeassistant/components/airzone_cloud/climate.py index 3658c0737959dc..d32b070ad8cc9f 100644 --- a/homeassistant/components/airzone_cloud/climate.py +++ b/homeassistant/components/airzone_cloud/climate.py @@ -224,14 +224,20 @@ def _async_update_attrs(self) -> None: self._attr_hvac_mode = HVACMode.OFF self._attr_max_temp = self.get_airzone_value(AZD_TEMP_SET_MAX) self._attr_min_temp = self.get_airzone_value(AZD_TEMP_SET_MIN) - if self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE: + if ( + self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + and self._attr_hvac_mode == HVACMode.HEAT_COOL + ): self._attr_target_temperature_high = self.get_airzone_value( AZD_TEMP_SET_COOL_AIR ) self._attr_target_temperature_low = self.get_airzone_value( AZD_TEMP_SET_HOT_AIR ) + self._attr_target_temperature = None else: + self._attr_target_temperature_high = None + self._attr_target_temperature_low = None self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET) @@ -304,6 +310,10 @@ async def async_set_fan_mode(self, fan_mode: str) -> None: async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" + hvac_mode = kwargs.get(ATTR_HVAC_MODE) + if hvac_mode is not None: + await self.async_set_hvac_mode(hvac_mode) + params: dict[str, Any] = {} if ATTR_TEMPERATURE in kwargs: params[API_SETPOINT] = { @@ -327,9 +337,6 @@ async def async_set_temperature(self, **kwargs: Any) -> None: } await self._async_update_params(params) - if ATTR_HVAC_MODE in kwargs: - await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE]) - class AirzoneDeviceGroupClimate(AirzoneClimate): """Define an Airzone Cloud DeviceGroup base class.""" @@ -360,6 +367,10 @@ async def async_turn_off(self) -> None: async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" + hvac_mode = kwargs.get(ATTR_HVAC_MODE) + if hvac_mode is not None: + await self.async_set_hvac_mode(hvac_mode) + params: dict[str, Any] = {} if ATTR_TEMPERATURE in kwargs: params[API_PARAMS] = { @@ -370,9 +381,6 @@ async def async_set_temperature(self, **kwargs: Any) -> None: } await self._async_update_params(params) - if ATTR_HVAC_MODE in kwargs: - await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE]) - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set hvac mode.""" params: dict[str, Any] = { diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index b1d3400c9be8e2..0e21e57ec5230d 100644 --- 
a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.6"] + "requirements": ["aioairzone-cloud==0.6.10"] } diff --git a/homeassistant/components/airzone_cloud/select.py b/homeassistant/components/airzone_cloud/select.py index 9bc0bdd1f5b661..895796a1073d45 100644 --- a/homeassistant/components/airzone_cloud/select.py +++ b/homeassistant/components/airzone_cloud/select.py @@ -2,14 +2,19 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from typing import Any, Final -from aioairzone_cloud.common import AirQualityMode +from aioairzone_cloud.common import AirQualityMode, OperationMode from aioairzone_cloud.const import ( API_AQ_MODE_CONF, + API_MODE, API_VALUE, AZD_AQ_MODE_CONF, + AZD_MASTER, + AZD_MODE, + AZD_MODES, AZD_ZONES, ) @@ -28,7 +33,10 @@ class AirzoneSelectDescription(SelectEntityDescription): """Class to describe an Airzone select entity.""" api_param: str - options_dict: dict[str, str] + options_dict: dict[str, Any] + options_fn: Callable[[dict[str, Any], dict[str, Any]], list[str]] = ( + lambda zone_data, value: list(value) + ) AIR_QUALITY_MAP: Final[dict[str, str]] = { @@ -37,6 +45,35 @@ class AirzoneSelectDescription(SelectEntityDescription): "auto": AirQualityMode.AUTO, } +MODE_MAP: Final[dict[str, int]] = { + "cool": OperationMode.COOLING, + "dry": OperationMode.DRY, + "fan": OperationMode.VENTILATION, + "heat": OperationMode.HEATING, + "heat_cool": OperationMode.AUTO, + "stop": OperationMode.STOP, +} + + +def main_zone_options( + zone_data: dict[str, Any], + options: dict[str, int], +) -> list[str]: + """Filter available modes.""" + modes = zone_data.get(AZD_MODES, []) + return [k for k, v in options.items() if v in modes] + + +MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( + AirzoneSelectDescription( + api_param=API_MODE, + key=AZD_MODE, + options_dict=MODE_MAP, + options_fn=main_zone_options, + translation_key="modes", + ), +) + ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( AirzoneSelectDescription( @@ -59,7 +96,19 @@ async def async_setup_entry( coordinator = entry.runtime_data # Zones - async_add_entities( + entities: list[AirzoneZoneSelect] = [ + AirzoneZoneSelect( + coordinator, + description, + zone_id, + zone_data, + ) + for description in MAIN_ZONE_SELECT_TYPES + for zone_id, zone_data in coordinator.data.get(AZD_ZONES, {}).items() + if description.key in zone_data and zone_data.get(AZD_MASTER) + ] + + entities.extend( AirzoneZoneSelect( coordinator, description, @@ -71,6 +120,8 @@ async def async_setup_entry( if description.key in zone_data ) + async_add_entities(entities) + class AirzoneBaseSelect(AirzoneEntity, SelectEntity): """Define an Airzone Cloud select.""" @@ -110,6 +161,11 @@ def __init__( self._attr_unique_id = f"{zone_id}_{description.key}" self.entity_description = description + + self._attr_options = self.entity_description.options_fn( + zone_data, description.options_dict + ) + self.values_dict = {v: k for k, v in description.options_dict.items()} self._async_update_attrs() diff --git a/homeassistant/components/airzone_cloud/strings.json b/homeassistant/components/airzone_cloud/strings.json index 523c43f4955827..6e0f9adcd66922 100644 --- a/homeassistant/components/airzone_cloud/strings.json +++ 
b/homeassistant/components/airzone_cloud/strings.json @@ -36,6 +36,17 @@ "on": "On", "auto": "Auto" } + }, + "modes": { + "name": "Mode", + "state": { + "cool": "[%key:component::climate::entity_component::_::state::cool%]", + "dry": "[%key:component::climate::entity_component::_::state::dry%]", + "fan": "[%key:component::climate::entity_component::_::state::fan_only%]", + "heat": "[%key:component::climate::entity_component::_::state::heat%]", + "heat_cool": "[%key:component::climate::entity_component::_::state::heat_cool%]", + "stop": "Stop" + } } }, "sensor": { diff --git a/homeassistant/components/airzone_cloud/switch.py b/homeassistant/components/airzone_cloud/switch.py new file mode 100644 index 00000000000000..0eb907ff7924d8 --- /dev/null +++ b/homeassistant/components/airzone_cloud/switch.py @@ -0,0 +1,115 @@ +"""Support for the Airzone Cloud switch.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any, Final + +from aioairzone_cloud.const import API_POWER, API_VALUE, AZD_POWER, AZD_ZONES + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import AirzoneCloudConfigEntry +from .coordinator import AirzoneUpdateCoordinator +from .entity import AirzoneEntity, AirzoneZoneEntity + + +@dataclass(frozen=True, kw_only=True) +class AirzoneSwitchDescription(SwitchEntityDescription): + """Class to describe an Airzone switch entity.""" + + api_param: str + + +ZONE_SWITCH_TYPES: Final[tuple[AirzoneSwitchDescription, ...]] = ( + AirzoneSwitchDescription( + api_param=API_POWER, + device_class=SwitchDeviceClass.SWITCH, + key=AZD_POWER, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AirzoneCloudConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Add Airzone Cloud switch from a config_entry.""" + coordinator = entry.runtime_data + + # Zones + async_add_entities( + AirzoneZoneSwitch( + coordinator, + description, + zone_id, + zone_data, + ) + for description in ZONE_SWITCH_TYPES + for zone_id, zone_data in coordinator.data.get(AZD_ZONES, {}).items() + if description.key in zone_data + ) + + +class AirzoneBaseSwitch(AirzoneEntity, SwitchEntity): + """Define an Airzone Cloud switch.""" + + entity_description: AirzoneSwitchDescription + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() + + @callback + def _async_update_attrs(self) -> None: + """Update switch attributes.""" + self._attr_is_on = self.get_airzone_value(self.entity_description.key) + + +class AirzoneZoneSwitch(AirzoneZoneEntity, AirzoneBaseSwitch): + """Define an Airzone Cloud Zone switch.""" + + def __init__( + self, + coordinator: AirzoneUpdateCoordinator, + description: AirzoneSwitchDescription, + zone_id: str, + zone_data: dict[str, Any], + ) -> None: + """Initialize.""" + super().__init__(coordinator, zone_id, zone_data) + + self._attr_name = None + self._attr_unique_id = f"{zone_id}_{description.key}" + self.entity_description = description + + self._async_update_attrs() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + param = self.entity_description.api_param + params: dict[str, Any] = { + param: { + API_VALUE: True, + } + } + await self._async_update_params(params) + + 
async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + param = self.entity_description.api_param + params: dict[str, Any] = { + param: { + API_VALUE: False, + } + } + await self._async_update_params(params) diff --git a/homeassistant/components/alarm_control_panel/__init__.py b/homeassistant/components/alarm_control_panel/__init__.py index e5c2745104dcca..2946fc64941e12 100644 --- a/homeassistant/components/alarm_control_panel/__init__.py +++ b/homeassistant/components/alarm_control_panel/__init__.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from datetime import timedelta from functools import partial import logging @@ -33,6 +34,7 @@ ) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.typing import ConfigType from homeassistant.util.hass_dict import HassKey @@ -49,6 +51,7 @@ ATTR_CODE_ARM_REQUIRED, DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) @@ -142,6 +145,7 @@ class AlarmControlPanelEntityDescription(EntityDescription, frozen_or_thawed=Tru "changed_by", "code_arm_required", "supported_features", + "alarm_state", } @@ -149,6 +153,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A """An abstract class for alarm control entities.""" entity_description: AlarmControlPanelEntityDescription + _attr_alarm_state: AlarmControlPanelState | None = None _attr_changed_by: str | None = None _attr_code_arm_required: bool = True _attr_code_format: CodeFormat | None = None @@ -157,6 +162,78 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A ) _alarm_control_panel_option_default_code: str | None = None + __alarm_legacy_state: bool = False + __alarm_legacy_state_reported: bool = False + + def __init_subclass__(cls, **kwargs: Any) -> None: + """Post initialisation processing.""" + super().__init_subclass__(**kwargs) + if any(method in cls.__dict__ for method in ("_attr_state", "state")): + # Integrations should use the 'alarm_state' property instead of + # setting the state directly. + cls.__alarm_legacy_state = True + + def __setattr__(self, __name: str, __value: Any) -> None: + """Set attribute. + + Deprecation warning if setting '_attr_state' directly + unless already reported. + """ + if __name == "_attr_state": + if self.__alarm_legacy_state_reported is not True: + self._report_deprecated_alarm_state_handling() + self.__alarm_legacy_state_reported = True + return super().__setattr__(__name, __value) + + @callback + def add_to_platform_start( + self, + hass: HomeAssistant, + platform: EntityPlatform, + parallel_updates: asyncio.Semaphore | None, + ) -> None: + """Start adding an entity to a platform.""" + super().add_to_platform_start(hass, platform, parallel_updates) + if self.__alarm_legacy_state and not self.__alarm_legacy_state_reported: + self._report_deprecated_alarm_state_handling() + + @callback + def _report_deprecated_alarm_state_handling(self) -> None: + """Report on deprecated handling of alarm state. + + Integrations should implement alarm_state instead of using state directly. + """ + self.__alarm_legacy_state_reported = True + if "custom_components" in type(self).__module__: + # Do not report on core integrations as they have been fixed. + report_issue = "report it to the custom integration author." 
+ _LOGGER.warning( + "Entity %s (%s) is setting state directly" + " which will stop working in HA Core 2025.11." + " Entities should implement the 'alarm_state' property and" + " return its state using the AlarmControlPanelState enum, please %s", + self.entity_id, + type(self), + report_issue, + ) + + @final + @property + def state(self) -> str | None: + """Return the current state.""" + if (alarm_state := self.alarm_state) is None: + return None + return alarm_state + + @cached_property + def alarm_state(self) -> AlarmControlPanelState | None: + """Return the current alarm control panel entity state. + + Integrations should overwrite this or use the '_attr_alarm_state' + attribute to set the alarm status using the 'AlarmControlPanelState' enum. + """ + return self._attr_alarm_state + @final @callback def code_or_default_code(self, code: str | None) -> str | None: diff --git a/homeassistant/components/alarm_control_panel/const.py b/homeassistant/components/alarm_control_panel/const.py index 2e8fe98da3b6e1..f3218626ead814 100644 --- a/homeassistant/components/alarm_control_panel/const.py +++ b/homeassistant/components/alarm_control_panel/const.py @@ -17,6 +17,21 @@ ATTR_CODE_ARM_REQUIRED: Final = "code_arm_required" +class AlarmControlPanelState(StrEnum): + """Alarm control panel entity states.""" + + DISARMED = "disarmed" + ARMED_HOME = "armed_home" + ARMED_AWAY = "armed_away" + ARMED_NIGHT = "armed_night" + ARMED_VACATION = "armed_vacation" + ARMED_CUSTOM_BYPASS = "armed_custom_bypass" + PENDING = "pending" + ARMING = "arming" + DISARMING = "disarming" + TRIGGERED = "triggered" + + class CodeFormat(StrEnum): """Code formats for the Alarm Control Panel.""" diff --git a/homeassistant/components/alarm_control_panel/device_condition.py b/homeassistant/components/alarm_control_panel/device_condition.py index 227fc31413e562..6d343bbe6058ba 100644 --- a/homeassistant/components/alarm_control_panel/device_condition.py +++ b/homeassistant/components/alarm_control_panel/device_condition.py @@ -13,13 +13,6 @@ CONF_DOMAIN, CONF_ENTITY_ID, CONF_TYPE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -31,7 +24,7 @@ from homeassistant.helpers.entity import get_supported_features from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN +from . 
import DOMAIN, AlarmControlPanelState from .const import ( CONDITION_ARMED_AWAY, CONDITION_ARMED_CUSTOM_BYPASS, @@ -109,19 +102,19 @@ def async_condition_from_config( ) -> condition.ConditionCheckerType: """Create a function to test a device condition.""" if config[CONF_TYPE] == CONDITION_TRIGGERED: - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED elif config[CONF_TYPE] == CONDITION_DISARMED: - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED elif config[CONF_TYPE] == CONDITION_ARMED_HOME: - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif config[CONF_TYPE] == CONDITION_ARMED_AWAY: - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif config[CONF_TYPE] == CONDITION_ARMED_NIGHT: - state = STATE_ALARM_ARMED_NIGHT + state = AlarmControlPanelState.ARMED_NIGHT elif config[CONF_TYPE] == CONDITION_ARMED_VACATION: - state = STATE_ALARM_ARMED_VACATION + state = AlarmControlPanelState.ARMED_VACATION elif config[CONF_TYPE] == CONDITION_ARMED_CUSTOM_BYPASS: - state = STATE_ALARM_ARMED_CUSTOM_BYPASS + state = AlarmControlPanelState.ARMED_CUSTOM_BYPASS registry = er.async_get(hass) entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID]) diff --git a/homeassistant/components/alarm_control_panel/device_trigger.py b/homeassistant/components/alarm_control_panel/device_trigger.py index 557666720e8f9e..a488cf10870ac5 100644 --- a/homeassistant/components/alarm_control_panel/device_trigger.py +++ b/homeassistant/components/alarm_control_panel/device_trigger.py @@ -15,13 +15,6 @@ CONF_FOR, CONF_PLATFORM, CONF_TYPE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry as er @@ -29,7 +22,7 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN +from . 
import DOMAIN, AlarmControlPanelState from .const import AlarmControlPanelEntityFeature BASIC_TRIGGER_TYPES: Final[set[str]] = {"triggered", "disarmed", "arming"} @@ -129,19 +122,19 @@ async def async_attach_trigger( ) -> CALLBACK_TYPE: """Attach a trigger.""" if config[CONF_TYPE] == "triggered": - to_state = STATE_ALARM_TRIGGERED + to_state = AlarmControlPanelState.TRIGGERED elif config[CONF_TYPE] == "disarmed": - to_state = STATE_ALARM_DISARMED + to_state = AlarmControlPanelState.DISARMED elif config[CONF_TYPE] == "arming": - to_state = STATE_ALARM_ARMING + to_state = AlarmControlPanelState.ARMING elif config[CONF_TYPE] == "armed_home": - to_state = STATE_ALARM_ARMED_HOME + to_state = AlarmControlPanelState.ARMED_HOME elif config[CONF_TYPE] == "armed_away": - to_state = STATE_ALARM_ARMED_AWAY + to_state = AlarmControlPanelState.ARMED_AWAY elif config[CONF_TYPE] == "armed_night": - to_state = STATE_ALARM_ARMED_NIGHT + to_state = AlarmControlPanelState.ARMED_NIGHT elif config[CONF_TYPE] == "armed_vacation": - to_state = STATE_ALARM_ARMED_VACATION + to_state = AlarmControlPanelState.ARMED_VACATION state_config = { state_trigger.CONF_PLATFORM: "state", diff --git a/homeassistant/components/alarm_control_panel/reproduce_state.py b/homeassistant/components/alarm_control_panel/reproduce_state.py index 5a3d79fe2ed35b..765514e98ec956 100644 --- a/homeassistant/components/alarm_control_panel/reproduce_state.py +++ b/homeassistant/components/alarm_control_panel/reproduce_state.py @@ -16,28 +16,21 @@ SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import Context, HomeAssistant, State -from . import DOMAIN +from . 
import DOMAIN, AlarmControlPanelState _LOGGER: Final = logging.getLogger(__name__) VALID_STATES: Final[set[str]] = { - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.TRIGGERED, } @@ -65,19 +58,19 @@ async def _async_reproduce_state( service_data = {ATTR_ENTITY_ID: state.entity_id} - if state.state == STATE_ALARM_ARMED_AWAY: + if state.state == AlarmControlPanelState.ARMED_AWAY: service = SERVICE_ALARM_ARM_AWAY - elif state.state == STATE_ALARM_ARMED_CUSTOM_BYPASS: + elif state.state == AlarmControlPanelState.ARMED_CUSTOM_BYPASS: service = SERVICE_ALARM_ARM_CUSTOM_BYPASS - elif state.state == STATE_ALARM_ARMED_HOME: + elif state.state == AlarmControlPanelState.ARMED_HOME: service = SERVICE_ALARM_ARM_HOME - elif state.state == STATE_ALARM_ARMED_NIGHT: + elif state.state == AlarmControlPanelState.ARMED_NIGHT: service = SERVICE_ALARM_ARM_NIGHT - elif state.state == STATE_ALARM_ARMED_VACATION: + elif state.state == AlarmControlPanelState.ARMED_VACATION: service = SERVICE_ALARM_ARM_VACATION - elif state.state == STATE_ALARM_DISARMED: + elif state.state == AlarmControlPanelState.DISARMED: service = SERVICE_ALARM_DISARM - elif state.state == STATE_ALARM_TRIGGERED: + elif state.state == AlarmControlPanelState.TRIGGERED: service = SERVICE_ALARM_TRIGGER await hass.services.async_call( diff --git a/homeassistant/components/alarmdecoder/alarm_control_panel.py b/homeassistant/components/alarmdecoder/alarm_control_panel.py index 7375320f800fab..cf72133ea12fe0 100644 --- a/homeassistant/components/alarmdecoder/alarm_control_panel.py +++ b/homeassistant/components/alarmdecoder/alarm_control_panel.py @@ -7,16 +7,10 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - ATTR_CODE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv @@ -106,15 +100,15 @@ async def async_added_to_hass(self) -> None: def _message_callback(self, message): """Handle received messages.""" if message.alarm_sounding or message.fire_alarm: - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED elif message.armed_away: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY elif message.armed_home and (message.entry_delay_off or message.perimeter_only): - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT elif message.armed_home: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME else: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED self._attr_extra_state_attributes = { "ac_power": message.ac_power, diff --git a/homeassistant/components/alarmdecoder/strings.json 
b/homeassistant/components/alarmdecoder/strings.json index dd698201b0944e..ccf1d965855996 100644 --- a/homeassistant/components/alarmdecoder/strings.json +++ b/homeassistant/components/alarmdecoder/strings.json @@ -22,7 +22,8 @@ } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "create_entry": { "default": "Successfully connected to AlarmDecoder." @@ -37,7 +38,7 @@ "title": "Configure AlarmDecoder", "description": "What would you like to edit?", "data": { - "edit_select": "Edit" + "edit_selection": "Edit" } }, "arm_settings": { diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index 6633cda8a97e7d..09b461428ac4eb 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -26,6 +26,7 @@ ) from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.components.climate import HVACMode @@ -36,10 +37,6 @@ ATTR_TEMPERATURE, ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, STATE_IDLE, STATE_OFF, STATE_ON, @@ -1317,13 +1314,13 @@ def get_property(self, name: str) -> Any: raise UnsupportedProperty(name) arm_state = self.entity.state - if arm_state == STATE_ALARM_ARMED_HOME: + if arm_state == AlarmControlPanelState.ARMED_HOME: return "ARMED_STAY" - if arm_state == STATE_ALARM_ARMED_AWAY: + if arm_state == AlarmControlPanelState.ARMED_AWAY: return "ARMED_AWAY" - if arm_state == STATE_ALARM_ARMED_NIGHT: + if arm_state == AlarmControlPanelState.ARMED_NIGHT: return "ARMED_NIGHT" - if arm_state == STATE_ALARM_ARMED_CUSTOM_BYPASS: + if arm_state == AlarmControlPanelState.ARMED_CUSTOM_BYPASS: return "ARMED_STAY" return "DISARMED" diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 3571f436ff6b22..8ea61ddbceb109 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -9,6 +9,7 @@ from homeassistant import core as ha from homeassistant.components import ( + alarm_control_panel, button, camera, climate, @@ -51,7 +52,6 @@ SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, - STATE_ALARM_DISARMED, UnitOfTemperature, ) from homeassistant.helpers import network @@ -1083,7 +1083,13 @@ async def async_api_arm( arm_state = directive.payload["armState"] data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id} - if entity.state != STATE_ALARM_DISARMED: + # Per Alexa Documentation: users are not allowed to switch from armed_away + # directly to another armed state without first disarming the system. + # https://developer.amazon.com/en-US/docs/alexa/device-apis/alexa-securitypanelcontroller.html#arming + if ( + entity.state == alarm_control_panel.AlarmControlPanelState.ARMED_AWAY + and arm_state != "ARMED_AWAY" + ): msg = "You must disarm the system before you can set the requested arm state." raise AlexaSecurityPanelAuthorizationRequired(msg) @@ -1133,7 +1139,7 @@ async def async_api_disarm( # Per Alexa Documentation: If you receive a Disarm directive, and the # system is already disarmed, respond with a success response, # not an error response. 
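# --- Illustrative sketch (editor's addition, not part of the patch) --------
# The arming rule encoded in the async_api_arm hunk above, in isolation:
# only a request to leave armed_away for a *different* armed mode must be
# answered with "disarm first"; re-arming the same mode or arming from any
# other state goes through. PanelState and the helper are hypothetical
# stand-ins, not the integration's real API.
from enum import StrEnum


class PanelState(StrEnum):
    """Subset of panel states relevant to the rule."""

    DISARMED = "disarmed"
    ARMED_HOME = "armed_home"
    ARMED_AWAY = "armed_away"
    ARMED_NIGHT = "armed_night"


def arm_requires_disarm_first(current: PanelState, requested_arm_state: str) -> bool:
    """Return True when Alexa must be told to disarm before this ARM directive."""
    return current is PanelState.ARMED_AWAY and requested_arm_state != "ARMED_AWAY"


assert arm_requires_disarm_first(PanelState.ARMED_AWAY, "ARMED_STAY")
assert not arm_requires_disarm_first(PanelState.ARMED_AWAY, "ARMED_AWAY")
assert not arm_requires_disarm_first(PanelState.ARMED_HOME, "ARMED_AWAY")
# ---------------------------------------------------------------------------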
- if entity.state == STATE_ALARM_DISARMED: + if entity.state == alarm_control_panel.AlarmControlPanelState.DISARMED: return response payload = directive.payload diff --git a/homeassistant/components/analytics/analytics.py b/homeassistant/components/analytics/analytics.py index c1141b40e4dd47..b63475c80a4e5f 100644 --- a/homeassistant/components/analytics/analytics.py +++ b/homeassistant/components/analytics/analytics.py @@ -29,6 +29,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.entity_registry as er +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.storage import Store from homeassistant.helpers.system_info import async_get_system_info from homeassistant.loader import ( @@ -136,7 +137,7 @@ def endpoint(self) -> str: @property def supervisor(self) -> bool: """Return bool if a supervisor is present.""" - return hassio.is_hassio(self.hass) + return is_hassio(self.hass) async def load(self) -> None: """Load preferences.""" diff --git a/homeassistant/components/analytics/manifest.json b/homeassistant/components/analytics/manifest.json index 955c4a813f40c3..5142a86ad97216 100644 --- a/homeassistant/components/analytics/manifest.json +++ b/homeassistant/components/analytics/manifest.json @@ -1,7 +1,7 @@ { "domain": "analytics", "name": "Analytics", - "after_dependencies": ["energy", "recorder"], + "after_dependencies": ["energy", "hassio", "recorder"], "codeowners": ["@home-assistant/core", "@ludeeus"], "dependencies": ["api", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/analytics", diff --git a/homeassistant/components/analytics_insights/config_flow.py b/homeassistant/components/analytics_insights/config_flow.py index 909290b10355db..c36755f5403caa 100644 --- a/homeassistant/components/analytics_insights/config_flow.py +++ b/homeassistant/components/analytics_insights/config_flow.py @@ -16,7 +16,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -27,6 +26,7 @@ ) from .const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -45,9 +45,11 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> HomeassistantAnalyticsOptionsFlowHandler: """Get the options flow for this handler.""" - return HomeassistantAnalyticsOptionsFlowHandler(config_entry) + return HomeassistantAnalyticsOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -55,8 +57,12 @@ async def async_step_user( """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: - if not user_input.get(CONF_TRACKED_INTEGRATIONS) and not user_input.get( - CONF_TRACKED_CUSTOM_INTEGRATIONS + if all( + [ + not user_input.get(CONF_TRACKED_ADDONS), + not user_input.get(CONF_TRACKED_INTEGRATIONS), + not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS), + ] ): errors["base"] = "no_integrations_selected" else: @@ -64,6 +70,7 @@ async def async_step_user( title="Home Assistant Analytics Insights", data={}, options={ + CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []), CONF_TRACKED_INTEGRATIONS: user_input.get( CONF_TRACKED_INTEGRATIONS, 
[] ), @@ -77,6 +84,7 @@ async def async_step_user( session=async_get_clientsession(self.hass) ) try: + addons = await client.get_addons() integrations = await client.get_integrations() custom_integrations = await client.get_custom_integrations() except HomeassistantAnalyticsConnectionError: @@ -99,6 +107,13 @@ async def async_step_user( errors=errors, data_schema=vol.Schema( { + vol.Optional(CONF_TRACKED_ADDONS): SelectSelector( + SelectSelectorConfig( + options=list(addons), + multiple=True, + sort=True, + ) + ), vol.Optional(CONF_TRACKED_INTEGRATIONS): SelectSelector( SelectSelectorConfig( options=options, @@ -118,7 +133,7 @@ async def async_step_user( ) -class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): +class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow): """Handle Homeassistant Analytics options.""" async def async_step_init( @@ -127,14 +142,19 @@ async def async_step_init( """Manage the options.""" errors: dict[str, str] = {} if user_input is not None: - if not user_input.get(CONF_TRACKED_INTEGRATIONS) and not user_input.get( - CONF_TRACKED_CUSTOM_INTEGRATIONS + if all( + [ + not user_input.get(CONF_TRACKED_ADDONS), + not user_input.get(CONF_TRACKED_INTEGRATIONS), + not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS), + ] ): errors["base"] = "no_integrations_selected" else: return self.async_create_entry( title="", data={ + CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []), CONF_TRACKED_INTEGRATIONS: user_input.get( CONF_TRACKED_INTEGRATIONS, [] ), @@ -148,6 +168,7 @@ async def async_step_init( session=async_get_clientsession(self.hass) ) try: + addons = await client.get_addons() integrations = await client.get_integrations() custom_integrations = await client.get_custom_integrations() except HomeassistantAnalyticsConnectionError: @@ -168,6 +189,13 @@ async def async_step_init( data_schema=self.add_suggested_values_to_schema( vol.Schema( { + vol.Optional(CONF_TRACKED_ADDONS): SelectSelector( + SelectSelectorConfig( + options=list(addons), + multiple=True, + sort=True, + ) + ), vol.Optional(CONF_TRACKED_INTEGRATIONS): SelectSelector( SelectSelectorConfig( options=options, @@ -184,6 +212,6 @@ async def async_step_init( ), }, ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/analytics_insights/const.py b/homeassistant/components/analytics_insights/const.py index 56ea3f59794054..1a01755f9ed906 100644 --- a/homeassistant/components/analytics_insights/const.py +++ b/homeassistant/components/analytics_insights/const.py @@ -4,6 +4,7 @@ DOMAIN = "analytics_insights" +CONF_TRACKED_ADDONS = "tracked_addons" CONF_TRACKED_INTEGRATIONS = "tracked_integrations" CONF_TRACKED_CUSTOM_INTEGRATIONS = "tracked_custom_integrations" diff --git a/homeassistant/components/analytics_insights/coordinator.py b/homeassistant/components/analytics_insights/coordinator.py index 3a7c40dfa82c70..701f1a8dbd4f92 100644 --- a/homeassistant/components/analytics_insights/coordinator.py +++ b/homeassistant/components/analytics_insights/coordinator.py @@ -12,11 +12,13 @@ HomeassistantAnalyticsConnectionError, HomeassistantAnalyticsNotModifiedError, ) +from python_homeassistant_analytics.models import Addon from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -33,6 +35,7 @@ class AnalyticsData: active_installations: int reports_integrations: int 
+ addons: dict[str, int] core_integrations: dict[str, int] custom_integrations: dict[str, int] @@ -53,6 +56,7 @@ def __init__( update_interval=timedelta(hours=12), ) self._client = client + self._tracked_addons = self.config_entry.options.get(CONF_TRACKED_ADDONS, []) self._tracked_integrations = self.config_entry.options[ CONF_TRACKED_INTEGRATIONS ] @@ -62,6 +66,7 @@ def __init__( async def _async_update_data(self) -> AnalyticsData: try: + addons_data = await self._client.get_addons() data = await self._client.get_current_analytics() custom_data = await self._client.get_custom_integrations() except HomeassistantAnalyticsConnectionError as err: @@ -70,6 +75,9 @@ async def _async_update_data(self) -> AnalyticsData: ) from err except HomeassistantAnalyticsNotModifiedError: return self.data + addons = { + addon: get_addon_value(addons_data, addon) for addon in self._tracked_addons + } core_integrations = { integration: data.integrations.get(integration, 0) for integration in self._tracked_integrations @@ -81,11 +89,19 @@ async def _async_update_data(self) -> AnalyticsData: return AnalyticsData( data.active_installations, data.reports_integrations, + addons, core_integrations, custom_integrations, ) +def get_addon_value(data: dict[str, Addon], name_slug: str) -> int: + """Get addon value.""" + if name_slug in data: + return data[name_slug].total + return 0 + + def get_custom_integration_value( data: dict[str, CustomIntegration], domain: str ) -> int: diff --git a/homeassistant/components/analytics_insights/sensor.py b/homeassistant/components/analytics_insights/sensor.py index 264c34e75ef788..324ca6991d2a64 100644 --- a/homeassistant/components/analytics_insights/sensor.py +++ b/homeassistant/components/analytics_insights/sensor.py @@ -29,6 +29,20 @@ class AnalyticsSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[AnalyticsData], StateType] +def get_addon_entity_description( + name_slug: str, +) -> AnalyticsSensorEntityDescription: + """Get addon entity description.""" + return AnalyticsSensorEntityDescription( + key=f"addon_{name_slug}_active_installations", + translation_key="addons", + name=name_slug, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="active installations", + value_fn=lambda data: data.addons.get(name_slug), + ) + + def get_core_integration_entity_description( domain: str, name: str ) -> AnalyticsSensorEntityDescription: @@ -89,6 +103,13 @@ async def async_setup_entry( analytics_data.coordinator ) entities: list[HomeassistantAnalyticsSensor] = [] + entities.extend( + HomeassistantAnalyticsSensor( + coordinator, + get_addon_entity_description(addon_name_slug), + ) + for addon_name_slug in coordinator.data.addons + ) entities.extend( HomeassistantAnalyticsSensor( coordinator, diff --git a/homeassistant/components/analytics_insights/strings.json b/homeassistant/components/analytics_insights/strings.json index e37ac26f829a4a..10d3c19a2f6dd3 100644 --- a/homeassistant/components/analytics_insights/strings.json +++ b/homeassistant/components/analytics_insights/strings.json @@ -3,10 +3,12 @@ "step": { "user": { "data": { + "tracked_addons": "Addons", "tracked_integrations": "Integrations", "tracked_custom_integrations": "Custom integrations" }, "data_description": { + "tracked_addons": "Select the addons you want to track", "tracked_integrations": "Select the integrations you want to track", "tracked_custom_integrations": "Select the custom integrations you want to track" } @@ -17,17 +19,19 @@ "unknown": 
"[%key:common::config_flow::error::unknown%]" }, "error": { - "no_integration_selected": "You must select at least one integration to track" + "no_integrations_selected": "You must select at least one integration to track" } }, "options": { "step": { "init": { "data": { + "tracked_addons": "[%key:component::analytics_insights::config::step::user::data::tracked_addons%]", "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_integrations%]", "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_custom_integrations%]" }, "data_description": { + "tracked_addons": "[%key:component::analytics_insights::config::step::user::data_description::tracked_addons%]", "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_integrations%]", "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_custom_integrations%]" } @@ -37,7 +41,7 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "error": { - "no_integration_selected": "[%key:component::analytics_insights::config::error::no_integration_selected%]" + "no_integrations_selected": "[%key:component::analytics_insights::config::error::no_integrations_selected%]" } }, "entity": { diff --git a/homeassistant/components/androidtv/__init__.py b/homeassistant/components/androidtv/__init__.py index 34b324db169534..34c4212c913dbe 100644 --- a/homeassistant/components/androidtv/__init__.py +++ b/homeassistant/components/androidtv/__init__.py @@ -4,6 +4,7 @@ from collections.abc import Mapping from dataclasses import dataclass +import logging import os from typing import Any @@ -40,6 +41,7 @@ CONF_ADB_SERVER_IP, CONF_ADB_SERVER_PORT, CONF_ADBKEY, + CONF_SCREENCAP_INTERVAL, CONF_STATE_DETECTION_RULES, DEFAULT_ADB_SERVER_PORT, DEVICE_ANDROIDTV, @@ -66,6 +68,8 @@ _INVALID_MACS = {"ff:ff:ff:ff:ff:ff"} +_LOGGER = logging.getLogger(__name__) + @dataclass class AndroidTVRuntimeData: @@ -157,6 +161,32 @@ async def async_connect_androidtv( return aftv, None +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", entry.version, entry.minor_version + ) + + if entry.version == 1: + new_options = {**entry.options} + + # Migrate MinorVersion 1 -> MinorVersion 2: New option + if entry.minor_version < 2: + new_options = {**new_options, CONF_SCREENCAP_INTERVAL: 0} + + hass.config_entries.async_update_entry( + entry, options=new_options, minor_version=2, version=1 + ) + + _LOGGER.debug( + "Migration to configuration version %s.%s successful", + entry.version, + entry.minor_version, + ) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool: """Set up Android Debug Bridge platform.""" diff --git a/homeassistant/components/androidtv/config_flow.py b/homeassistant/components/androidtv/config_flow.py index e8350acc9cb91a..afaba5175dac96 100644 --- a/homeassistant/components/androidtv/config_flow.py +++ b/homeassistant/components/androidtv/config_flow.py @@ -13,7 +13,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_PORT from homeassistant.core import callback @@ -34,7 +34,7 @@ CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, CONF_GET_SOURCES, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, 
CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -43,7 +43,7 @@ DEFAULT_EXCLUDE_UNNAMED_APPS, DEFAULT_GET_SOURCES, DEFAULT_PORT, - DEFAULT_SCREENCAP, + DEFAULT_SCREENCAP_INTERVAL, DEVICE_CLASSES, DOMAIN, PROP_ETHMAC, @@ -76,6 +76,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 + MINOR_VERSION = 2 @callback def _show_setup_form( @@ -185,16 +186,14 @@ def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: return OptionsFlowHandler(config_entry) -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle an option flow for Android Debug Bridge.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - - self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {}) - self._state_det_rules: dict[str, Any] = self.options.setdefault( - CONF_STATE_DETECTION_RULES, {} + self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {})) + self._state_det_rules: dict[str, Any] = dict( + config_entry.options.get(CONF_STATE_DETECTION_RULES, {}) ) self._conf_app_id: str | None = None self._conf_rule_id: str | None = None @@ -236,7 +235,7 @@ def _async_init_form(self) -> ConfigFlowResult: SelectOptionDict(value=k, label=v) for k, v in apps_list.items() ] rules = [RULES_NEW_ID, *self._state_det_rules] - options = self.options + options = self.config_entry.options data_schema = vol.Schema( { @@ -253,10 +252,12 @@ def _async_init_form(self) -> ConfigFlowResult: CONF_EXCLUDE_UNNAMED_APPS, DEFAULT_EXCLUDE_UNNAMED_APPS ), ): bool, - vol.Optional( - CONF_SCREENCAP, - default=options.get(CONF_SCREENCAP, DEFAULT_SCREENCAP), - ): bool, + vol.Required( + CONF_SCREENCAP_INTERVAL, + default=options.get( + CONF_SCREENCAP_INTERVAL, DEFAULT_SCREENCAP_INTERVAL + ), + ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=15)), vol.Optional( CONF_TURN_OFF_COMMAND, description={ diff --git a/homeassistant/components/androidtv/const.py b/homeassistant/components/androidtv/const.py index ee279c0fb3ac45..0d9bdc8f6c0020 100644 --- a/homeassistant/components/androidtv/const.py +++ b/homeassistant/components/androidtv/const.py @@ -9,6 +9,7 @@ CONF_EXCLUDE_UNNAMED_APPS = "exclude_unnamed_apps" CONF_GET_SOURCES = "get_sources" CONF_SCREENCAP = "screencap" +CONF_SCREENCAP_INTERVAL = "screencap_interval" CONF_STATE_DETECTION_RULES = "state_detection_rules" CONF_TURN_OFF_COMMAND = "turn_off_command" CONF_TURN_ON_COMMAND = "turn_on_command" @@ -18,7 +19,7 @@ DEFAULT_EXCLUDE_UNNAMED_APPS = False DEFAULT_GET_SOURCES = True DEFAULT_PORT = 5555 -DEFAULT_SCREENCAP = True +DEFAULT_SCREENCAP_INTERVAL = 5 DEVICE_ANDROIDTV = "androidtv" DEVICE_FIRETV = "firetv" diff --git a/homeassistant/components/androidtv/media_player.py b/homeassistant/components/androidtv/media_player.py index 6e338529ad4285..728411ddf42410 100644 --- a/homeassistant/components/androidtv/media_player.py +++ b/homeassistant/components/androidtv/media_player.py @@ -2,10 +2,9 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta import hashlib import logging -from typing import Any from androidtv.constants import APPS, KEYS from androidtv.setup_async import AndroidTVAsync, FireTVAsync @@ -23,19 +22,19 @@ from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import 
AddEntitiesCallback -from homeassistant.util import Throttle +from homeassistant.util.dt import utcnow from . import AndroidTVConfigEntry from .const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, CONF_GET_SOURCES, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, DEFAULT_EXCLUDE_UNNAMED_APPS, DEFAULT_GET_SOURCES, - DEFAULT_SCREENCAP, + DEFAULT_SCREENCAP_INTERVAL, DEVICE_ANDROIDTV, SIGNAL_CONFIG_ENTITY, ) @@ -48,8 +47,6 @@ ATTR_HDMI_INPUT = "hdmi_input" ATTR_LOCAL_PATH = "local_path" -MIN_TIME_BETWEEN_SCREENCAPS = timedelta(seconds=60) - SERVICE_ADB_COMMAND = "adb_command" SERVICE_DOWNLOAD = "download" SERVICE_LEARN_SENDEVENT = "learn_sendevent" @@ -125,7 +122,8 @@ def __init__(self, entry: AndroidTVConfigEntry) -> None: self._app_name_to_id: dict[str, str] = {} self._get_sources = DEFAULT_GET_SOURCES self._exclude_unnamed_apps = DEFAULT_EXCLUDE_UNNAMED_APPS - self._screencap = DEFAULT_SCREENCAP + self._screencap_delta: timedelta | None = None + self._last_screencap: datetime | None = None self.turn_on_command: str | None = None self.turn_off_command: str | None = None @@ -159,7 +157,13 @@ def _process_config(self) -> None: self._exclude_unnamed_apps = options.get( CONF_EXCLUDE_UNNAMED_APPS, DEFAULT_EXCLUDE_UNNAMED_APPS ) - self._screencap = options.get(CONF_SCREENCAP, DEFAULT_SCREENCAP) + screencap_interval: int = options.get( + CONF_SCREENCAP_INTERVAL, DEFAULT_SCREENCAP_INTERVAL + ) + if screencap_interval > 0: + self._screencap_delta = timedelta(minutes=screencap_interval) + else: + self._screencap_delta = None self.turn_off_command = options.get(CONF_TURN_OFF_COMMAND) self.turn_on_command = options.get(CONF_TURN_ON_COMMAND) @@ -183,7 +187,7 @@ async def _adb_screencap(self) -> bytes | None: async def _async_get_screencap(self, prev_app_id: str | None = None) -> None: """Take a screen capture from the device when enabled.""" if ( - not self._screencap + not self._screencap_delta or self.state in {MediaPlayerState.OFF, None} or not self.available ): @@ -193,11 +197,18 @@ async def _async_get_screencap(self, prev_app_id: str | None = None) -> None: force: bool = prev_app_id is not None if force: force = prev_app_id != self._attr_app_id - await self._adb_get_screencap(no_throttle=force) + await self._adb_get_screencap(force) + + async def _adb_get_screencap(self, force: bool = False) -> None: + """Take a screen capture from the device every configured minutes.""" + time_elapsed = self._screencap_delta is not None and ( + self._last_screencap is None + or (utcnow() - self._last_screencap) >= self._screencap_delta + ) + if not (force or time_elapsed): + return - @Throttle(MIN_TIME_BETWEEN_SCREENCAPS) - async def _adb_get_screencap(self, **kwargs: Any) -> None: - """Take a screen capture from the device every 60 seconds.""" + self._last_screencap = utcnow() if media_data := await self._adb_screencap(): self._media_image = media_data, "image/png" self._attr_media_image_hash = hashlib.sha256(media_data).hexdigest()[:16] diff --git a/homeassistant/components/androidtv/strings.json b/homeassistant/components/androidtv/strings.json index 3032e9ac6ef280..b6f5d494d0f67e 100644 --- a/homeassistant/components/androidtv/strings.json +++ b/homeassistant/components/androidtv/strings.json @@ -31,7 +31,7 @@ "apps": "Configure applications list", "get_sources": "Retrieve the running apps as the list of sources", "exclude_unnamed_apps": "Exclude apps with unknown name from the sources list", - "screencap": "Use screen capture for album art", + "screencap_interval": 
"Interval in minutes between screen capture for album art (set 0 to disable)", "state_detection_rules": "Configure state detection rules", "turn_off_command": "ADB shell turn off command (leave empty for default)", "turn_on_command": "ADB shell turn on command (leave empty for default)" diff --git a/homeassistant/components/androidtv_remote/config_flow.py b/homeassistant/components/androidtv_remote/config_flow.py index 89bf321d80b5c9..3500e4ff47b039 100644 --- a/homeassistant/components/androidtv_remote/config_flow.py +++ b/homeassistant/components/androidtv_remote/config_flow.py @@ -20,7 +20,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME from homeassistant.core import callback @@ -63,7 +63,6 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): host: str name: str mac: str - reauth_entry: ConfigEntry async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -107,7 +106,7 @@ async def async_step_pair( await self.api.async_finish_pairing(pin) if self.source == SOURCE_REAUTH: await self.hass.config_entries.async_reload( - self.reauth_entry.entry_id + self._get_reauth_entry().entry_id ) return self.async_abort(reason="reauth_successful") return self.async_create_entry( @@ -152,7 +151,18 @@ async def async_step_zeroconf( if not (mac := discovery_info.properties.get("bt")): return self.async_abort(reason="cannot_connect") self.mac = mac - await self.async_set_unique_id(format_mac(self.mac)) + existing_config_entry = await self.async_set_unique_id(format_mac(mac)) + # Sometimes, devices send an invalid zeroconf message with multiple addresses + # and one of them, which could end up being in discovery_info.host, is from a + # different device. If any of the discovery_info.ip_addresses matches the + # existing host, don't update the host. 
+ if existing_config_entry and len(discovery_info.ip_addresses) > 1: + existing_host = existing_config_entry.data[CONF_HOST] + if existing_host != self.host: + if existing_host in [ + str(ip_address) for ip_address in discovery_info.ip_addresses + ]: + self.host = existing_host self._abort_if_unique_id_configured( updates={CONF_HOST: self.host, CONF_NAME: self.name} ) @@ -183,7 +193,6 @@ async def async_step_reauth( self.host = entry_data[CONF_HOST] self.name = entry_data[CONF_NAME] self.mac = entry_data[CONF_MAC] - self.reauth_entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -212,13 +221,12 @@ def async_get_options_flow( return AndroidTVRemoteOptionsFlowHandler(config_entry) -class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AndroidTVRemoteOptionsFlowHandler(OptionsFlow): """Android TV Remote options flow.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {}) + self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {})) self._conf_app_id: str | None = None @callback diff --git a/homeassistant/components/anova/__init__.py b/homeassistant/components/anova/__init__.py index 02c468c131983d..4ae4750b9a9ad5 100644 --- a/homeassistant/components/anova/__init__.py +++ b/homeassistant/components/anova/__init__.py @@ -13,7 +13,7 @@ WebsocketFailure, ) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client @@ -71,3 +71,25 @@ async def async_unload_entry(hass: HomeAssistant, entry: AnovaConfigEntry) -> bo # Disconnect from WS await entry.runtime_data.api.disconnect_websocket() return unload_ok + + +async def async_migrate_entry(hass: HomeAssistant, entry: AnovaConfigEntry) -> bool: + """Migrate entry.""" + _LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version) + + if entry.version > 1: + # This means the user has downgraded from a future version + return False + + if entry.version == 1 and entry.minor_version == 1: + new_data = {**entry.data} + if CONF_DEVICES in new_data: + new_data.pop(CONF_DEVICES) + + hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2) + + _LOGGER.debug( + "Migration to version %s:%s successful", entry.version, entry.minor_version + ) + + return True diff --git a/homeassistant/components/anova/config_flow.py b/homeassistant/components/anova/config_flow.py index 6e331ccf4a2763..bc4723b1dbad31 100644 --- a/homeassistant/components/anova/config_flow.py +++ b/homeassistant/components/anova/config_flow.py @@ -6,7 +6,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -16,6 +16,7 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): """Sets up a config flow for Anova.""" VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, str] | None = None @@ -42,8 +43,6 @@ async def async_step_user( data={ CONF_USERNAME: 
user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], - # this can be removed in a migration to 1.2 in 2024.11 - CONF_DEVICES: [], }, ) diff --git a/homeassistant/components/anthropic/config_flow.py b/homeassistant/components/anthropic/config_flow.py index 01e16ec5350136..fa43a3c4bccc19 100644 --- a/homeassistant/components/anthropic/config_flow.py +++ b/homeassistant/components/anthropic/config_flow.py @@ -87,10 +87,13 @@ async def async_step_user( except anthropic.APIConnectionError: errors["base"] = "cannot_connect" except anthropic.APIStatusError as e: - if isinstance(e.body, dict): - errors["base"] = e.body.get("error", {}).get("type", "unknown") - else: - errors["base"] = "unknown" + errors["base"] = "unknown" + if ( + isinstance(e.body, dict) + and (error := e.body.get("error")) + and error.get("type") == "authentication_error" + ): + errors["base"] = "authentication_error" except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -118,7 +121,6 @@ class AnthropicOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) diff --git a/homeassistant/components/aosmith/config_flow.py b/homeassistant/components/aosmith/config_flow.py index 1e618a79f9c85b..a6a0712c4f7512 100644 --- a/homeassistant/components/aosmith/config_flow.py +++ b/homeassistant/components/aosmith/config_flow.py @@ -88,12 +88,11 @@ async def async_step_reauth_confirm( if user_input: password = user_input[CONF_PASSWORD] - entry = self._get_reauth_entry() error = await self._async_validate_credentials(self._reauth_email, password) if error is None: return self.async_update_reload_and_abort( - entry, - data=entry.data | user_input, + self._get_reauth_entry(), + data_updates=user_input, ) errors["base"] = error diff --git a/homeassistant/components/apache_kafka/__init__.py b/homeassistant/components/apache_kafka/__init__.py index 0f781e0e1c67f6..68d3f58a63a894 100644 --- a/homeassistant/components/apache_kafka/__init__.py +++ b/homeassistant/components/apache_kafka/__init__.py @@ -38,7 +38,7 @@ vol.Required(CONF_TOPIC): cv.string, vol.Optional(CONF_FILTER, default={}): FILTER_SCHEMA, vol.Optional(CONF_SECURITY_PROTOCOL, default="PLAINTEXT"): vol.In( - ["PLAINTEXT", "SASL_SSL"] + ["PLAINTEXT", "SSL", "SASL_SSL"] ), vol.Optional(CONF_USERNAME): cv.string, vol.Optional(CONF_PASSWORD): cv.string, @@ -94,7 +94,7 @@ def __init__( port: int, topic: str, entities_filter: EntityFilter, - security_protocol: Literal["PLAINTEXT", "SASL_SSL"], + security_protocol: Literal["PLAINTEXT", "SSL", "SASL_SSL"], username: str | None, password: str | None, ) -> None: diff --git a/homeassistant/components/apsystems/sensor.py b/homeassistant/components/apsystems/sensor.py index afeb9d071ab1c8..f87bc0f3f26576 100644 --- a/homeassistant/components/apsystems/sensor.py +++ b/homeassistant/components/apsystems/sensor.py @@ -12,12 +12,11 @@ SensorEntity, SensorEntityDescription, SensorStateClass, - StateType, ) from homeassistant.const import UnitOfEnergy, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import DiscoveryInfoType +from homeassistant.helpers.typing import DiscoveryInfoType, StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import ApSystemsConfigEntry, ApSystemsData diff --git a/homeassistant/components/aranet/__init__.py b/homeassistant/components/aranet/__init__.py index 3a2bc26665362c..81b3dae04de5ce 100644 --- a/homeassistant/components/aranet/__init__.py +++ b/homeassistant/components/aranet/__init__.py @@ -15,12 +15,14 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN - PLATFORMS: list[Platform] = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) +type AranetConfigEntry = ConfigEntry[ + PassiveBluetoothProcessorCoordinator[Aranet4Advertisement] +] + def _service_info_to_adv( service_info: BluetoothServiceInfoBleak, @@ -28,30 +30,25 @@ def _service_info_to_adv( return Aranet4Advertisement(service_info.device, service_info.advertisement) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AranetConfigEntry) -> bool: """Set up Aranet from a config entry.""" address = entry.unique_id assert address is not None - coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ( - PassiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=_service_info_to_adv, - ) + coordinator = PassiveBluetoothProcessorCoordinator( + hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=_service_info_to_adv, ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload( - coordinator.async_start() - ) # only start after all platforms have had a chance to subscribe + # only start after all platforms have had a chance to subscribe + entry.async_on_unload(coordinator.async_start()) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AranetConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aranet/sensor.py b/homeassistant/components/aranet/sensor.py index 1dc4b9f956ec93..d7fbd0e4b3b401 100644 --- a/homeassistant/components/aranet/sensor.py +++ b/homeassistant/components/aranet/sensor.py @@ -8,12 +8,10 @@ from aranet4.client import Aranet4Advertisement from bleak.backends.device import BLEDevice -from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, PassiveBluetoothDataUpdate, PassiveBluetoothEntityKey, - PassiveBluetoothProcessorCoordinator, PassiveBluetoothProcessorEntity, ) from homeassistant.components.sensor import ( @@ -38,7 +36,8 @@ from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import ARANET_MANUFACTURER_NAME, DOMAIN +from . 
import AranetConfigEntry +from .const import ARANET_MANUFACTURER_NAME @dataclass(frozen=True) @@ -174,20 +173,17 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: AranetConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Aranet sensors.""" - coordinator: PassiveBluetoothProcessorCoordinator[Aranet4Advertisement] = hass.data[ - DOMAIN - ][entry.entry_id] processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update) entry.async_on_unload( processor.async_add_entities_listener( Aranet4BluetoothSensorEntity, async_add_entities ) ) - entry.async_on_unload(coordinator.async_register_processor(processor)) + entry.async_on_unload(entry.runtime_data.async_register_processor(processor)) class Aranet4BluetoothSensorEntity( diff --git a/homeassistant/components/arris_tg2492lg/manifest.json b/homeassistant/components/arris_tg2492lg/manifest.json index fa7673b4276e5f..c36423d287a56d 100644 --- a/homeassistant/components/arris_tg2492lg/manifest.json +++ b/homeassistant/components/arris_tg2492lg/manifest.json @@ -2,7 +2,6 @@ "domain": "arris_tg2492lg", "name": "Arris TG2492LG", "codeowners": ["@vanbalken"], - "dependencies": [], "documentation": "https://www.home-assistant.io/integrations/arris_tg2492lg", "integration_type": "hub", "iot_class": "local_polling", diff --git a/homeassistant/components/aseko_pool_live/config_flow.py b/homeassistant/components/aseko_pool_live/config_flow.py index eacb7f2a42dfb9..e93eb803d62f66 100644 --- a/homeassistant/components/aseko_pool_live/config_flow.py +++ b/homeassistant/components/aseko_pool_live/config_flow.py @@ -9,12 +9,7 @@ from aioaseko import Aseko, AsekoAPIError, AsekoInvalidCredentials import voluptuous as vol -from homeassistant.config_entries import ( - SOURCE_REAUTH, - ConfigEntry, - ConfigFlow, - ConfigFlowResult, -) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_UNIQUE_ID from .const import DOMAIN @@ -34,9 +29,7 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): } ) - reauth_entry: ConfigEntry - - async def get_account_info(self, email: str, password: str) -> dict: + async def get_account_info(self, email: str, password: str) -> dict[str, Any]: """Get account info from the mobile API and the web API.""" aseko = Aseko(email, password) user = await aseko.login() @@ -77,9 +70,11 @@ async def async_step_user( async def async_store_credentials(self, info: dict[str, Any]) -> ConfigFlowResult: """Store validated credentials.""" + await self.async_set_unique_id(info[CONF_UNIQUE_ID]) if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() return self.async_update_reload_and_abort( - self.reauth_entry, + self._get_reauth_entry(), title=info[CONF_EMAIL], data={ CONF_EMAIL: info[CONF_EMAIL], @@ -87,7 +82,6 @@ async def async_store_credentials(self, info: dict[str, Any]) -> ConfigFlowResul }, ) - await self.async_set_unique_id(info[CONF_UNIQUE_ID]) self._abort_if_unique_id_configured() return self.async_create_entry( @@ -102,9 +96,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - - self.reauth_entry = self._get_reauth_entry() - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/aseko_pool_live/strings.json 
b/homeassistant/components/aseko_pool_live/strings.json index 9f6a99b8d12b63..2805b60cdfd4f7 100644 --- a/homeassistant/components/aseko_pool_live/strings.json +++ b/homeassistant/components/aseko_pool_live/strings.json @@ -21,7 +21,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unique_id_mismatch": "The user identifier does not match the previous identifier" } }, "entity": { diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index ff2b122187a7ed..1fabc7790e73f5 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -22,8 +22,8 @@ class EnhancedAudioChunk: timestamp_ms: int """Timestamp relative to start of audio stream (milliseconds)""" - is_speech: bool | None - """True if audio chunk likely contains speech, False if not, None if unknown""" + speech_probability: float | None + """Probability that audio chunk contains speech (0-1), None if unknown""" class AudioEnhancer(ABC): @@ -70,27 +70,27 @@ def __init__( ) self.vad: MicroVad | None = None - self.threshold = 0.5 if self.is_vad_enabled: self.vad = MicroVad() - _LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold) + _LOGGER.debug("Initialized microVAD") def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" - is_speech: bool | None = None + speech_probability: float | None = None assert len(audio) == BYTES_PER_CHUNK if self.vad is not None: # Run VAD - speech_prob = self.vad.Process10ms(audio) - is_speech = speech_prob > self.threshold + speech_probability = self.vad.Process10ms(audio) if self.audio_processor is not None: # Run noise suppression and auto gain audio = self.audio_processor.Process10ms(audio).audio return EnhancedAudioChunk( - audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech + audio=audio, + timestamp_ms=timestamp_ms, + speech_probability=speech_probability, ) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index a4255e377568b1..a55e23ae05189b 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -780,7 +780,9 @@ async def wake_word_detection( # speaking the voice command. 
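# --- Illustrative sketch (editor's addition, not part of the patch) --------
# The shape change from the audio_enhancer.py hunk above, in isolation: the
# enhancer now hands the raw microVAD score (0-1, or None when VAD is off) to
# its callers instead of applying a fixed 0.5 threshold itself. The dataclass
# and helper below are stand-ins; the VAD score is passed in as a plain float.
from dataclasses import dataclass


@dataclass(slots=True)
class Chunk:
    """Stand-in for EnhancedAudioChunk."""

    audio: bytes
    timestamp_ms: int
    speech_probability: float | None  # raw score, no thresholding here


def enhance(audio: bytes, timestamp_ms: int, vad_score: float | None) -> Chunk:
    """Package one 10 ms chunk; the speech decision is left to the consumer."""
    return Chunk(audio=audio, timestamp_ms=timestamp_ms, speech_probability=vad_score)


chunk = enhance(b"\x00" * 320, 0, 0.83)  # 320 bytes = 10 ms of 16 kHz 16-bit mono
assert chunk.speech_probability == 0.83
# ---------------------------------------------------------------------------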
audio_chunks_for_stt.extend( EnhancedAudioChunk( - audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False + audio=chunk_ts[0], + timestamp_ms=chunk_ts[1], + speech_probability=None, ) for chunk_ts in result.queued_audio ) @@ -827,7 +829,7 @@ async def _wake_word_audio_stream( if wake_word_vad is not None: chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate - if not wake_word_vad.process(chunk_seconds, chunk.is_speech): + if not wake_word_vad.process(chunk_seconds, chunk.speech_probability): raise WakeWordTimeoutError( code="wake-word-timeout", message="Wake word was not detected" ) @@ -955,7 +957,7 @@ async def _speech_to_text_stream( if stt_vad is not None: chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate - if not stt_vad.process(chunk_seconds, chunk.is_speech): + if not stt_vad.process(chunk_seconds, chunk.speech_probability): # Silence detected at the end of voice command self.process_event( PipelineEvent( @@ -1221,7 +1223,7 @@ async def process_volume_only( yield EnhancedAudioChunk( audio=sub_chunk, timestamp_ms=timestamp_ms, - is_speech=None, # no VAD + speech_probability=None, # no VAD ) timestamp_ms += MS_PER_CHUNK diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index 4782d14dee47da..deae5b9b7b3876 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -75,7 +75,7 @@ def __bool__(self) -> bool: class VoiceCommandSegmenter: """Segments an audio stream into voice commands.""" - speech_seconds: float = 0.3 + speech_seconds: float = 0.1 """Seconds of speech before voice command has started.""" command_seconds: float = 1.0 @@ -96,6 +96,12 @@ class VoiceCommandSegmenter: timed_out: bool = False """True a timeout occurred during voice command.""" + before_command_speech_threshold: float = 0.2 + """Probability threshold for speech before voice command.""" + + in_command_speech_threshold: float = 0.5 + """Probability threshold for speech during voice command.""" + _speech_seconds_left: float = 0.0 """Seconds left before considering voice command as started.""" @@ -124,7 +130,7 @@ def reset(self) -> None: self._reset_seconds_left = self.reset_seconds self.in_command = False - def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: + def process(self, chunk_seconds: float, speech_probability: float | None) -> bool: """Process samples using external VAD. Returns False when command is done. @@ -142,7 +148,12 @@ def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: self.timed_out = True return False + if speech_probability is None: + speech_probability = 0.0 + if not self.in_command: + # Before command + is_speech = speech_probability > self.before_command_speech_threshold if is_speech: self._reset_seconds_left = self.reset_seconds self._speech_seconds_left -= chunk_seconds @@ -160,24 +171,29 @@ def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: if self._reset_seconds_left <= 0: self._speech_seconds_left = self.speech_seconds self._reset_seconds_left = self.reset_seconds - elif not is_speech: - # Silence in command - self._reset_seconds_left = self.reset_seconds - self._silence_seconds_left -= chunk_seconds - self._command_seconds_left -= chunk_seconds - if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0): - # Command finished successfully - self.reset() - _LOGGER.debug("Voice command finished") - return False else: - # Speech in command. 
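# --- Illustrative sketch (editor's addition, not part of the patch) --------
# The two-threshold decision introduced in the vad.py hunk above: a chunk
# counts as speech more readily (> 0.2) while waiting for a command to start
# and less readily (> 0.5) once inside one, and a missing probability is
# treated as silence. The constants mirror the defaults in the hunk; the
# helper itself is a hypothetical reduction of VoiceCommandSegmenter.process.
BEFORE_COMMAND_SPEECH_THRESHOLD = 0.2
IN_COMMAND_SPEECH_THRESHOLD = 0.5


def chunk_is_speech(speech_probability: float | None, in_command: bool) -> bool:
    """Classify one chunk with the stage-dependent threshold."""
    probability = speech_probability if speech_probability is not None else 0.0
    threshold = (
        IN_COMMAND_SPEECH_THRESHOLD if in_command else BEFORE_COMMAND_SPEECH_THRESHOLD
    )
    return probability > threshold


assert chunk_is_speech(0.3, in_command=False)       # enough to start a command
assert not chunk_is_speech(0.3, in_command=True)    # silence once inside a command
assert not chunk_is_speech(None, in_command=False)  # no VAD score -> silence
# ---------------------------------------------------------------------------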
- # Reset silence counter if enough speech. - self._reset_seconds_left -= chunk_seconds - self._command_seconds_left -= chunk_seconds - if self._reset_seconds_left <= 0: - self._silence_seconds_left = self.silence_seconds + # In command + is_speech = speech_probability > self.in_command_speech_threshold + if not is_speech: + # Silence in command self._reset_seconds_left = self.reset_seconds + self._silence_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds + if (self._silence_seconds_left <= 0) and ( + self._command_seconds_left <= 0 + ): + # Command finished successfully + self.reset() + _LOGGER.debug("Voice command finished") + return False + else: + # Speech in command. + # Reset silence counter if enough speech. + self._reset_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds + if self._reset_seconds_left <= 0: + self._silence_seconds_left = self.silence_seconds + self._reset_seconds_left = self.reset_seconds return True @@ -226,6 +242,9 @@ class VoiceActivityTimeout: reset_seconds: float = 0.5 """Seconds of speech before resetting timeout.""" + speech_threshold: float = 0.5 + """Threshold for speech.""" + _silence_seconds_left: float = 0.0 """Seconds left before considering voice command as stopped.""" @@ -241,12 +260,15 @@ def reset(self) -> None: self._silence_seconds_left = self.silence_seconds self._reset_seconds_left = self.reset_seconds - def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: + def process(self, chunk_seconds: float, speech_probability: float | None) -> bool: """Process samples using external VAD. Returns False when timeout is reached. """ - if is_speech: + if speech_probability is None: + speech_probability = 0.0 + + if speech_probability > self.speech_threshold: # Speech self._reset_seconds_left -= chunk_seconds if self._reset_seconds_left <= 0: diff --git a/homeassistant/components/aurora/__init__.py b/homeassistant/components/aurora/__init__.py index 273f6c6fec269d..b6c47cf36b23c4 100644 --- a/homeassistant/components/aurora/__init__.py +++ b/homeassistant/components/aurora/__init__.py @@ -4,6 +4,7 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from .const import CONF_THRESHOLD, DEFAULT_THRESHOLD from .coordinator import AuroraDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] @@ -21,9 +22,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: AuroraConfigEntry) -> bo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def update_listener(hass: HomeAssistant, entry: AuroraConfigEntry) -> None: + """Handle options update.""" + entry.runtime_data.threshold = int( + entry.options.get(CONF_THRESHOLD, DEFAULT_THRESHOLD) + ) + # refresh the state of the visibility alert binary sensor + await entry.runtime_data.async_request_refresh() + + async def async_unload_entry(hass: HomeAssistant, entry: AuroraConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aurora/manifest.json b/homeassistant/components/aurora/manifest.json index 018e8ab8135ae4..d94707bfa81d34 100644 --- a/homeassistant/components/aurora/manifest.json +++ b/homeassistant/components/aurora/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aurora", "iot_class": "cloud_polling", "loggers": ["auroranoaa"], - 
"requirements": ["auroranoaa==0.0.3"] + "requirements": ["auroranoaa==0.0.5"] } diff --git a/homeassistant/components/aurora/strings.json b/homeassistant/components/aurora/strings.json index 09ec86bdf4d38a..5ba3a1273fd482 100644 --- a/homeassistant/components/aurora/strings.json +++ b/homeassistant/components/aurora/strings.json @@ -14,14 +14,15 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "options": { "step": { "init": { "data": { - "threshold": "Threshold (%)" + "forecast_threshold": "Threshold (%)" } } } diff --git a/homeassistant/components/aussie_broadband/config_flow.py b/homeassistant/components/aussie_broadband/config_flow.py index 540c04f39934e4..5bc6ed1aa5cf94 100644 --- a/homeassistant/components/aussie_broadband/config_flow.py +++ b/homeassistant/components/aussie_broadband/config_flow.py @@ -99,8 +99,9 @@ async def async_step_reauth_confirm( } if not (errors := await self.async_auth(data)): - entry = self._get_reauth_entry() - return self.async_update_reload_and_abort(entry, data=data) + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/autarco/manifest.json b/homeassistant/components/autarco/manifest.json index 0058ab9af7781d..0567aeba722413 100644 --- a/homeassistant/components/autarco/manifest.json +++ b/homeassistant/components/autarco/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/autarco", "iot_class": "cloud_polling", - "requirements": ["autarco==3.0.0"] + "requirements": ["autarco==3.1.0"] } diff --git a/homeassistant/components/auth/login_flow.py b/homeassistant/components/auth/login_flow.py index 3664c3ca5c919b..d27235123b9947 100644 --- a/homeassistant/components/auth/login_flow.py +++ b/homeassistant/components/auth/login_flow.py @@ -80,7 +80,7 @@ from homeassistant import data_entry_flow from homeassistant.auth import AuthManagerFlowManager, InvalidAuthError -from homeassistant.auth.models import AuthFlowResult, Credentials +from homeassistant.auth.models import AuthFlowContext, AuthFlowResult, Credentials from homeassistant.components import onboarding from homeassistant.components.http import KEY_HASS from homeassistant.components.http.auth import async_user_not_allowed_do_auth @@ -322,11 +322,11 @@ async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response try: result = await self._flow_mgr.async_init( handler, - context={ - "ip_address": ip_address(request.remote), # type: ignore[arg-type] - "credential_only": data.get("type") == "link_user", - "redirect_uri": redirect_uri, - }, + context=AuthFlowContext( + ip_address=ip_address(request.remote), # type: ignore[arg-type] + credential_only=data.get("type") == "link_user", + redirect_uri=redirect_uri, + ), ) except data_entry_flow.UnknownHandler: return self.json_message("Invalid handler specified", HTTPStatus.NOT_FOUND) diff --git a/homeassistant/components/auth/mfa_setup_flow.py b/homeassistant/components/auth/mfa_setup_flow.py index 34787894c8cce2..c9efb081a014df 100644 --- a/homeassistant/components/auth/mfa_setup_flow.py +++ b/homeassistant/components/auth/mfa_setup_flow.py @@ -11,6 +11,7 @@ from homeassistant import data_entry_flow 
from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import FlowContext import homeassistant.helpers.config_validation as cv from homeassistant.util.hass_dict import HassKey @@ -44,7 +45,7 @@ async def async_create_flow( # type: ignore[override] self, handler_key: str, *, - context: dict[str, Any], + context: FlowContext | None, data: dict[str, Any], ) -> data_entry_flow.FlowHandler: """Create a setup flow. handler is a mfa module.""" diff --git a/homeassistant/components/automation/blueprints/motion_light.yaml b/homeassistant/components/automation/blueprints/motion_light.yaml index ad9c6f0286b551..11900708b19a23 100644 --- a/homeassistant/components/automation/blueprints/motion_light.yaml +++ b/homeassistant/components/automation/blueprints/motion_light.yaml @@ -35,24 +35,24 @@ blueprint: mode: restart max_exceeded: silent -trigger: - platform: state +triggers: + trigger: state entity_id: !input motion_entity from: "off" to: "on" -action: +actions: - alias: "Turn on the light" - service: light.turn_on + action: light.turn_on target: !input light_target - alias: "Wait until there is no motion from device" wait_for_trigger: - platform: state + trigger: state entity_id: !input motion_entity from: "on" to: "off" - alias: "Wait the number of seconds that has been set" delay: !input no_motion_wait - alias: "Turn off the light" - service: light.turn_off + action: light.turn_off target: !input light_target diff --git a/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml b/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml index e1e3bd5b2f693b..e072aad2565a41 100644 --- a/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml +++ b/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml @@ -25,8 +25,8 @@ blueprint: filter: integration: mobile_app -trigger: - platform: state +triggers: + trigger: state entity_id: !input person_entity variables: @@ -36,13 +36,13 @@ variables: person_entity: !input person_entity person_name: "{{ states[person_entity].name }}" -condition: +conditions: condition: template # The first case handles leaving the Home zone which has a special state when zoning called 'home'. # The second case handles leaving all other zones. 
value_template: "{{ zone_entity == 'zone.home' and trigger.from_state.state == 'home' and trigger.to_state.state != 'home' or trigger.from_state.state == zone_state and trigger.to_state.state != zone_state }}" -action: +actions: - alias: "Notify that a person has left the zone" domain: mobile_app type: notify diff --git a/homeassistant/components/awair/config_flow.py b/homeassistant/components/awair/config_flow.py index 8b40eacbafca42..88985b0db10d54 100644 --- a/homeassistant/components/awair/config_flow.py +++ b/homeassistant/components/awair/config_flow.py @@ -209,10 +209,9 @@ async def async_step_reauth_confirm( _, error = await self._check_cloud_connection(access_token) if error is None: - entry = await self.async_set_unique_id(self.unique_id) - assert entry - self.hass.config_entries.async_update_entry(entry, data=user_input) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) if error != "invalid_access_token": return self.async_abort(reason=error) diff --git a/homeassistant/components/awair/manifest.json b/homeassistant/components/awair/manifest.json index 25257bc3e1c421..a0fbd350dab7e6 100644 --- a/homeassistant/components/awair/manifest.json +++ b/homeassistant/components/awair/manifest.json @@ -3,11 +3,6 @@ "name": "Awair", "codeowners": ["@ahayworth", "@danielsjf"], "config_flow": true, - "dhcp": [ - { - "macaddress": "70886B1*" - } - ], "documentation": "https://www.home-assistant.io/integrations/awair", "iot_class": "local_polling", "loggers": ["python_awair"], diff --git a/homeassistant/components/awair/strings.json b/homeassistant/components/awair/strings.json index 071893ce7a23a3..a7c5c647af8511 100644 --- a/homeassistant/components/awair/strings.json +++ b/homeassistant/components/awair/strings.json @@ -45,6 +45,7 @@ "already_configured_device": "[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]", "unreachable": "[%key:common::config_flow::error::cannot_connect%]" }, "flow_title": "{model} ({device_id})" diff --git a/homeassistant/components/axis/config_flow.py b/homeassistant/components/axis/config_flow.py index 0434ed71a22b07..592b1e2d41f7eb 100644 --- a/homeassistant/components/axis/config_flow.py +++ b/homeassistant/components/axis/config_flow.py @@ -13,10 +13,12 @@ from homeassistant.components import dhcp, ssdp, zeroconf from homeassistant.config_entries import ( SOURCE_IGNORE, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_HOST, @@ -57,9 +59,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> AxisOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> AxisOptionsFlowHandler: """Get the options flow for this handler.""" - return AxisOptionsFlowHandler(config_entry) + return AxisOptionsFlowHandler() def __init__(self) -> None: """Initialize the Axis config flow.""" @@ -87,27 +91,30 @@ async def async_step_user( else: serial = api.vapix.serial_number - await self.async_set_unique_id(format_mac(serial)) - - self._abort_if_unique_id_configured( - updates={ - CONF_PROTOCOL: user_input[CONF_PROTOCOL], - 
CONF_HOST: user_input[CONF_HOST], - CONF_PORT: user_input[CONF_PORT], - CONF_USERNAME: user_input[CONF_USERNAME], - CONF_PASSWORD: user_input[CONF_PASSWORD], - } - ) - - self.config = { + config = { CONF_PROTOCOL: user_input[CONF_PROTOCOL], CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], - CONF_MODEL: api.vapix.product_number, } + await self.async_set_unique_id(format_mac(serial)) + + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=config + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data_updates=config + ) + self._abort_if_unique_id_configured() + + self.config = config | {CONF_MODEL: api.vapix.product_number} + return await self._create_entry(serial) data = self.discovery_schema or { @@ -152,8 +159,9 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Trigger a reconfiguration flow.""" - entry = self._get_reconfigure_entry() - return await self._redo_configuration(entry.data, keep_password=True) + return await self._redo_configuration( + self._get_reconfigure_entry().data, keep_password=True + ) async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -258,7 +266,7 @@ async def _process_discovered_device( return await self.async_step_user() -class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AxisOptionsFlowHandler(OptionsFlow): """Handle Axis device options.""" config_entry: AxisConfigEntry @@ -276,8 +284,7 @@ async def async_step_configure_stream( ) -> ConfigFlowResult: """Manage the Axis device stream options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) schema = {} diff --git a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index e028736f4caaab..d2265307d47546 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -30,7 +30,7 @@ "iot_class": "local_push", "loggers": ["axis"], "quality_scale": "platinum", - "requirements": ["axis==62"], + "requirements": ["axis==63"], "ssdp": [ { "manufacturer": "AXIS" diff --git a/homeassistant/components/axis/strings.json b/homeassistant/components/axis/strings.json index 9534989305dd25..da1963deacd0bf 100644 --- a/homeassistant/components/axis/strings.json +++ b/homeassistant/components/axis/strings.json @@ -26,7 +26,10 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "link_local_address": "Link local addresses are not supported", - "not_axis_device": "Discovered device not an Axis device" + "not_axis_device": "Discovered device not an Axis device", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The serial number of the device does not match the previous serial number" } }, "options": { diff --git a/homeassistant/components/azure_devops/config_flow.py b/homeassistant/components/azure_devops/config_flow.py index 995f9c5f5a10a0..13666343e1df23 100644 --- 
a/homeassistant/components/azure_devops/config_flow.py +++ b/homeassistant/components/azure_devops/config_flow.py @@ -42,17 +42,6 @@ async def _show_setup_form( errors=errors or {}, ) - async def _show_reauth_form(self, errors: dict[str, str]) -> ConfigFlowResult: - """Show the reauth form to the user.""" - return self.async_show_form( - step_id="reauth", - description_placeholders={ - "project_url": f"{self._organization}/{self._project}" - }, - data_schema=vol.Schema({vol.Required(CONF_PAT): str}), - errors=errors or {}, - ) - async def _check_setup(self) -> dict[str, str] | None: """Check the setup of the flow.""" errors: dict[str, str] = {} @@ -106,22 +95,33 @@ async def async_step_reauth( self.context["title_placeholders"] = { "project_url": f"{self._organization}/{self._project}", } + return await self.async_step_reauth_confirm() - await self.async_set_unique_id(f"{self._organization}_{self._project}") - - errors = await self._check_setup() - if errors is not None: - return await self._show_reauth_form(errors) - - self.hass.config_entries.async_update_entry( - self._get_reauth_entry(), - data={ - CONF_ORG: self._organization, - CONF_PROJECT: self._project, - CONF_PAT: self._pat, + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + errors: dict[str, str] | None = None + if user_input is not None: + errors = await self._check_setup() + if errors is None: + self.hass.config_entries.async_update_entry( + self._get_reauth_entry(), + data={ + CONF_ORG: self._organization, + CONF_PROJECT: self._project, + CONF_PAT: self._pat, + }, + ) + return self.async_abort(reason="reauth_successful") + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={ + "project_url": f"{self._organization}/{self._project}" }, + data_schema=vol.Schema({vol.Required(CONF_PAT): str}), + errors=errors or {}, ) - return self.async_abort(reason="reauth_successful") def _async_create_entry(self) -> ConfigFlowResult: """Handle create entry.""" diff --git a/homeassistant/components/azure_devops/strings.json b/homeassistant/components/azure_devops/strings.json index c530427039660a..f5fe5cd06a79ef 100644 --- a/homeassistant/components/azure_devops/strings.json +++ b/homeassistant/components/azure_devops/strings.json @@ -16,7 +16,7 @@ "description": "Set up an Azure DevOps instance to access your project. 
A Personal Access Token is only required for a private project.", "title": "Add Azure DevOps Project" }, - "reauth": { + "reauth_confirm": { "data": { "personal_access_token": "[%key:component::azure_devops::config::step::user::data::personal_access_token%]" }, diff --git a/homeassistant/components/azure_event_hub/config_flow.py b/homeassistant/components/azure_event_hub/config_flow.py index 046851e6926bca..60ac9bff8cd494 100644 --- a/homeassistant/components/azure_event_hub/config_flow.py +++ b/homeassistant/components/azure_event_hub/config_flow.py @@ -124,7 +124,9 @@ async def async_step_conn_string( step_id=STEP_CONN_STRING, data_schema=CONN_STRING_SCHEMA, errors=errors, - description_placeholders=self._data[CONF_EVENT_HUB_INSTANCE_NAME], + description_placeholders={ + "event_hub_instance_name": self._data[CONF_EVENT_HUB_INSTANCE_NAME] + }, last_step=True, ) @@ -144,7 +146,9 @@ async def async_step_sas( step_id=STEP_SAS, data_schema=SAS_SCHEMA, errors=errors, - description_placeholders=self._data[CONF_EVENT_HUB_INSTANCE_NAME], + description_placeholders={ + "event_hub_instance_name": self._data[CONF_EVENT_HUB_INSTANCE_NAME] + }, last_step=True, ) diff --git a/homeassistant/components/azure_event_hub/strings.json b/homeassistant/components/azure_event_hub/strings.json index 3f05e4b8e35b79..3319a29a15432a 100644 --- a/homeassistant/components/azure_event_hub/strings.json +++ b/homeassistant/components/azure_event_hub/strings.json @@ -38,7 +38,7 @@ }, "options": { "step": { - "options": { + "init": { "title": "Options for the Azure Event Hub.", "data": { "send_interval": "Interval between sending batches to the hub." diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index ac37ef4ec59069..200cb4a3f65591 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -1,8 +1,8 @@ """The Backup integration.""" -from homeassistant.components.hassio import is_hassio from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing import ConfigType from .const import DATA_MANAGER, DOMAIN, LOGGER @@ -32,7 +32,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_handle_create_service(call: ServiceCall) -> None: """Service handler for creating backups.""" - await backup_manager.generate_backup() + await backup_manager.async_create_backup() hass.services.async_register(DOMAIN, "create", async_handle_create_service) diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index 90faa33fc7ff23..f613f7cc352a9c 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -17,6 +17,7 @@ EXCLUDE_FROM_BACKUP = [ "__pycache__/*", ".DS_Store", + ".HA_RESTORE", "*.db-shm", "*.log.*", "*.log", diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 793192aa6234c1..4cc4e61c9e4ccd 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -12,7 +12,7 @@ from homeassistant.util import slugify from .const import DOMAIN -from .manager import BackupManager +from .manager import BaseBackupManager @callback @@ -36,8 +36,8 @@ async def get( if not request["hass_user"].is_admin: return Response(status=HTTPStatus.UNAUTHORIZED) - manager: BackupManager = 
request.app[KEY_HASS].data[DOMAIN] - backup = await manager.get_backup(slug) + manager: BaseBackupManager = request.app[KEY_HASS].data[DOMAIN] + backup = await manager.async_get_backup(slug=slug) if backup is None or not backup.path.exists(): return Response(status=HTTPStatus.NOT_FOUND) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index e33318362029a4..8265dade3aae77 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -2,6 +2,7 @@ from __future__ import annotations +import abc import asyncio from dataclasses import asdict, dataclass import hashlib @@ -15,6 +16,7 @@ from securetar import SecureTarFile, atomic_contents_add +from homeassistant.backup_restore import RESTORE_BACKUP_FILE from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -53,18 +55,16 @@ async def async_post_backup(self, hass: HomeAssistant) -> None: """Perform operations after a backup finishes.""" -class BackupManager: - """Backup manager for the Backup integration.""" +class BaseBackupManager(abc.ABC): + """Define the format that backup managers can have.""" def __init__(self, hass: HomeAssistant) -> None: """Initialize the backup manager.""" self.hass = hass - self.backup_dir = Path(hass.config.path("backups")) self.backing_up = False self.backups: dict[str, Backup] = {} - self.platforms: dict[str, BackupPlatformProtocol] = {} - self.loaded_backups = False self.loaded_platforms = False + self.platforms: dict[str, BackupPlatformProtocol] = {} @callback def _add_platform( @@ -84,7 +84,7 @@ def _add_platform( return self.platforms[integration_domain] = platform - async def pre_backup_actions(self) -> None: + async def async_pre_backup_actions(self, **kwargs: Any) -> None: """Perform pre backup actions.""" if not self.loaded_platforms: await self.load_platforms() @@ -100,7 +100,7 @@ async def pre_backup_actions(self) -> None: if isinstance(result, Exception): raise result - async def post_backup_actions(self) -> None: + async def async_post_backup_actions(self, **kwargs: Any) -> None: """Perform post backup actions.""" if not self.loaded_platforms: await self.load_platforms() @@ -116,13 +116,6 @@ async def post_backup_actions(self) -> None: if isinstance(result, Exception): raise result - async def load_backups(self) -> None: - """Load data of stored backup files.""" - backups = await self.hass.async_add_executor_job(self._read_backups) - LOGGER.debug("Loaded %s backups", len(backups)) - self.backups = backups - self.loaded_backups = True - async def load_platforms(self) -> None: """Load backup platforms.""" await integration_platform.async_process_integration_platforms( @@ -131,6 +124,46 @@ async def load_platforms(self) -> None: LOGGER.debug("Loaded %s platforms", len(self.platforms)) self.loaded_platforms = True + @abc.abstractmethod + async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: + """Restore a backup.""" + + @abc.abstractmethod + async def async_create_backup(self, **kwargs: Any) -> Backup: + """Generate a backup.""" + + @abc.abstractmethod + async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]: + """Get backups. + + Return a dictionary of Backup instances keyed by their slug. 
+ """ + + @abc.abstractmethod + async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None: + """Get a backup.""" + + @abc.abstractmethod + async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: + """Remove a backup.""" + + +class BackupManager(BaseBackupManager): + """Backup manager for the Backup integration.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup manager.""" + super().__init__(hass=hass) + self.backup_dir = Path(hass.config.path("backups")) + self.loaded_backups = False + + async def load_backups(self) -> None: + """Load data of stored backup files.""" + backups = await self.hass.async_add_executor_job(self._read_backups) + LOGGER.debug("Loaded %s backups", len(backups)) + self.backups = backups + self.loaded_backups = True + def _read_backups(self) -> dict[str, Backup]: """Read backups from disk.""" backups: dict[str, Backup] = {} @@ -151,14 +184,14 @@ def _read_backups(self) -> dict[str, Backup]: LOGGER.warning("Unable to read backup %s: %s", backup_path, err) return backups - async def get_backups(self) -> dict[str, Backup]: + async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]: """Return backups.""" if not self.loaded_backups: await self.load_backups() return self.backups - async def get_backup(self, slug: str) -> Backup | None: + async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None: """Return a backup.""" if not self.loaded_backups: await self.load_backups() @@ -180,23 +213,23 @@ async def get_backup(self, slug: str) -> Backup | None: return backup - async def remove_backup(self, slug: str) -> None: + async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: """Remove a backup.""" - if (backup := await self.get_backup(slug)) is None: + if (backup := await self.async_get_backup(slug=slug)) is None: return await self.hass.async_add_executor_job(backup.path.unlink, True) LOGGER.debug("Removed backup located at %s", backup.path) self.backups.pop(slug) - async def generate_backup(self) -> Backup: + async def async_create_backup(self, **kwargs: Any) -> Backup: """Generate a backup.""" if self.backing_up: raise HomeAssistantError("Backup already in progress") try: self.backing_up = True - await self.pre_backup_actions() + await self.async_pre_backup_actions() backup_name = f"Core {HAVERSION}" date_str = dt_util.now().isoformat() slug = _generate_slug(date_str, backup_name) @@ -229,7 +262,7 @@ async def generate_backup(self) -> Backup: return backup finally: self.backing_up = False - await self.post_backup_actions() + await self.async_post_backup_actions() def _mkdir_and_generate_backup_contents( self, @@ -263,6 +296,25 @@ def _mkdir_and_generate_backup_contents( return tar_file_path.stat().st_size + async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: + """Restore a backup. + + This will write the restore information to .HA_RESTORE which + will be handled during startup by the restore_backup module. 
+ """ + if (backup := await self.async_get_backup(slug=slug)) is None: + raise HomeAssistantError(f"Backup {slug} not found") + + def _write_restore_file() -> None: + """Write the restore file.""" + Path(self.hass.config.path(RESTORE_BACKUP_FILE)).write_text( + json.dumps({"path": backup.path.as_posix()}), + encoding="utf-8", + ) + + await self.hass.async_add_executor_job(_write_restore_file) + await self.hass.services.async_call("homeassistant", "restart", {}) + def _generate_slug(date: str, name: str) -> str: """Generate a backup slug.""" diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index dd42fe06afc3a3..3ac8a7ace3e67f 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -18,9 +18,11 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> websocket_api.async_register_command(hass, handle_backup_start) return + websocket_api.async_register_command(hass, handle_details) websocket_api.async_register_command(hass, handle_info) websocket_api.async_register_command(hass, handle_create) websocket_api.async_register_command(hass, handle_remove) + websocket_api.async_register_command(hass, handle_restore) @websocket_api.require_admin @@ -33,7 +35,7 @@ async def handle_info( ) -> None: """List all stored backups.""" manager = hass.data[DATA_MANAGER] - backups = await manager.get_backups() + backups = await manager.async_get_backups() connection.send_result( msg["id"], { @@ -43,6 +45,29 @@ async def handle_info( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/details", + vol.Required("slug"): str, + } +) +@websocket_api.async_response +async def handle_details( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get backup details for a specific slug.""" + backup = await hass.data[DATA_MANAGER].async_get_backup(slug=msg["slug"]) + connection.send_result( + msg["id"], + { + "backup": backup, + }, + ) + + @websocket_api.require_admin @websocket_api.websocket_command( { @@ -57,7 +82,25 @@ async def handle_remove( msg: dict[str, Any], ) -> None: """Remove a backup.""" - await hass.data[DATA_MANAGER].remove_backup(msg["slug"]) + await hass.data[DATA_MANAGER].async_remove_backup(slug=msg["slug"]) + connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/restore", + vol.Required("slug"): str, + } +) +@websocket_api.async_response +async def handle_restore( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Restore a backup.""" + await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"]) connection.send_result(msg["id"]) @@ -70,7 +113,7 @@ async def handle_create( msg: dict[str, Any], ) -> None: """Generate a backup.""" - backup = await hass.data[DATA_MANAGER].generate_backup() + backup = await hass.data[DATA_MANAGER].async_create_backup() connection.send_result(msg["id"], backup) @@ -88,7 +131,7 @@ async def handle_backup_start( LOGGER.debug("Backup start notification") try: - await manager.pre_backup_actions() + await manager.async_pre_backup_actions() except Exception as err: # noqa: BLE001 connection.send_error(msg["id"], "pre_backup_actions_failed", str(err)) return @@ -110,7 +153,7 @@ async def handle_backup_end( LOGGER.debug("Backup end notification") try: - await manager.post_backup_actions() + await 
manager.async_post_backup_actions() except Exception as err: # noqa: BLE001 connection.send_error(msg["id"], "post_backup_actions_failed", str(err)) return diff --git a/homeassistant/components/balboa/__init__.py b/homeassistant/components/balboa/__init__.py index 7e220bd46f8502..7838db16820252 100644 --- a/homeassistant/components/balboa/__init__.py +++ b/homeassistant/components/balboa/__init__.py @@ -14,7 +14,7 @@ from homeassistant.helpers.event import async_track_time_interval import homeassistant.util.dt as dt_util -from .const import CONF_SYNC_TIME, DEFAULT_SYNC_TIME, DOMAIN +from .const import CONF_SYNC_TIME, DEFAULT_SYNC_TIME _LOGGER = logging.getLogger(__name__) @@ -30,8 +30,10 @@ KEEP_ALIVE_INTERVAL = timedelta(minutes=1) SYNC_TIME_INTERVAL = timedelta(hours=1) +type BalboaConfigEntry = ConfigEntry[SpaClient] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: BalboaConfigEntry) -> bool: """Set up Balboa Spa from a config entry.""" host = entry.data[CONF_HOST] @@ -44,41 +46,34 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Failed to get spa info at %s", host) raise ConfigEntryNotReady("Unable to configure") - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = spa + entry.runtime_data = spa await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) await async_setup_time_sync(hass, entry) entry.async_on_unload(entry.add_update_listener(update_listener)) + entry.async_on_unload(spa.disconnect) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BalboaConfigEntry) -> bool: """Unload a config entry.""" - _LOGGER.debug("Disconnecting from spa") - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] - - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - await spa.disconnect() - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: BalboaConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) -async def async_setup_time_sync(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_setup_time_sync(hass: HomeAssistant, entry: BalboaConfigEntry) -> None: """Set up the time sync.""" if not entry.options.get(CONF_SYNC_TIME, DEFAULT_SYNC_TIME): return _LOGGER.debug("Setting up daily time sync") - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async def sync_time(now: datetime) -> None: now = dt_util.as_local(now) diff --git a/homeassistant/components/balboa/binary_sensor.py b/homeassistant/components/balboa/binary_sensor.py index d3352208cd919f..b8c62ce8abf42c 100644 --- a/homeassistant/components/balboa/binary_sensor.py +++ b/homeassistant/components/balboa/binary_sensor.py @@ -12,19 +12,20 @@ BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa's binary sensors.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data entities = [ BalboaBinarySensorEntity(spa, description) for description in BINARY_SENSOR_DESCRIPTIONS diff --git a/homeassistant/components/balboa/climate.py b/homeassistant/components/balboa/climate.py index 8cd9e93e539e0e..d27fd459676b78 100644 --- a/homeassistant/components/balboa/climate.py +++ b/homeassistant/components/balboa/climate.py @@ -14,7 +14,6 @@ HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_TEMPERATURE, PRECISION_HALVES, @@ -24,6 +23,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import BalboaConfigEntry from .const import DOMAIN from .entity import BalboaEntity @@ -45,10 +45,12 @@ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa climate entity.""" - async_add_entities([BalboaClimateEntity(hass.data[DOMAIN][entry.entry_id])]) + async_add_entities([BalboaClimateEntity(entry.runtime_data)]) class BalboaClimateEntity(BalboaEntity, ClimateEntity): diff --git a/homeassistant/components/balboa/fan.py b/homeassistant/components/balboa/fan.py index bf7425f0e642a5..67c1d9a9a62d2e 100644 --- a/homeassistant/components/balboa/fan.py +++ b/homeassistant/components/balboa/fan.py @@ -5,11 +5,10 @@ import math from typing import Any, cast -from pybalboa import SpaClient, SpaControl +from pybalboa import SpaControl from pybalboa.enums import OffOnState, UnknownState from homeassistant.components.fan import FanEntity, FanEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.percentage import ( @@ -17,15 +16,17 @@ ranged_value_to_percentage, ) -from .const import DOMAIN +from . import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa's pumps.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async_add_entities(BalboaPumpFanEntity(control) for control in spa.pumps) diff --git a/homeassistant/components/balboa/light.py b/homeassistant/components/balboa/light.py index 5dc8d48ef9df53..21e4dfc5e08b1a 100644 --- a/homeassistant/components/balboa/light.py +++ b/homeassistant/components/balboa/light.py @@ -4,23 +4,24 @@ from typing import Any, cast -from pybalboa import SpaClient, SpaControl +from pybalboa import SpaControl from pybalboa.enums import OffOnState, UnknownState from homeassistant.components.light import ColorMode, LightEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa's lights.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async_add_entities(BalboaLightEntity(control) for control in spa.lights) diff --git a/homeassistant/components/balboa/select.py b/homeassistant/components/balboa/select.py index 9c3074350c54df..e88e40ab063552 100644 --- a/homeassistant/components/balboa/select.py +++ b/homeassistant/components/balboa/select.py @@ -1,22 +1,23 @@ """Support for Spa Client selects.""" -from pybalboa import SpaClient, SpaControl +from pybalboa import SpaControl from pybalboa.enums import LowHighRange from homeassistant.components.select import SelectEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import BalboaConfigEntry from .entity import BalboaEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the spa select entity.""" - spa: SpaClient = hass.data[DOMAIN][entry.entry_id] + spa = entry.runtime_data async_add_entities([BalboaTempRangeSelectEntity(spa.temperature_range)]) diff --git a/homeassistant/components/bang_olufsen/__init__.py b/homeassistant/components/bang_olufsen/__init__.py index e11df6ad5ed4da..c8ba1f1c3dc02c 100644 --- a/homeassistant/components/bang_olufsen/__init__.py +++ b/homeassistant/components/bang_olufsen/__init__.py @@ -31,10 +31,12 @@ class BangOlufsenData: client: MozartClient +type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData] + PLATFORMS = [Platform.MEDIA_PLAYER] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool: """Set up from a config entry.""" # Remove casts to str @@ -67,10 +69,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: websocket = BangOlufsenWebsocket(hass, entry, client) # Add the websocket and API client - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = BangOlufsenData( - websocket, - client, - ) + entry.runtime_data = BangOlufsenData(websocket, client) # Start WebSocket connection await client.connect_notifications(remote_control=True, reconnect=True) @@ -80,15 +79,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: BangOlufsenConfigEntry +) -> bool: """Unload a config entry.""" # Close the API client and WebSocket notification listener - hass.data[DOMAIN][entry.entry_id].client.disconnect_notifications() - await hass.data[DOMAIN][entry.entry_id].client.close_api_client() - - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) + entry.runtime_data.client.disconnect_notifications() + await entry.runtime_data.client.close_api_client() - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) 
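The Balboa and Bang & Olufsen setup changes above (and the BleBox changes further below) all follow the same migration: the per-entry client moves from `hass.data[DOMAIN][entry.entry_id]` to a typed `entry.runtime_data`. A minimal sketch of that pattern, where `ExampleClient`, the `host` key, and the platform list are illustrative placeholders and not part of this change:

```python
from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant


@dataclass
class ExampleClient:
    """Stand-in for the library client an integration keeps per config entry."""

    host: str


# runtime_data is typed through the ConfigEntry generic (Python 3.12 type alias),
# mirroring BalboaConfigEntry / BangOlufsenConfigEntry / BleBoxConfigEntry above.
type ExampleConfigEntry = ConfigEntry[ExampleClient]

PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the client on the entry instead of hass.data[DOMAIN][entry.entry_id]."""
    entry.runtime_data = ExampleClient(host=entry.data["host"])
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Unload platforms; no manual hass.data cleanup is needed with runtime_data."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
```

Because nothing is stored in `hass.data`, the unload path no longer has to pop anything, which is why the unload functions in this diff collapse to a single `async_unload_platforms` return.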
diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 64ee4cf275dc74..209311d3e8aae2 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -7,20 +7,19 @@ from mozart_api.models import Source, SourceArray, SourceTypeEnum -from homeassistant.components.media_player import MediaPlayerState, MediaType +from homeassistant.components.media_player import ( + MediaPlayerState, + MediaType, + RepeatMode, +) class BangOlufsenSource: """Class used for associating device source ids with friendly names. May not include all sources.""" - URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer") - BLUETOOTH: Final[Source] = Source(name="Bluetooth", id="bluetooth") - CHROMECAST: Final[Source] = Source(name="Chromecast built-in", id="chromeCast") LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn") SPDIF: Final[Source] = Source(name="Optical", id="spdif") - NET_RADIO: Final[Source] = Source(name="B&O Radio", id="netRadio") - DEEZER: Final[Source] = Source(name="Deezer", id="deezer") - TIDAL: Final[Source] = Source(name="Tidal", id="tidal") + URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer") BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = { @@ -36,6 +35,17 @@ class BangOlufsenSource: "unknown": MediaPlayerState.IDLE, } +# Dict used for translating Home Assistant settings to device repeat settings. +BANG_OLUFSEN_REPEAT_FROM_HA: dict[RepeatMode, str] = { + RepeatMode.ALL: "all", + RepeatMode.ONE: "track", + RepeatMode.OFF: "none", +} +# Dict used for translating device repeat settings to Home Assistant settings. +BANG_OLUFSEN_REPEAT_TO_HA: dict[str, RepeatMode] = { + value: key for key, value in BANG_OLUFSEN_REPEAT_FROM_HA.items() +} + # Media types for play_media class BangOlufsenMediaType(StrEnum): @@ -123,20 +133,6 @@ class WebsocketNotification(StrEnum): MediaType.CHANNEL, ) -# Sources on the device that should not be selectable by the user -HIDDEN_SOURCE_IDS: Final[tuple] = ( - "airPlay", - "bluetooth", - "chromeCast", - "generator", - "local", - "dlna", - "qplay", - "wpl", - "pl", - "beolink", - "usbIn", -) # Fallback sources to use in case of API failure. 
FALLBACK_SOURCES: Final[SourceArray] = SourceArray( @@ -144,23 +140,26 @@ class WebsocketNotification(StrEnum): Source( id="uriStreamer", is_enabled=True, - is_playable=False, + is_playable=True, name="Audio Streamer", type=SourceTypeEnum(value="uriStreamer"), + is_seekable=False, ), Source( id="bluetooth", is_enabled=True, - is_playable=False, + is_playable=True, name="Bluetooth", type=SourceTypeEnum(value="bluetooth"), + is_seekable=False, ), Source( id="spotify", is_enabled=True, - is_playable=False, + is_playable=True, name="Spotify Connect", type=SourceTypeEnum(value="spotify"), + is_seekable=True, ), Source( id="lineIn", @@ -168,6 +167,7 @@ class WebsocketNotification(StrEnum): is_playable=True, name="Line-In", type=SourceTypeEnum(value="lineIn"), + is_seekable=False, ), Source( id="spdif", @@ -175,6 +175,7 @@ class WebsocketNotification(StrEnum): is_playable=True, name="Optical", type=SourceTypeEnum(value="spdif"), + is_seekable=False, ), Source( id="netRadio", @@ -182,6 +183,7 @@ class WebsocketNotification(StrEnum): is_playable=True, name="B&O Radio", type=SourceTypeEnum(value="netRadio"), + is_seekable=False, ), Source( id="deezer", @@ -189,6 +191,7 @@ class WebsocketNotification(StrEnum): is_playable=True, name="Deezer", type=SourceTypeEnum(value="deezer"), + is_seekable=True, ), Source( id="tidalConnect", @@ -196,6 +199,7 @@ class WebsocketNotification(StrEnum): is_playable=True, name="Tidal Connect", type=SourceTypeEnum(value="tidalConnect"), + is_seekable=True, ), ] ) diff --git a/homeassistant/components/bang_olufsen/icons.json b/homeassistant/components/bang_olufsen/icons.json new file mode 100644 index 00000000000000..fec0bf20937f88 --- /dev/null +++ b/homeassistant/components/bang_olufsen/icons.json @@ -0,0 +1,9 @@ +{ + "services": { + "beolink_join": { "service": "mdi:location-enter" }, + "beolink_expand": { "service": "mdi:location-enter" }, + "beolink_unexpand": { "service": "mdi:location-exit" }, + "beolink_leave": { "service": "mdi:close-circle-outline" }, + "beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" } + } +} diff --git a/homeassistant/components/bang_olufsen/manifest.json b/homeassistant/components/bang_olufsen/manifest.json index a93a6e7a6244c6..b4a92d4da250f6 100644 --- a/homeassistant/components/bang_olufsen/manifest.json +++ b/homeassistant/components/bang_olufsen/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bang_olufsen", "integration_type": "device", "iot_class": "local_push", - "requirements": ["mozart-api==3.4.1.8.8"], + "requirements": ["mozart-api==4.1.1.116.0"], "zeroconf": ["_bangolufsen._tcp.local."] } diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index ecf571d5456421..56aa66d32e80f4 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -3,12 +3,15 @@ from __future__ import annotations from collections.abc import Callable +import contextlib +from datetime import timedelta import json import logging from typing import TYPE_CHECKING, Any, cast +from aiohttp import ClientConnectorError from mozart_api import __version__ as MOZART_API_VERSION -from mozart_api.exceptions import ApiException +from mozart_api.exceptions import ApiException, NotFoundException from mozart_api.models import ( Action, Art, @@ -22,6 +25,7 @@ PlaybackProgress, PlayQueueItem, PlayQueueItemType, + PlayQueueSettings, RenderingState, SceneProperties, 
SoftwareUpdateState, @@ -34,6 +38,7 @@ VolumeState, ) from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork +import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( @@ -44,26 +49,35 @@ MediaPlayerEntityFeature, MediaPlayerState, MediaType, + RepeatMode, async_process_play_media_url, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MODEL, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) from homeassistant.util.dt import utcnow -from . import BangOlufsenData +from . import BangOlufsenConfigEntry from .const import ( + BANG_OLUFSEN_REPEAT_FROM_HA, + BANG_OLUFSEN_REPEAT_TO_HA, BANG_OLUFSEN_STATES, CONF_BEOLINK_JID, CONNECTION_STATUS, DOMAIN, FALLBACK_SOURCES, - HIDDEN_SOURCE_IDS, VALID_MEDIA_TYPES, BangOlufsenMediaType, BangOlufsenSource, @@ -72,6 +86,8 @@ from .entity import BangOlufsenEntity from .util import get_serial_number_from_jid +SCAN_INTERVAL = timedelta(seconds=30) + _LOGGER = logging.getLogger(__name__) BANG_OLUFSEN_FEATURES = ( @@ -84,8 +100,9 @@ | MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PLAY_MEDIA | MediaPlayerEntityFeature.PREVIOUS_TRACK - | MediaPlayerEntityFeature.SEEK + | MediaPlayerEntityFeature.REPEAT_SET | MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.SHUFFLE_SET | MediaPlayerEntityFeature.STOP | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.VOLUME_MUTE @@ -96,14 +113,68 @@ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BangOlufsenConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Media Player entity from config entry.""" - data: BangOlufsenData = hass.data[DOMAIN][config_entry.entry_id] - # Add MediaPlayer entity - async_add_entities(new_entities=[BangOlufsenMediaPlayer(config_entry, data.client)]) + async_add_entities( + new_entities=[ + BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client) + ] + ) + + # Register actions. 
+ platform = async_get_current_platform() + + jid_regex = vol.Match( + r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$" + ) + + platform.async_register_entity_service( + name="beolink_join", + schema={vol.Optional("beolink_jid"): jid_regex}, + func="async_beolink_join", + ) + + platform.async_register_entity_service( + name="beolink_expand", + schema={ + vol.Exclusive("all_discovered", "devices", ""): cv.boolean, + vol.Exclusive( + "beolink_jids", + "devices", + "Define either specific Beolink JIDs or all discovered", + ): vol.All( + cv.ensure_list, + [jid_regex], + ), + }, + func="async_beolink_expand", + ) + + platform.async_register_entity_service( + name="beolink_unexpand", + schema={ + vol.Required("beolink_jids"): vol.All( + cv.ensure_list, + [jid_regex], + ), + }, + func="async_beolink_unexpand", + ) + + platform.async_register_entity_service( + name="beolink_leave", + schema=None, + func="async_beolink_leave", + ) + + platform.async_register_entity_service( + name="beolink_allstandby", + schema=None, + func="async_beolink_allstandby", + ) class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): @@ -112,7 +183,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): _attr_icon = "mdi:speaker-wireless" _attr_name = None _attr_device_class = MediaPlayerDeviceClass.SPEAKER - _attr_supported_features = BANG_OLUFSEN_FEATURES def __init__(self, entry: ConfigEntry, client: MozartClient) -> None: """Initialize the media player.""" @@ -129,6 +199,7 @@ def __init__(self, entry: ConfigEntry, client: MozartClient) -> None: serial_number=self._unique_id, ) self._attr_unique_id = self._unique_id + self._attr_should_poll = True # Misc. variables. self._audio_sources: dict[str, str] = {} @@ -145,6 +216,8 @@ def __init__(self, entry: ConfigEntry, client: MozartClient) -> None: # Beolink compatible sources self._beolink_sources: dict[str, bool] = {} self._remote_leader: BeolinkLeader | None = None + # Extra state attributes for showing Beolink: peer(s), listener(s), leader and self + self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {} async def async_added_to_hass(self) -> None: """Turn on the dispatchers.""" @@ -154,9 +227,11 @@ async def async_added_to_hass(self) -> None: CONNECTION_STATUS: self._async_update_connection_state, WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes, WebsocketNotification.BEOLINK: self._async_update_beolink, + WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink, WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error, WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink, WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress, + WebsocketNotification.PLAYBACK_SOURCE: self._async_update_sources, WebsocketNotification.PLAYBACK_STATE: self._async_update_playback_state, WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources, WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change, @@ -218,7 +293,23 @@ async def _initialize(self) -> None: await self._async_update_sound_modes() - async def _async_update_sources(self) -> None: + # Update beolink attributes and device name. + await self._async_update_name_and_beolink() + + async def async_update(self) -> None: + """Update queue settings.""" + # The WebSocket event listener is the main handler for connection state. 
+ # The polling updates do therefore not set the device as available or unavailable + with contextlib.suppress(ApiException, ClientConnectorError, TimeoutError): + queue_settings = await self._client.get_settings_queue(_request_timeout=5) + + if queue_settings.repeat is not None: + self._attr_repeat = BANG_OLUFSEN_REPEAT_TO_HA[queue_settings.repeat] + + if queue_settings.shuffle is not None: + self._attr_shuffle = queue_settings.shuffle + + async def _async_update_sources(self, _: Source | None = None) -> None: """Get sources for the specific product.""" # Audio sources @@ -245,10 +336,7 @@ async def _async_update_sources(self) -> None: self._audio_sources = { source.id: source.name for source in cast(list[Source], sources.items) - if source.is_enabled - and source.id - and source.name - and source.id not in HIDDEN_SOURCE_IDS + if source.is_enabled and source.id and source.name and source.is_playable } # Some sources are not Beolink expandable, meaning that they can't be joined by @@ -350,9 +438,44 @@ def _async_update_volume(self, data: VolumeState) -> None: self.async_write_ha_state() + async def _async_update_name_and_beolink(self) -> None: + """Update the device friendly name.""" + beolink_self = await self._client.get_beolink_self() + + # Update device name + device_registry = dr.async_get(self.hass) + assert self.device_entry is not None + + device_registry.async_update_device( + device_id=self.device_entry.id, + name=beolink_self.friendly_name, + ) + + await self._async_update_beolink() + async def _async_update_beolink(self) -> None: """Update the current Beolink leader, listeners, peers and self.""" + self._beolink_attributes = {} + + assert self.device_entry is not None + assert self.device_entry.name is not None + + # Add Beolink self + self._beolink_attributes = { + "beolink": {"self": {self.device_entry.name: self._beolink_jid}} + } + + # Add Beolink peers + peers = await self._client.get_beolink_peers() + + if len(peers) > 0: + self._beolink_attributes["beolink"]["peers"] = {} + for peer in peers: + self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = ( + peer.jid + ) + # Add Beolink listeners / leader self._remote_leader = self._playback_metadata.remote_leader @@ -372,9 +495,14 @@ async def _async_update_beolink(self) -> None: # Add self group_members.append(self.entity_id) + self._beolink_attributes["beolink"]["leader"] = { + self._remote_leader.friendly_name: self._remote_leader.jid, + } + # If not listener, check if leader. else: beolink_listeners = await self._client.get_beolink_listeners() + beolink_listeners_attribute = {} # Check if the device is a leader. 
if len(beolink_listeners) > 0: @@ -395,6 +523,18 @@ async def _async_update_beolink(self) -> None: for beolink_listener in beolink_listeners ] ) + # Update Beolink attributes + for beolink_listener in beolink_listeners: + for peer in peers: + if peer.jid == beolink_listener.jid: + # Get the friendly names for the listeners from the peers + beolink_listeners_attribute[peer.friendly_name] = ( + beolink_listener.jid + ) + break + self._beolink_attributes["beolink"]["listeners"] = ( + beolink_listeners_attribute + ) self._attr_group_members = group_members @@ -462,6 +602,17 @@ async def _async_update_sound_modes( self.async_write_ha_state() + @property + def supported_features(self) -> MediaPlayerEntityFeature: + """Flag media player features that are supported.""" + features = BANG_OLUFSEN_FEATURES + + # Add seeking if supported by the current source + if self._source_change.is_seekable is True: + features |= MediaPlayerEntityFeature.SEEK + + return features + @property def state(self) -> MediaPlayerState: """Return the current state of the media player.""" @@ -537,37 +688,18 @@ def media_channel(self) -> str | None: @property def source(self) -> str | None: """Return the current audio source.""" + return self._source_change.name - # Try to fix some of the source_change chromecast weirdness. - if hasattr(self._playback_metadata, "title"): - # source_change is chromecast but line in is selected. - if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name: - return BangOlufsenSource.LINE_IN.name - - # source_change is chromecast but bluetooth is selected. - if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name: - return BangOlufsenSource.BLUETOOTH.name - - # source_change is line in, bluetooth or optical but stale metadata is sent through the WebSocket, - # And the source has not changed. - if self._source_change.id in ( - BangOlufsenSource.BLUETOOTH.id, - BangOlufsenSource.LINE_IN.id, - BangOlufsenSource.SPDIF.id, - ): - return BangOlufsenSource.CHROMECAST.name + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return information that is not returned anywhere else.""" + attributes: dict[str, Any] = {} - # source_change is chromecast and there is metadata but no artwork. Bluetooth does support metadata but not artwork - # So i assume that it is bluetooth and not chromecast - if ( - hasattr(self._playback_metadata, "art") - and self._playback_metadata.art is not None - and len(self._playback_metadata.art) == 0 - and self._source_change.id == BangOlufsenSource.CHROMECAST.id - ): - return BangOlufsenSource.BLUETOOTH.name + # Add Beolink attributes + if self._beolink_attributes: + attributes.update(self._beolink_attributes) - return self._source_change.name + return attributes async def async_turn_off(self) -> None: """Set the device to "networkStandby".""" @@ -608,17 +740,12 @@ async def async_media_next_track(self) -> None: async def async_media_seek(self, position: float) -> None: """Seek to position in ms.""" - if self._source_change.id == BangOlufsenSource.DEEZER.id: - await self._client.seek_to_position(position_ms=int(position * 1000)) - # Try to prevent the playback progress from bouncing in the UI. - self._attr_media_position_updated_at = utcnow() - self._playback_progress = PlaybackProgress(progress=int(position)) + await self._client.seek_to_position(position_ms=int(position * 1000)) + # Try to prevent the playback progress from bouncing in the UI. 
+ self._attr_media_position_updated_at = utcnow() + self._playback_progress = PlaybackProgress(progress=int(position)) - self.async_write_ha_state() - else: - raise HomeAssistantError( - translation_domain=DOMAIN, translation_key="non_deezer_seeking" - ) + self.async_write_ha_state() async def async_media_previous_track(self) -> None: """Send the previous track command.""" @@ -628,6 +755,20 @@ async def async_clear_playlist(self) -> None: """Clear the current playback queue.""" await self._client.post_clear_queue() + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set playback queues to repeat.""" + await self._client.set_settings_queue( + play_queue_settings=PlayQueueSettings( + repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] + ) + ) + + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set playback queues to shuffle.""" + await self._client.set_settings_queue( + play_queue_settings=PlayQueueSettings(shuffle=shuffle), + ) + async def async_select_source(self, source: str) -> None: """Select an input source.""" if source not in self._sources.values(): @@ -831,23 +972,30 @@ async def async_join_players(self, group_members: list[str]) -> None: # Beolink compatible B&O device. # Repeated presses / calls will cycle between compatible playing devices. if len(group_members) == 0: - await self._async_beolink_join() + await self.async_beolink_join() return # Get JID for each group member jids = [self._get_beolink_jid(group_member) for group_member in group_members] - await self._async_beolink_expand(jids) + await self.async_beolink_expand(jids) async def async_unjoin_player(self) -> None: """Unjoin Beolink session. End session if leader.""" - await self._async_beolink_leave() + await self.async_beolink_leave() - async def _async_beolink_join(self) -> None: + # Custom actions: + async def async_beolink_join(self, beolink_jid: str | None = None) -> None: """Join a Beolink multi-room experience.""" - await self._client.join_latest_beolink_experience() + if beolink_jid is None: + await self._client.join_latest_beolink_experience() + else: + await self._client.join_beolink_peer(jid=beolink_jid) - async def _async_beolink_expand(self, beolink_jids: list[str]) -> None: + async def async_beolink_expand( + self, beolink_jids: list[str] | None = None, all_discovered: bool = False + ) -> None: """Expand a Beolink multi-room experience with a device or devices.""" + # Ensure that the current source is expandable if not self._beolink_sources[cast(str, self._source_change.id)]: raise ServiceValidationError( @@ -859,10 +1007,37 @@ async def _async_beolink_expand(self, beolink_jids: list[str]) -> None: }, ) + # Expand to all discovered devices + if all_discovered: + peers = await self._client.get_beolink_peers() + + for peer in peers: + try: + await self._client.post_beolink_expand(jid=peer.jid) + except NotFoundException: + _LOGGER.warning("Unable to expand to %s", peer.jid) + # Try to expand to all defined devices + elif beolink_jids: + for beolink_jid in beolink_jids: + try: + await self._client.post_beolink_expand(jid=beolink_jid) + except NotFoundException: + _LOGGER.warning( + "Unable to expand to %s. 
Is the device available on the network?", + beolink_jid, + ) + + async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None: + """Unexpand a Beolink multi-room experience with a device or devices.""" + # Unexpand all defined devices for beolink_jid in beolink_jids: - await self._client.post_beolink_expand(jid=beolink_jid) + await self._client.post_beolink_unexpand(jid=beolink_jid) - async def _async_beolink_leave(self) -> None: + async def async_beolink_leave(self) -> None: """Leave the current Beolink experience.""" await self._client.post_beolink_leave() + + async def async_beolink_allstandby(self) -> None: + """Set all connected Beolink devices to standby.""" + await self._client.post_beolink_allstandby() diff --git a/homeassistant/components/bang_olufsen/services.yaml b/homeassistant/components/bang_olufsen/services.yaml new file mode 100644 index 00000000000000..e5d61420dffa33 --- /dev/null +++ b/homeassistant/components/bang_olufsen/services.yaml @@ -0,0 +1,79 @@ +beolink_allstandby: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + +beolink_expand: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + all_discovered: + required: false + example: false + selector: + boolean: + jid_options: + collapsed: false + fields: + beolink_jids: + required: false + example: >- + [ + 1111.2222222.33333333@products.bang-olufsen.com, + 4444.5555555.66666666@products.bang-olufsen.com + ] + selector: + object: + +beolink_join: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + jid_options: + collapsed: false + fields: + beolink_jid: + required: false + example: 1111.2222222.33333333@products.bang-olufsen.com + selector: + text: + +beolink_leave: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + +beolink_unexpand: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + jid_options: + collapsed: false + fields: + beolink_jids: + required: true + example: >- + [ + 1111.2222222.33333333@products.bang-olufsen.com, + 4444.5555555.66666666@products.bang-olufsen.com + ] + selector: + object: diff --git a/homeassistant/components/bang_olufsen/strings.json b/homeassistant/components/bang_olufsen/strings.json index b0cb88985d254e..aef6f953524fac 100644 --- a/homeassistant/components/bang_olufsen/strings.json +++ b/homeassistant/components/bang_olufsen/strings.json @@ -1,4 +1,8 @@ { + "common": { + "jid_options_name": "JID options", + "jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity." + }, "config": { "error": { "api_exception": "[%key:common::config_flow::error::cannot_connect%]", @@ -25,13 +29,72 @@ } } }, + "services": { + "beolink_allstandby": { + "name": "Beolink all standby", + "description": "Set all Connected Beolink devices to standby." + }, + "beolink_expand": { + "name": "Beolink expand", + "description": "Expand current Beolink experience.", + "fields": { + "all_discovered": { + "name": "All discovered", + "description": "Expand Beolink experience to all discovered devices." + }, + "beolink_jids": { + "name": "Beolink JIDs", + "description": "Specify which Beolink JIDs will join current Beolink experience." 
+ } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + }, + "beolink_join": { + "name": "Beolink join", + "description": "Join a Beolink experience.", + "fields": { + "beolink_jid": { + "name": "Beolink JID", + "description": "Manually specify Beolink JID to join." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + }, + "beolink_leave": { + "name": "Beolink leave", + "description": "Leave a Beolink experience." + }, + "beolink_unexpand": { + "name": "Beolink unexpand", + "description": "Unexpand from current Beolink experience.", + "fields": { + "beolink_jids": { + "name": "Beolink JIDs", + "description": "Specify which Beolink JIDs will leave from current Beolink experience." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + } + }, "exceptions": { "m3u_invalid_format": { "message": "Media sources with the .m3u extension are not supported." }, - "non_deezer_seeking": { - "message": "Seeking is currently only supported when using Deezer" - }, "invalid_source": { "message": "Invalid source: {invalid_source}. Valid sources are: {valid_sources}" }, diff --git a/homeassistant/components/bang_olufsen/websocket.py b/homeassistant/components/bang_olufsen/websocket.py index 3519fcd9a48a70..913f7cb32414a0 100644 --- a/homeassistant/components/bang_olufsen/websocket.py +++ b/homeassistant/components/bang_olufsen/websocket.py @@ -63,6 +63,9 @@ def __init__( self._client.get_playback_progress_notifications( self.on_playback_progress_notification ) + self._client.get_playback_source_notifications( + self.on_playback_source_notification + ) self._client.get_playback_state_notifications( self.on_playback_state_notification ) @@ -117,6 +120,11 @@ def on_notification_notification( self.hass, f"{self._unique_id}_{WebsocketNotification.BEOLINK}", ) + elif notification_type is WebsocketNotification.CONFIGURATION: + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}", + ) elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED: async_dispatcher_send( self.hass, @@ -157,6 +165,14 @@ def on_playback_state_notification(self, notification: RenderingState) -> None: notification, ) + def on_playback_source_notification(self, notification: Source) -> None: + """Send playback_source dispatch.""" + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}", + notification, + ) + def on_source_change_notification(self, notification: Source) -> None: """Send source_change dispatch.""" async_dispatcher_send( diff --git a/homeassistant/components/blebox/__init__.py b/homeassistant/components/blebox/__init__.py index 89d0d5fb1464b9..983f5750036dbb 100644 --- a/homeassistant/components/blebox/__init__.py +++ b/homeassistant/components/blebox/__init__.py @@ -17,9 +17,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DEFAULT_SETUP_TIMEOUT, DOMAIN, PRODUCT +from .const import DEFAULT_SETUP_TIMEOUT from .helpers import get_maybe_authenticated_session +type BleBoxConfigEntry = ConfigEntry[Box] + 
_LOGGER = logging.getLogger(__name__) PLATFORMS = [ @@ -35,7 +37,7 @@ PARALLEL_UPDATES = 0 -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BleBoxConfigEntry) -> bool: """Set up BleBox devices from a config entry.""" host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] @@ -55,20 +57,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Identify failed at %s:%d (%s)", api_host.host, api_host.port, ex) raise ConfigEntryNotReady from ex - domain = hass.data.setdefault(DOMAIN, {}) - domain_entry = domain.setdefault(entry.entry_id, {}) - product = domain_entry.setdefault(PRODUCT, product) + entry.runtime_data = product await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BleBoxConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/blebox/binary_sensor.py b/homeassistant/components/blebox/binary_sensor.py index 7f909fd9a7bd95..2aa86059ee29b4 100644 --- a/homeassistant/components/blebox/binary_sensor.py +++ b/homeassistant/components/blebox/binary_sensor.py @@ -1,18 +1,16 @@ """BleBox binary sensor entities.""" from blebox_uniapi.binary_sensor import BinarySensor as BinarySensorFeature -from blebox_uniapi.box import Box from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, PRODUCT +from . import BleBoxConfigEntry from .entity import BleBoxEntity BINARY_SENSOR_TYPES = ( @@ -25,15 +23,13 @@ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ BleBoxBinarySensorEntity(feature, description) - for feature in product.features.get("binary_sensors", []) + for feature in config_entry.runtime_data.features.get("binary_sensors", []) for description in BINARY_SENSOR_TYPES if description.key == feature.device_class ] diff --git a/homeassistant/components/blebox/button.py b/homeassistant/components/blebox/button.py index 24b09306de7c65..90356c8ae14ba7 100644 --- a/homeassistant/components/blebox/button.py +++ b/homeassistant/components/blebox/button.py @@ -2,28 +2,25 @@ from __future__ import annotations -from blebox_uniapi.box import Box import blebox_uniapi.button from homeassistant.components.button import ButtonEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry from .entity import BleBoxEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox button entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] - entities = [ - BleBoxButtonEntity(feature) for feature in product.features.get("buttons", []) + BleBoxButtonEntity(feature) + for feature in config_entry.runtime_data.features.get("buttons", []) ] async_add_entities(entities, True) diff --git a/homeassistant/components/blebox/climate.py b/homeassistant/components/blebox/climate.py index d4834ebbc28f8e..e04503974b7302 100644 --- a/homeassistant/components/blebox/climate.py +++ b/homeassistant/components/blebox/climate.py @@ -3,7 +3,6 @@ from datetime import timedelta from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.climate from homeassistant.components.climate import ( @@ -12,12 +11,11 @@ HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, PRODUCT +from . import BleBoxConfigEntry from .entity import BleBoxEntity SCAN_INTERVAL = timedelta(seconds=5) @@ -39,14 +37,13 @@ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox climate entity.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] - entities = [ - BleBoxClimateEntity(feature) for feature in product.features.get("climates", []) + BleBoxClimateEntity(feature) + for feature in config_entry.runtime_data.features.get("climates", []) ] async_add_entities(entities, True) diff --git a/homeassistant/components/blebox/const.py b/homeassistant/components/blebox/const.py index ff6a6b33af6774..e9ea19223023ca 100644 --- a/homeassistant/components/blebox/const.py +++ b/homeassistant/components/blebox/const.py @@ -1,7 +1,6 @@ """Constants for the BleBox devices integration.""" DOMAIN = "blebox" -PRODUCT = "product" DEFAULT_SETUP_TIMEOUT = 10 diff --git a/homeassistant/components/blebox/cover.py b/homeassistant/components/blebox/cover.py index c86d7aef0565ab..4f2a7eeef1166d 100644 --- a/homeassistant/components/blebox/cover.py +++ b/homeassistant/components/blebox/cover.py @@ -4,7 +4,6 @@ from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.cover from blebox_uniapi.cover import BleboxCoverState @@ -14,13 +13,12 @@ CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry from .entity import BleBoxEntity BLEBOX_TO_COVER_DEVICE_CLASSES = { @@ -32,27 +30,27 @@ BLEBOX_TO_HASS_COVER_STATES = { None: None, # all blebox covers - BleboxCoverState.MOVING_DOWN: STATE_CLOSING, - BleboxCoverState.MOVING_UP: STATE_OPENING, - BleboxCoverState.MANUALLY_STOPPED: STATE_OPEN, - BleboxCoverState.LOWER_LIMIT_REACHED: STATE_CLOSED, - BleboxCoverState.UPPER_LIMIT_REACHED: STATE_OPEN, + BleboxCoverState.MOVING_DOWN: CoverState.CLOSING, + BleboxCoverState.MOVING_UP: CoverState.OPENING, + BleboxCoverState.MANUALLY_STOPPED: CoverState.OPEN, + BleboxCoverState.LOWER_LIMIT_REACHED: CoverState.CLOSED, + BleboxCoverState.UPPER_LIMIT_REACHED: CoverState.OPEN, # extra states of gateController product - BleboxCoverState.OVERLOAD: STATE_OPEN, - BleboxCoverState.MOTOR_FAILURE: STATE_OPEN, - BleboxCoverState.SAFETY_STOP: STATE_OPEN, + BleboxCoverState.OVERLOAD: CoverState.OPEN, + BleboxCoverState.MOTOR_FAILURE: CoverState.OPEN, + BleboxCoverState.SAFETY_STOP: CoverState.OPEN, } async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ - BleBoxCoverEntity(feature) for feature in product.features.get("covers", []) + BleBoxCoverEntity(feature) + for feature in config_entry.runtime_data.features.get("covers", []) ] async_add_entities(entities, True) @@ -98,17 +96,17 @@ def current_cover_tilt_position(self) -> int | None: @property def is_opening(self) -> bool | None: """Return whether cover is opening.""" - return self._is_state(STATE_OPENING) + return self._is_state(CoverState.OPENING) @property def is_closing(self) -> bool | None: """Return whether cover is closing.""" - return self._is_state(STATE_CLOSING) + return self._is_state(CoverState.CLOSING) @property def is_closed(self) -> bool | None: """Return whether cover is closed.""" - return self._is_state(STATE_CLOSED) + return self._is_state(CoverState.CLOSED) async def async_open_cover(self, **kwargs: Any) -> None: """Fully open the cover position.""" diff --git a/homeassistant/components/blebox/light.py b/homeassistant/components/blebox/light.py index 650b8c057de50f..33fff1d71da1ab 100644 --- a/homeassistant/components/blebox/light.py +++ b/homeassistant/components/blebox/light.py @@ -6,7 +6,6 @@ import logging from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.light from blebox_uniapi.light import BleboxColorMode @@ -21,11 +20,10 @@ LightEntity, LightEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry from .entity import BleBoxEntity _LOGGER = logging.getLogger(__name__) @@ -35,13 +33,13 @@ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ - BleBoxLightEntity(feature) for feature in product.features.get("lights", []) + BleBoxLightEntity(feature) + for feature in config_entry.runtime_data.features.get("lights", []) ] async_add_entities(entities, True) diff --git a/homeassistant/components/blebox/sensor.py b/homeassistant/components/blebox/sensor.py index c60387c97b10b7..c0abff31257a55 100644 --- a/homeassistant/components/blebox/sensor.py +++ b/homeassistant/components/blebox/sensor.py @@ -1,6 +1,5 @@ """BleBox sensor entities.""" -from blebox_uniapi.box import Box import blebox_uniapi.sensor from homeassistant.components.sensor import ( @@ -9,7 +8,6 @@ SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, LIGHT_LUX, @@ -27,7 +25,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, PRODUCT +from . import BleBoxConfigEntry from .entity import BleBoxEntity SENSOR_TYPES = ( @@ -117,14 +115,13 @@ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox entry.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ BleBoxSensorEntity(feature, description) - for feature in product.features.get("sensors", []) + for feature in config_entry.runtime_data.features.get("sensors", []) for description in SENSOR_TYPES if description.key == feature.device_class ] diff --git a/homeassistant/components/blebox/strings.json b/homeassistant/components/blebox/strings.json index b179f0d097ba85..18c689e093d2c6 100644 --- a/homeassistant/components/blebox/strings.json +++ b/homeassistant/components/blebox/strings.json @@ -15,7 +15,9 @@ "description": "Set up your BleBox to integrate with Home Assistant.", "data": { "host": "[%key:common::config_flow::data::ip%]", - "port": "[%key:common::config_flow::data::port%]" + "password": "[%key:common::config_flow::data::password%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]" }, "title": "Set up your BleBox device" } diff --git a/homeassistant/components/blebox/switch.py b/homeassistant/components/blebox/switch.py index 93c8df0030c645..c6f439e27c546d 100644 --- a/homeassistant/components/blebox/switch.py +++ b/homeassistant/components/blebox/switch.py @@ -3,15 +3,13 @@ from datetime import timedelta from typing import Any -from blebox_uniapi.box import Box import blebox_uniapi.switch from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN, PRODUCT +from . 
import BleBoxConfigEntry from .entity import BleBoxEntity SCAN_INTERVAL = timedelta(seconds=5) @@ -19,13 +17,13 @@ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BleBoxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a BleBox switch entity.""" - product: Box = hass.data[DOMAIN][config_entry.entry_id][PRODUCT] entities = [ - BleBoxSwitchEntity(feature) for feature in product.features.get("switches", []) + BleBoxSwitchEntity(feature) + for feature in config_entry.runtime_data.features.get("switches", []) ] async_add_entities(entities, True) diff --git a/homeassistant/components/blink/__init__.py b/homeassistant/components/blink/__init__.py index d21994ecc8fbad..f6516434cd21a2 100644 --- a/homeassistant/components/blink/__init__.py +++ b/homeassistant/components/blink/__init__.py @@ -2,6 +2,7 @@ from copy import deepcopy import logging +from typing import Any from aiohttp import ClientError from blinkpy.auth import Auth @@ -9,7 +10,6 @@ import voluptuous as vol from homeassistant.components import persistent_notification -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry from homeassistant.const import ( CONF_FILE_PATH, CONF_FILENAME, @@ -24,7 +24,7 @@ from homeassistant.helpers.typing import ConfigType from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, PLATFORMS -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -40,13 +40,11 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -async def _reauth_flow_wrapper(hass, data): +async def _reauth_flow_wrapper( + hass: HomeAssistant, entry: BlinkConfigEntry, data: dict[str, Any] +) -> None: """Reauth flow wrapper.""" - hass.add_job( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=data - ) - ) + entry.async_start_reauth(hass, data=data) persistent_notification.async_create( hass, ( @@ -57,16 +55,16 @@ async def _reauth_flow_wrapper(hass, data): ) -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> bool: """Handle migration of a previous version config entry.""" _LOGGER.debug("Migrating from version %s", entry.version) data = {**entry.data} if entry.version == 1: data.pop("login_response", None) - await _reauth_flow_wrapper(hass, data) + await _reauth_flow_wrapper(hass, entry, data) return False if entry.version == 2: - await _reauth_flow_wrapper(hass, data) + await _reauth_flow_wrapper(hass, entry, data) return False return True @@ -79,10 +77,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> bool: """Set up Blink via config entry.""" - hass.data.setdefault(DOMAIN, {}) - _async_import_options_from_data_if_missing(hass, entry) session = async_get_clientsession(hass) blink = Blink(session=session) @@ -104,7 +100,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryNotReady await coordinator.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id] = coordinator + + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -113,7 +110,7 @@ async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @callback def _async_import_options_from_data_if_missing( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: BlinkConfigEntry ) -> None: options = dict(entry.options) if CONF_SCAN_INTERVAL not in entry.options: @@ -123,8 +120,6 @@ def _async_import_options_from_data_if_missing( hass.config_entries.async_update_entry(entry, options=options) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> bool: """Unload Blink entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/blink/alarm_control_panel.py b/homeassistant/components/blink/alarm_control_panel.py index 0ad15cf0d31dc2..bfb8aa9a3a031c 100644 --- a/homeassistant/components/blink/alarm_control_panel.py +++ b/homeassistant/components/blink/alarm_control_panel.py @@ -9,13 +9,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ATTRIBUTION, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_DISARMED, -) +from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo @@ -23,16 +19,18 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_ATTRIBUTION, DEFAULT_BRAND, DOMAIN -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Blink Alarm Control Panels.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data sync_modules = [] for sync_name, sync_module in coordinator.api.sync.items(): @@ -80,8 +78,10 @@ def _update_attr(self) -> None: self.sync.attributes["associated_cameras"] = list(self.sync.cameras) self.sync.attributes[ATTR_ATTRIBUTION] = DEFAULT_ATTRIBUTION self._attr_extra_state_attributes = self.sync.attributes - self._attr_state = ( - STATE_ALARM_ARMED_AWAY if self.sync.arm else STATE_ALARM_DISARMED + self._attr_alarm_state = ( + AlarmControlPanelState.ARMED_AWAY + if self.sync.arm + else AlarmControlPanelState.DISARMED ) async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/blink/binary_sensor.py b/homeassistant/components/blink/binary_sensor.py index 2f0a56a901c2ec..c11d4cfea23761 100644 --- a/homeassistant/components/blink/binary_sensor.py +++ b/homeassistant/components/blink/binary_sensor.py @@ -9,7 +9,6 @@ BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo @@ -23,7 +22,7 @@ TYPE_CAMERA_ARMED, 
TYPE_MOTION_DETECTED, ) -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -47,11 +46,13 @@ async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the blink binary sensors.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data entities = [ BlinkBinarySensor(coordinator, camera, description) diff --git a/homeassistant/components/blink/camera.py b/homeassistant/components/blink/camera.py index cce9100a0bd570..56a84135a9bca3 100644 --- a/homeassistant/components/blink/camera.py +++ b/homeassistant/components/blink/camera.py @@ -10,7 +10,6 @@ import voluptuous as vol from homeassistant.components.camera import Camera -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -28,7 +27,7 @@ SERVICE_SAVE_VIDEO, SERVICE_TRIGGER, ) -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -38,11 +37,13 @@ async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Blink Camera.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data entities = [ BlinkCamera(coordinator, name, camera) for name, camera in coordinator.api.cameras.items() diff --git a/homeassistant/components/blink/coordinator.py b/homeassistant/components/blink/coordinator.py index e71ff4e449e809..7278dabe08358c 100644 --- a/homeassistant/components/blink/coordinator.py +++ b/homeassistant/components/blink/coordinator.py @@ -8,6 +8,7 @@ from blinkpy.blinkpy import Blink +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -16,6 +17,8 @@ _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = 300 +type BlinkConfigEntry = ConfigEntry[BlinkUpdateCoordinator] + class BlinkUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """BlinkUpdateCoordinator - In charge of downloading the data for a site.""" diff --git a/homeassistant/components/blink/diagnostics.py b/homeassistant/components/blink/diagnostics.py index 88ff2aff92814a..255f58fc3695d8 100644 --- a/homeassistant/components/blink/diagnostics.py +++ b/homeassistant/components/blink/diagnostics.py @@ -4,24 +4,21 @@ from typing import Any -from blinkpy.blinkpy import Blink - from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .coordinator import BlinkConfigEntry TO_REDACT = {"serial", "macaddress", "username", "password", "token", "unique_id"} async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: BlinkConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - api: Blink = 
hass.data[DOMAIN][config_entry.entry_id].api + api = config_entry.runtime_data.api data = { camera.name: dict(camera.attributes.items()) diff --git a/homeassistant/components/blink/sensor.py b/homeassistant/components/blink/sensor.py index 8a807b9303e843..e0b5989cc80508 100644 --- a/homeassistant/components/blink/sensor.py +++ b/homeassistant/components/blink/sensor.py @@ -10,15 +10,18 @@ SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, UnitOfTemperature +from homeassistant.const import ( + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_BRAND, DOMAIN, TYPE_TEMPERATURE, TYPE_WIFI_STRENGTH -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -33,6 +36,8 @@ SensorEntityDescription( key=TYPE_WIFI_STRENGTH, translation_key="wifi_strength", + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), @@ -40,11 +45,13 @@ async def async_setup_entry( - hass: HomeAssistant, config: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + config_entry: BlinkConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Initialize a Blink sensor.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data entities = [ BlinkSensor(coordinator, camera, description) for camera in coordinator.api.cameras diff --git a/homeassistant/components/blink/services.py b/homeassistant/components/blink/services.py index bb2cbf575ddbbe..5f51598e721987 100644 --- a/homeassistant/components/blink/services.py +++ b/homeassistant/components/blink/services.py @@ -11,6 +11,7 @@ from homeassistant.helpers import config_validation as cv from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN +from .coordinator import BlinkConfigEntry SERVICE_UPDATE_SCHEMA = vol.Schema( { @@ -30,6 +31,7 @@ def setup_services(hass: HomeAssistant) -> None: async def send_pin(call: ServiceCall): """Call blink to send new pin.""" + config_entry: BlinkConfigEntry | None for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]: if not (config_entry := hass.config_entries.async_get_entry(entry_id)): raise ServiceValidationError( @@ -43,7 +45,7 @@ async def send_pin(call: ServiceCall): translation_key="not_loaded", translation_placeholders={"target": config_entry.title}, ) - coordinator = hass.data[DOMAIN][entry_id] + coordinator = config_entry.runtime_data await coordinator.api.auth.send_auth_key( coordinator.api, call.data[CONF_PIN], diff --git a/homeassistant/components/blink/switch.py b/homeassistant/components/blink/switch.py index ab9b825ded1771..8eabd5c0e599ac 100644 --- a/homeassistant/components/blink/switch.py +++ b/homeassistant/components/blink/switch.py @@ -9,7 +9,6 @@ SwitchEntity, SwitchEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers.device_registry import DeviceInfo @@ -17,7 +16,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_BRAND, DOMAIN, TYPE_CAMERA_ARMED -from .coordinator import BlinkUpdateCoordinator +from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator SWITCH_TYPES: tuple[SwitchEntityDescription, ...] = ( SwitchEntityDescription( @@ -30,11 +29,11 @@ async def async_setup_entry( hass: HomeAssistant, - config: ConfigEntry, + config_entry: BlinkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Blink switches.""" - coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + coordinator = config_entry.runtime_data async_add_entities( BlinkSwitch(coordinator, camera, description) diff --git a/homeassistant/components/bloomsky/__init__.py b/homeassistant/components/bloomsky/__init__.py deleted file mode 100644 index c2a46baaeb3616..00000000000000 --- a/homeassistant/components/bloomsky/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Support for BloomSky weather station.""" - -from datetime import timedelta -from http import HTTPStatus -import logging - -import requests -import voluptuous as vol - -from homeassistant.const import CONF_API_KEY, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import discovery -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType -from homeassistant.util import Throttle -from homeassistant.util.unit_system import METRIC_SYSTEM - -_LOGGER = logging.getLogger(__name__) - -PLATFORMS = [Platform.BINARY_SENSOR, Platform.CAMERA, Platform.SENSOR] - -DOMAIN = "bloomsky" - -# The BloomSky only updates every 5-8 minutes as per the API spec so there's -# no point in polling the API more frequently -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300) - -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.Schema({vol.Required(CONF_API_KEY): cv.string})}, extra=vol.ALLOW_EXTRA -) - - -def setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the BloomSky integration.""" - api_key = config[DOMAIN][CONF_API_KEY] - - try: - bloomsky = BloomSky(api_key, hass.config.units is METRIC_SYSTEM) - except RuntimeError: - return False - - hass.data[DOMAIN] = bloomsky - - for platform in PLATFORMS: - discovery.load_platform(hass, platform, DOMAIN, {}, config) - - return True - - -class BloomSky: - """Handle all communication with the BloomSky API.""" - - # API documentation at http://weatherlution.com/bloomsky-api/ - API_URL = "http://api.bloomsky.com/api/skydata" - - def __init__(self, api_key, is_metric): - """Initialize the BookSky.""" - self._api_key = api_key - self._endpoint_argument = "unit=intl" if is_metric else "" - self.devices = {} - self.is_metric = is_metric - _LOGGER.debug("Initial BloomSky device load") - self.refresh_devices() - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - def refresh_devices(self): - """Use the API to retrieve a list of devices.""" - _LOGGER.debug("Fetching BloomSky update") - response = requests.get( - f"{self.API_URL}?{self._endpoint_argument}", - headers={"Authorization": self._api_key}, - timeout=10, - ) - if response.status_code == HTTPStatus.UNAUTHORIZED: - raise RuntimeError("Invalid API_KEY") - if response.status_code == HTTPStatus.METHOD_NOT_ALLOWED: - _LOGGER.error("You have no bloomsky devices configured") - return - if response.status_code != HTTPStatus.OK: - _LOGGER.error("Invalid HTTP response: %s", response.status_code) - return - # Create dictionary 
keyed off of the device unique id - self.devices.update({device["DeviceID"]: device for device in response.json()}) diff --git a/homeassistant/components/bloomsky/binary_sensor.py b/homeassistant/components/bloomsky/binary_sensor.py deleted file mode 100644 index 12d55f971e18a1..00000000000000 --- a/homeassistant/components/bloomsky/binary_sensor.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Support the binary sensors of a BloomSky weather station.""" - -from __future__ import annotations - -import voluptuous as vol - -from homeassistant.components.binary_sensor import ( - PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA, - BinarySensorDeviceClass, - BinarySensorEntity, -) -from homeassistant.const import CONF_MONITORED_CONDITIONS -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . import DOMAIN - -SENSOR_TYPES = {"Rain": BinarySensorDeviceClass.MOISTURE, "Night": None} - -PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All( - cv.ensure_list, [vol.In(SENSOR_TYPES)] - ) - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the available BloomSky weather binary sensors.""" - # Default needed in case of discovery - if discovery_info is not None: - return - - sensors = config[CONF_MONITORED_CONDITIONS] - bloomsky = hass.data[DOMAIN] - - for device in bloomsky.devices.values(): - for variable in sensors: - add_entities([BloomSkySensor(bloomsky, device, variable)], True) - - -class BloomSkySensor(BinarySensorEntity): - """Representation of a single binary sensor in a BloomSky device.""" - - def __init__(self, bs, device, sensor_name): - """Initialize a BloomSky binary sensor.""" - self._bloomsky = bs - self._device_id = device["DeviceID"] - self._sensor_name = sensor_name - self._attr_name = f"{device['DeviceName']} {sensor_name}" - self._attr_unique_id = f"{self._device_id}-{sensor_name}" - self._attr_device_class = SENSOR_TYPES.get(sensor_name) - - def update(self) -> None: - """Request an update from the BloomSky API.""" - self._bloomsky.refresh_devices() - - self._attr_is_on = self._bloomsky.devices[self._device_id]["Data"][ - self._sensor_name - ] diff --git a/homeassistant/components/bloomsky/camera.py b/homeassistant/components/bloomsky/camera.py deleted file mode 100644 index f07dd1e9d14efd..00000000000000 --- a/homeassistant/components/bloomsky/camera.py +++ /dev/null @@ -1,67 +0,0 @@ -"""Support for a camera of a BloomSky weather station.""" - -from __future__ import annotations - -import logging - -import requests - -from homeassistant.components.camera import Camera -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . 
import DOMAIN - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up access to BloomSky cameras.""" - if discovery_info is not None: - return - - bloomsky = hass.data[DOMAIN] - - for device in bloomsky.devices.values(): - add_entities([BloomSkyCamera(bloomsky, device)]) - - -class BloomSkyCamera(Camera): - """Representation of the images published from the BloomSky's camera.""" - - def __init__(self, bs, device): - """Initialize access to the BloomSky camera images.""" - super().__init__() - self._attr_name = device["DeviceName"] - self._id = device["DeviceID"] - self._bloomsky = bs - self._url = "" - self._last_url = "" - # last_image will store images as they are downloaded so that the - # frequent updates in home-assistant don't keep poking the server - # to download the same image over and over. - self._last_image = "" - self._logger = logging.getLogger(__name__) - self._attr_unique_id = self._id - - def camera_image( - self, width: int | None = None, height: int | None = None - ) -> bytes | None: - """Update the camera's image if it has changed.""" - try: - self._url = self._bloomsky.devices[self._id]["Data"]["ImageURL"] - self._bloomsky.refresh_devices() - # If the URL hasn't changed then the image hasn't changed. - if self._url != self._last_url: - response = requests.get(self._url, timeout=10) - self._last_url = self._url - self._last_image = response.content - except requests.exceptions.RequestException as error: - self._logger.error("Error getting bloomsky image: %s", error) - return None - - return self._last_image diff --git a/homeassistant/components/bloomsky/manifest.json b/homeassistant/components/bloomsky/manifest.json deleted file mode 100644 index 65d302df239a11..00000000000000 --- a/homeassistant/components/bloomsky/manifest.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "domain": "bloomsky", - "name": "BloomSky", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/bloomsky", - "iot_class": "cloud_polling" -} diff --git a/homeassistant/components/bloomsky/sensor.py b/homeassistant/components/bloomsky/sensor.py deleted file mode 100644 index 6d99506bd4410f..00000000000000 --- a/homeassistant/components/bloomsky/sensor.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Support the sensor of a BloomSky weather station.""" - -from __future__ import annotations - -import voluptuous as vol - -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorDeviceClass, - SensorEntity, -) -from homeassistant.const import ( - AREA_SQUARE_METERS, - CONF_MONITORED_CONDITIONS, - PERCENTAGE, - UnitOfElectricPotential, - UnitOfPressure, - UnitOfTemperature, -) -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . 
import DOMAIN - -# These are the available sensors -SENSOR_TYPES = [ - "Temperature", - "Humidity", - "Pressure", - "Luminance", - "UVIndex", - "Voltage", -] - -# Sensor units - these do not currently align with the API documentation -SENSOR_UNITS_IMPERIAL = { - "Temperature": UnitOfTemperature.FAHRENHEIT, - "Humidity": PERCENTAGE, - "Pressure": UnitOfPressure.INHG, - "Luminance": f"cd/{AREA_SQUARE_METERS}", - "Voltage": UnitOfElectricPotential.MILLIVOLT, -} - -# Metric units -SENSOR_UNITS_METRIC = { - "Temperature": UnitOfTemperature.CELSIUS, - "Humidity": PERCENTAGE, - "Pressure": UnitOfPressure.MBAR, - "Luminance": f"cd/{AREA_SQUARE_METERS}", - "Voltage": UnitOfElectricPotential.MILLIVOLT, -} - -# Device class -SENSOR_DEVICE_CLASS = { - "Temperature": SensorDeviceClass.TEMPERATURE, - "Humidity": SensorDeviceClass.HUMIDITY, - "Pressure": SensorDeviceClass.PRESSURE, - "Voltage": SensorDeviceClass.VOLTAGE, -} - -# Which sensors to format numerically -FORMAT_NUMBERS = ["Temperature", "Pressure", "Voltage"] - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_TYPES): vol.All( - cv.ensure_list, [vol.In(SENSOR_TYPES)] - ) - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the available BloomSky weather sensors.""" - # Default needed in case of discovery - if discovery_info is not None: - return - - sensors = config[CONF_MONITORED_CONDITIONS] - bloomsky = hass.data[DOMAIN] - - for device in bloomsky.devices.values(): - for variable in sensors: - add_entities([BloomSkySensor(bloomsky, device, variable)], True) - - -class BloomSkySensor(SensorEntity): - """Representation of a single sensor in a BloomSky device.""" - - def __init__(self, bs, device, sensor_name): - """Initialize a BloomSky sensor.""" - self._bloomsky = bs - self._device_id = device["DeviceID"] - self._sensor_name = sensor_name - self._attr_name = f"{device['DeviceName']} {sensor_name}" - self._attr_unique_id = f"{self._device_id}-{sensor_name}" - self._attr_device_class = SENSOR_DEVICE_CLASS.get(sensor_name) - self._attr_native_unit_of_measurement = SENSOR_UNITS_IMPERIAL.get(sensor_name) - if self._bloomsky.is_metric: - self._attr_native_unit_of_measurement = SENSOR_UNITS_METRIC.get(sensor_name) - - def update(self) -> None: - """Request an update from the BloomSky API.""" - self._bloomsky.refresh_devices() - state = self._bloomsky.devices[self._device_id]["Data"][self._sensor_name] - self._attr_native_value = ( - f"{state:.2f}" if self._sensor_name in FORMAT_NUMBERS else state - ) diff --git a/homeassistant/components/blue_current/__init__.py b/homeassistant/components/blue_current/__init__.py index e852dfc8c6e684..6d0ccd7b6dbd2d 100644 --- a/homeassistant/components/blue_current/__init__.py +++ b/homeassistant/components/blue_current/__init__.py @@ -22,6 +22,8 @@ from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE +type BlueCurrentConfigEntry = ConfigEntry[Connector] + PLATFORMS = [Platform.SENSOR] CHARGE_POINTS = "CHARGE_POINTS" DATA = "data" @@ -32,9 +34,10 @@ VALUE_TYPES = ["CH_STATUS"] -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: BlueCurrentConfigEntry +) -> bool: """Set up Blue Current as a config entry.""" - hass.data.setdefault(DOMAIN, {}) client = Client() api_token = config_entry.data[CONF_API_TOKEN] connector = 
Connector(hass, config_entry, client) @@ -50,29 +53,25 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) await client.wait_for_charge_points() - hass.data[DOMAIN][config_entry.entry_id] = connector + config_entry.runtime_data = connector await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: BlueCurrentConfigEntry +) -> bool: """Unload the Blue Current config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) class Connector: """Define a class that connects to the Blue Current websocket API.""" def __init__( - self, hass: HomeAssistant, config: ConfigEntry, client: Client + self, hass: HomeAssistant, config: BlueCurrentConfigEntry, client: Client ) -> None: """Initialize.""" self.config = config diff --git a/homeassistant/components/blue_current/sensor.py b/homeassistant/components/blue_current/sensor.py index 4c590544984876..be39e9571ec44a 100644 --- a/homeassistant/components/blue_current/sensor.py +++ b/homeassistant/components/blue_current/sensor.py @@ -8,7 +8,6 @@ SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, UnitOfElectricCurrent, @@ -19,7 +18,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import Connector +from . import BlueCurrentConfigEntry, Connector from .const import DOMAIN from .entity import BlueCurrentEntity, ChargepointEntity @@ -211,10 +210,12 @@ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BlueCurrentConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Blue Current sensors.""" - connector: Connector = hass.data[DOMAIN][entry.entry_id] + connector = entry.runtime_data sensor_list: list[SensorEntity] = [ ChargePointSensor(connector, sensor, evse_id) for evse_id in connector.charge_points diff --git a/homeassistant/components/bluemaestro/__init__.py b/homeassistant/components/bluemaestro/__init__.py index c25ceb44759e7f..3d358148fabe9b 100644 --- a/homeassistant/components/bluemaestro/__init__.py +++ b/homeassistant/components/bluemaestro/__init__.py @@ -14,27 +14,26 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN - PLATFORMS: list[Platform] = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) +type BlueMaestroConfigEntry = ConfigEntry[PassiveBluetoothProcessorCoordinator] + -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BlueMaestroConfigEntry) -> bool: """Set up BlueMaestro BLE device from a config entry.""" address = entry.unique_id assert address is not None data = BlueMaestroBluetoothDeviceData() - coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ( - PassiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=data.update, - ) + coordinator = PassiveBluetoothProcessorCoordinator( + 
hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=data.update, ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload( coordinator.async_start() @@ -42,9 +41,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: BlueMaestroConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/bluemaestro/sensor.py b/homeassistant/components/bluemaestro/sensor.py index 75d448c9b9dc2d..57702d4ff31b53 100644 --- a/homeassistant/components/bluemaestro/sensor.py +++ b/homeassistant/components/bluemaestro/sensor.py @@ -8,11 +8,9 @@ Units, ) -from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, PassiveBluetoothDataUpdate, - PassiveBluetoothProcessorCoordinator, PassiveBluetoothProcessorEntity, ) from homeassistant.components.sensor import ( @@ -32,7 +30,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .const import DOMAIN +from . import BlueMaestroConfigEntry from .device import device_key_to_bluetooth_entity_key SENSOR_DESCRIPTIONS = { @@ -117,13 +115,11 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: BlueMaestroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the BlueMaestro BLE sensors.""" - coordinator: PassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update) entry.async_on_unload( processor.async_add_entities_listener( diff --git a/homeassistant/components/bluesound/__init__.py b/homeassistant/components/bluesound/__init__.py index da74ed042bef71..82fe9b00d57043 100644 --- a/homeassistant/components/bluesound/__init__.py +++ b/homeassistant/components/bluesound/__init__.py @@ -14,7 +14,7 @@ from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .media_player import setup_services +from .services import setup_services CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) diff --git a/homeassistant/components/bluesound/const.py b/homeassistant/components/bluesound/const.py index b7da4e31702128..b1be33f6770a7d 100644 --- a/homeassistant/components/bluesound/const.py +++ b/homeassistant/components/bluesound/const.py @@ -2,9 +2,5 @@ DOMAIN = "bluesound" INTEGRATION_TITLE = "Bluesound" -SERVICE_CLEAR_TIMER = "clear_sleep_timer" -SERVICE_JOIN = "join" -SERVICE_SET_TIMER = "set_sleep_timer" -SERVICE_UNJOIN = "unjoin" ATTR_BLUESOUND_GROUP = "bluesound_group" ATTR_MASTER = "master" diff --git a/homeassistant/components/bluesound/manifest.json b/homeassistant/components/bluesound/manifest.json index 4d92a5f7fc06ec..462112a8b783c7 100644 --- a/homeassistant/components/bluesound/manifest.json +++ b/homeassistant/components/bluesound/manifest.json @@ -6,7 +6,7 @@ "config_flow": true, 
"documentation": "https://www.home-assistant.io/integrations/bluesound", "iot_class": "local_polling", - "requirements": ["pyblu==1.0.3"], + "requirements": ["pyblu==1.0.4"], "zeroconf": [ { "type": "_musc._tcp.local." diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 1e2a537cd62e67..97985a74300c55 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -7,7 +7,7 @@ from contextlib import suppress from datetime import datetime, timedelta import logging -from typing import TYPE_CHECKING, Any, NamedTuple, cast +from typing import TYPE_CHECKING, Any from pyblu import Input, Player, Preset, Status, SyncStatus from pyblu.errors import PlayerUnreachableError @@ -24,18 +24,8 @@ async_process_play_media_url, ) from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_HOST, - CONF_HOSTS, - CONF_NAME, - CONF_PORT, -) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - HomeAssistant, - ServiceCall, -) +from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, issue_registry as ir @@ -48,16 +38,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -from .const import ( - ATTR_BLUESOUND_GROUP, - ATTR_MASTER, - DOMAIN, - INTEGRATION_TITLE, - SERVICE_CLEAR_TIMER, - SERVICE_JOIN, - SERVICE_SET_TIMER, - SERVICE_UNJOIN, -) +from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE from .utils import format_unique_id if TYPE_CHECKING: @@ -92,29 +73,6 @@ } ) -BS_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) - -BS_JOIN_SCHEMA = BS_SCHEMA.extend({vol.Required(ATTR_MASTER): cv.entity_id}) - - -class ServiceMethodDetails(NamedTuple): - """Details for SERVICE_TO_METHOD mapping.""" - - method: str - schema: vol.Schema - - -SERVICE_TO_METHOD = { - SERVICE_JOIN: ServiceMethodDetails(method="async_join", schema=BS_JOIN_SCHEMA), - SERVICE_UNJOIN: ServiceMethodDetails(method="async_unjoin", schema=BS_SCHEMA), - SERVICE_SET_TIMER: ServiceMethodDetails( - method="async_increase_timer", schema=BS_SCHEMA - ), - SERVICE_CLEAR_TIMER: ServiceMethodDetails( - method="async_clear_timer", schema=BS_SCHEMA - ), -} - async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: """Import config entry from configuration.yaml.""" @@ -159,33 +117,6 @@ async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: ) -def setup_services(hass: HomeAssistant) -> None: - """Set up services for Bluesound component.""" - - async def async_service_handler(service: ServiceCall) -> None: - """Map services to method of Bluesound devices.""" - if not (method := SERVICE_TO_METHOD.get(service.service)): - return - - params = { - key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID - } - if entity_ids := service.data.get(ATTR_ENTITY_ID): - target_players = [ - player for player in hass.data[DOMAIN] if player.entity_id in entity_ids - ] - else: - target_players = hass.data[DOMAIN] - - for player in target_players: - await getattr(player, method.method)(**params) - - for service, method in SERVICE_TO_METHOD.items(): - 
hass.services.async_register( - DOMAIN, service, async_service_handler, schema=method.schema - ) - - async def async_setup_entry( hass: HomeAssistant, config_entry: BluesoundConfigEntry, @@ -433,12 +364,13 @@ def state(self) -> MediaPlayerState: if self.is_grouped and not self.is_master: return MediaPlayerState.IDLE - status = self._status.state - if status in ("pause", "stop"): - return MediaPlayerState.PAUSED - if status in ("stream", "play"): - return MediaPlayerState.PLAYING - return MediaPlayerState.IDLE + match self._status.state: + case "pause": + return MediaPlayerState.PAUSED + case "stream" | "play": + return MediaPlayerState.PLAYING + case _: + return MediaPlayerState.IDLE @property def media_title(self) -> str | None: @@ -493,6 +425,8 @@ def media_position(self) -> int | None: return None position = self._status.seconds + if position is None: + return None if mediastate == MediaPlayerState.PLAYING: position += (dt_util.utcnow() - self._last_status_update).total_seconds() @@ -553,6 +487,11 @@ def bluesound_device_name(self) -> str | None: """Return the device name as returned by the device.""" return self._bluesound_device_name + @property + def sync_status(self) -> SyncStatus: + """Return the sync status.""" + return self._sync_status + @property def source_list(self) -> list[str] | None: """List of available input sources.""" @@ -691,7 +630,7 @@ def rebuild_bluesound_group(self) -> list[str]: reverse=True, ) return [ - cast(str, entity.name) + entity.sync_status.name for entity in sorted_entities if entity.bluesound_device_name in device_group ] @@ -831,7 +770,7 @@ async def async_volume_down(self) -> None: async def async_set_volume_level(self, volume: float) -> None: """Send volume_up command to media player.""" - volume = int(volume * 100) + volume = int(round(volume * 100)) volume = min(100, volume) volume = max(0, volume) diff --git a/homeassistant/components/bluesound/services.py b/homeassistant/components/bluesound/services.py new file mode 100644 index 00000000000000..06a507420f871b --- /dev/null +++ b/homeassistant/components/bluesound/services.py @@ -0,0 +1,68 @@ +"""Support for Bluesound devices.""" + +from __future__ import annotations + +from typing import NamedTuple + +import voluptuous as vol + +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv + +from .const import ATTR_MASTER, DOMAIN + +SERVICE_CLEAR_TIMER = "clear_sleep_timer" +SERVICE_JOIN = "join" +SERVICE_SET_TIMER = "set_sleep_timer" +SERVICE_UNJOIN = "unjoin" + +BS_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) + +BS_JOIN_SCHEMA = BS_SCHEMA.extend({vol.Required(ATTR_MASTER): cv.entity_id}) + + +class ServiceMethodDetails(NamedTuple): + """Details for SERVICE_TO_METHOD mapping.""" + + method: str + schema: vol.Schema + + +SERVICE_TO_METHOD = { + SERVICE_JOIN: ServiceMethodDetails(method="async_join", schema=BS_JOIN_SCHEMA), + SERVICE_UNJOIN: ServiceMethodDetails(method="async_unjoin", schema=BS_SCHEMA), + SERVICE_SET_TIMER: ServiceMethodDetails( + method="async_increase_timer", schema=BS_SCHEMA + ), + SERVICE_CLEAR_TIMER: ServiceMethodDetails( + method="async_clear_timer", schema=BS_SCHEMA + ), +} + + +def setup_services(hass: HomeAssistant) -> None: + """Set up services for Bluesound component.""" + + async def async_service_handler(service: ServiceCall) -> None: + """Map services to method of Bluesound devices.""" + if not (method := 
SERVICE_TO_METHOD.get(service.service)): + return + + params = { + key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID + } + if entity_ids := service.data.get(ATTR_ENTITY_ID): + target_players = [ + player for player in hass.data[DOMAIN] if player.entity_id in entity_ids + ] + else: + target_players = hass.data[DOMAIN] + + for player in target_players: + await getattr(player, method.method)(**params) + + for service, method in SERVICE_TO_METHOD.items(): + hass.services.async_register( + DOMAIN, service, async_service_handler, schema=method.schema + ) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 81602359c88f4c..fe16bd73a9e7c8 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -20,6 +20,6 @@ "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.20.0", "dbus-fast==2.24.3", - "habluetooth==3.5.0" + "habluetooth==3.6.0" ] } diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 8132d241ca4086..409bfdca6f1896 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -7,16 +7,21 @@ from bimmer_connected.api.authentication import MyBMWAuthentication from bimmer_connected.api.regions import get_region_from_name -from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError +from bimmer_connected.models import ( + MyBMWAPIError, + MyBMWAuthError, + MyBMWCaptchaMissingError, +) from httpx import RequestError import voluptuous as vol from homeassistant.config_entries import ( SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_SOURCE, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -53,6 +58,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, try: await auth.login() + except MyBMWCaptchaMissingError as ex: + raise MissingCaptcha from ex except MyBMWAuthError as ex: raise InvalidAuth from ex except (MyBMWAPIError, RequestError) as ex: @@ -72,7 +79,7 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry + _existing_entry_data: Mapping[str, Any] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -82,9 +89,11 @@ async def async_step_user( if user_input is not None: unique_id = f"{user_input[CONF_REGION]}-{user_input[CONF_USERNAME]}" + await self.async_set_unique_id(unique_id) - if self.source != SOURCE_REAUTH: - await self.async_set_unique_id(unique_id) + if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: + self._abort_if_unique_id_mismatch(reason="account_mismatch") + else: self._abort_if_unique_id_configured() info = None @@ -95,6 +104,8 @@ async def async_step_user( CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN), CONF_GCID: info.get(CONF_GCID), } + except MissingCaptcha: + errors["base"] = "missing_captcha" except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: @@ -102,23 +113,22 @@ async def async_step_user( if info: if self.source == SOURCE_REAUTH: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=entry_data + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=entry_data ) - self.hass.async_create_task( - 
self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) + if self.source == SOURCE_RECONFIGURE: + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data=entry_data, ) - return self.async_abort(reason="reauth_successful") - return self.async_create_entry( title=info["title"], data=entry_data, ) schema = self.add_suggested_values_to_schema( - DATA_SCHEMA, self._reauth_entry.data if self.source == SOURCE_REAUTH else {} + DATA_SCHEMA, + self._existing_entry_data, ) return self.async_show_form(step_id="user", data_schema=schema, errors=errors) @@ -127,7 +137,14 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self._get_reauth_entry() + self._existing_entry_data = entry_data + return await self.async_step_user() + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + self._existing_entry_data = self._get_reconfigure_entry().data return await self.async_step_user() @staticmethod @@ -136,10 +153,10 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> BMWOptionsFlow: """Return a MyBMW option flow.""" - return BMWOptionsFlow(config_entry) + return BMWOptionsFlow() -class BMWOptionsFlow(OptionsFlowWithConfigEntry): +class BMWOptionsFlow(OptionsFlow): """Handle a option flow for MyBMW.""" async def async_step_init( @@ -183,3 +200,7 @@ class CannotConnect(HomeAssistantError): class InvalidAuth(HomeAssistantError): """Error to indicate there is invalid auth.""" + + +class MissingCaptcha(HomeAssistantError): + """Error to indicate the captcha token is missing.""" diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 992e7dea6b263c..d38b7ffacc2a7c 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -7,7 +7,12 @@ from bimmer_connected.account import MyBMWAccount from bimmer_connected.api.regions import get_region_from_name -from bimmer_connected.models import GPSPosition, MyBMWAPIError, MyBMWAuthError +from bimmer_connected.models import ( + GPSPosition, + MyBMWAPIError, + MyBMWAuthError, + MyBMWCaptchaMissingError, +) from httpx import RequestError from homeassistant.config_entries import ConfigEntry @@ -61,6 +66,12 @@ async def _async_update_data(self) -> None: try: await self.account.get_vehicles() + except MyBMWCaptchaMissingError as err: + # If a captcha is required (user/password login flow), always trigger the reauth flow + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="missing_captcha", + ) from err except MyBMWAuthError as err: # Allow one retry interval before raising AuthFailed to avoid flaky API issues if self.last_update_success: diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 6bc9027ac19983..584eb1eebb554d 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], "quality_scale": "platinum", - "requirements": ["bimmer-connected[china]==0.16.3"] + "requirements": ["bimmer-connected[china]==0.16.4"] } diff --git a/homeassistant/components/bmw_connected_drive/sensor.py 
b/homeassistant/components/bmw_connected_drive/sensor.py index fe0e835622b8e5..e24e2dd75f67a2 100644 --- a/homeassistant/components/bmw_connected_drive/sensor.py +++ b/homeassistant/components/bmw_connected_drive/sensor.py @@ -80,7 +80,6 @@ class BMWSensorEntityDescription(SensorEntityDescription): BMWSensorEntityDescription( key="fuel_and_battery.charging_target", translation_key="charging_target", - device_class=SensorDeviceClass.BATTERY, native_unit_of_measurement=PERCENTAGE, suggested_display_precision=0, is_available=lambda v: v.is_lsc_enabled and v.has_electric_drivetrain, diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index c59900ef4f9bb2..0e7a4a32ef45ef 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -11,11 +11,14 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "missing_captcha": "Captcha validation missing" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "account_mismatch": "Username and region are not allowed to change" } }, "options": { @@ -198,6 +201,9 @@ "exceptions": { "invalid_poi": { "message": "Invalid data for point of interest: {poi_exception}" + }, + "missing_captcha": { + "message": "Login requires captcha validation" } } } diff --git a/homeassistant/components/bond/button.py b/homeassistant/components/bond/button.py index a2d88bc6f6a69b..42915c7dc0b6c4 100644 --- a/homeassistant/components/bond/button.py +++ b/homeassistant/components/bond/button.py @@ -237,6 +237,20 @@ class BondButtonEntityDescription(ButtonEntityDescription): mutually_exclusive=Action.SET_POSITION, argument=STEP_SIZE, ), + BondButtonEntityDescription( + key=Action.OPEN_NEXT, + name="Open Next", + translation_key="open_next", + mutually_exclusive=None, + argument=None, + ), + BondButtonEntityDescription( + key=Action.CLOSE_NEXT, + name="Close Next", + translation_key="close_next", + mutually_exclusive=None, + argument=None, + ), ) diff --git a/homeassistant/components/bond/icons.json b/homeassistant/components/bond/icons.json index 48b351b1c7600a..b150d1c1fa3cf7 100644 --- a/homeassistant/components/bond/icons.json +++ b/homeassistant/components/bond/icons.json @@ -84,6 +84,12 @@ }, "decrease_position": { "default": "mdi:minus-box" + }, + "open_next": { + "default": "mdi:plus-box" + }, + "close_next": { + "default": "mdi:minus-box" } }, "light": { diff --git a/homeassistant/components/bosch_shc/config_flow.py b/homeassistant/components/bosch_shc/config_flow.py index a8896414a4f6b9..58601152da53a3 100644 --- a/homeassistant/components/bosch_shc/config_flow.py +++ b/homeassistant/components/bosch_shc/config_flow.py @@ -39,16 +39,21 @@ ) -def write_tls_asset(hass: HomeAssistant, filename: str, asset: bytes) -> None: +def write_tls_asset( + hass: HomeAssistant, folder: str, filename: str, asset: bytes +) -> None: """Write the tls assets to disk.""" - makedirs(hass.config.path(DOMAIN), exist_ok=True) - with open(hass.config.path(DOMAIN, filename), "w", 
encoding="utf8") as file_handle: + makedirs(hass.config.path(DOMAIN, folder), exist_ok=True) + with open( + hass.config.path(DOMAIN, folder, filename), "w", encoding="utf8" + ) as file_handle: file_handle.write(asset.decode("utf-8")) def create_credentials_and_validate( hass: HomeAssistant, host: str, + unique_id: str, user_input: dict[str, Any], zeroconf_instance: zeroconf.HaZeroconf, ) -> dict[str, Any] | None: @@ -57,13 +62,15 @@ def create_credentials_and_validate( result = helper.register(host, "HomeAssistant") if result is not None: - write_tls_asset(hass, CONF_SHC_CERT, result["cert"]) - write_tls_asset(hass, CONF_SHC_KEY, result["key"]) + # Save key/certificate pair for each registered host separately + # otherwise only the last registered host is accessible. + write_tls_asset(hass, unique_id, CONF_SHC_CERT, result["cert"]) + write_tls_asset(hass, unique_id, CONF_SHC_KEY, result["key"]) session = SHCSession( host, - hass.config.path(DOMAIN, CONF_SHC_CERT), - hass.config.path(DOMAIN, CONF_SHC_KEY), + hass.config.path(DOMAIN, unique_id, CONF_SHC_CERT), + hass.config.path(DOMAIN, unique_id, CONF_SHC_KEY), True, zeroconf_instance, ) @@ -143,11 +150,16 @@ async def async_step_credentials( errors: dict[str, str] = {} if user_input is not None: zeroconf_instance = await zeroconf.async_get_instance(self.hass) + # unique_id uniquely identifies the registered controller and is used + # to save the key/certificate pair for each controller separately + unique_id = self.info["unique_id"] + assert unique_id try: result = await self.hass.async_add_executor_job( create_credentials_and_validate, self.hass, self.host, + unique_id, user_input, zeroconf_instance, ) @@ -167,13 +179,18 @@ async def async_step_credentials( else: assert result entry_data = { - CONF_SSL_CERTIFICATE: self.hass.config.path(DOMAIN, CONF_SHC_CERT), - CONF_SSL_KEY: self.hass.config.path(DOMAIN, CONF_SHC_KEY), + # Each host has its own key/certificate pair + CONF_SSL_CERTIFICATE: self.hass.config.path( + DOMAIN, unique_id, CONF_SHC_CERT + ), + CONF_SSL_KEY: self.hass.config.path( + DOMAIN, unique_id, CONF_SHC_KEY + ), CONF_HOST: self.host, CONF_TOKEN: result["token"], CONF_HOSTNAME: result["token"].split(":", 1)[1], } - existing_entry = await self.async_set_unique_id(self.info["unique_id"]) + existing_entry = await self.async_set_unique_id(unique_id) if existing_entry: return self.async_update_reload_and_abort( existing_entry, diff --git a/homeassistant/components/bring/icons.json b/homeassistant/components/bring/icons.json index 7a4775066cf8ba..c670ef87700d66 100644 --- a/homeassistant/components/bring/icons.json +++ b/homeassistant/components/bring/icons.json @@ -12,6 +12,13 @@ }, "list_language": { "default": "mdi:earth" + }, + "list_access": { + "default": "mdi:account-lock", + "state": { + "shared": "mdi:account-group", + "invitation": "mdi:account-multiple-plus" + } } }, "todo": { diff --git a/homeassistant/components/bring/manifest.json b/homeassistant/components/bring/manifest.json index 79336c086ed844..ff24a9913508a3 100644 --- a/homeassistant/components/bring/manifest.json +++ b/homeassistant/components/bring/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/bring", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["bring-api==0.9.0"] + "requirements": ["bring-api==0.9.1"] } diff --git a/homeassistant/components/bring/sensor.py b/homeassistant/components/bring/sensor.py index edc1da3d59bf92..746ed397e1bcd1 100644 --- 
a/homeassistant/components/bring/sensor.py +++ b/homeassistant/components/bring/sensor.py @@ -40,6 +40,7 @@ class BringSensor(StrEnum): CONVENIENT = "convenient" DISCOUNTED = "discounted" LIST_LANGUAGE = "list_language" + LIST_ACCESS = "list_access" SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] = ( @@ -73,6 +74,14 @@ class BringSensor(StrEnum): options=[x.lower() for x in BRING_SUPPORTED_LOCALES], device_class=SensorDeviceClass.ENUM, ), + BringSensorEntityDescription( + key=BringSensor.LIST_ACCESS, + translation_key=BringSensor.LIST_ACCESS, + value_fn=lambda lst, _: lst["status"].lower(), + entity_category=EntityCategory.DIAGNOSTIC, + options=["registered", "shared", "invitation"], + device_class=SensorDeviceClass.ENUM, + ), ) diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index bce18fc6a925ed..9a93881b5d297b 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -61,6 +61,14 @@ "sv-se": "Sweden", "tr-tr": "Türkiye" } + }, + "list_access": { + "name": "List access", + "state": { + "registered": "Private", + "shared": "Shared", + "invitation": "Invitation pending" + } } } }, diff --git a/homeassistant/components/broadlink/device.py b/homeassistant/components/broadlink/device.py index 2518cd65bd34e6..75b6236a4731ef 100644 --- a/homeassistant/components/broadlink/device.py +++ b/homeassistant/components/broadlink/device.py @@ -15,7 +15,7 @@ ) from typing_extensions import TypeVar -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -200,10 +200,4 @@ async def _async_handle_auth_error(self) -> None: self.api.host[0], ) - self.hass.async_create_task( - self.hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data={CONF_NAME: self.name, **self.config.data}, - ) - ) + self.config.async_start_reauth(self.hass, data={CONF_NAME: self.name}) diff --git a/homeassistant/components/broadlink/strings.json b/homeassistant/components/broadlink/strings.json index 5150a521363bbd..17c98f0182f9b0 100644 --- a/homeassistant/components/broadlink/strings.json +++ b/homeassistant/components/broadlink/strings.json @@ -43,6 +43,7 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_host": "[%key:common::config_flow::error::invalid_host%]", "unknown": "[%key:common::config_flow::error::unknown%]" } diff --git a/homeassistant/components/brother/config_flow.py b/homeassistant/components/brother/config_flow.py index 8966b41c948e23..d9130b96300b19 100644 --- a/homeassistant/components/brother/config_flow.py +++ b/homeassistant/components/brother/config_flow.py @@ -9,7 +9,7 @@ from homeassistant.components import zeroconf from homeassistant.components.snmp import async_get_snmp_engine -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_TYPE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -49,8 +49,6 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry - def __init__(self) -> None: """Initialize.""" self.brother: Brother @@ -145,18 +143,12 @@ async def async_step_reconfigure( self, 
user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - self.entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" + entry = self._get_reconfigure_entry() errors = {} if user_input is not None: try: - await validate_input(self.hass, user_input, self.entry.unique_id) + await validate_input(self.hass, user_input, entry.unique_id) except InvalidHost: errors[CONF_HOST] = "wrong_host" except (ConnectionError, TimeoutError): @@ -166,20 +158,18 @@ async def async_step_reconfigure_confirm( except AnotherDevice: errors["base"] = "another_device" else: - self.hass.config_entries.async_update_entry( - self.entry, - data=self.entry.data | {CONF_HOST: user_input[CONF_HOST]}, + return self.async_update_reload_and_abort( + entry, + data_updates={CONF_HOST: user_input[CONF_HOST]}, ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reconfigure_successful") return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( data_schema=RECONFIGURE_SCHEMA, - suggested_values=self.entry.data | (user_input or {}), + suggested_values=entry.data | (user_input or {}), ), - description_placeholders={"printer_name": self.entry.title}, + description_placeholders={"printer_name": entry.title}, errors=errors, ) diff --git a/homeassistant/components/brother/strings.json b/homeassistant/components/brother/strings.json index d7f8f4a1b8975e..3b5b38ce9a05dd 100644 --- a/homeassistant/components/brother/strings.json +++ b/homeassistant/components/brother/strings.json @@ -18,7 +18,7 @@ "type": "[%key:component::brother::config::step::user::data::type%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update configuration for {printer_name}.", "data": { "host": "[%key:common::config_flow::data::host%]" diff --git a/homeassistant/components/brunt/__init__.py b/homeassistant/components/brunt/__init__.py index bec281d1902d16..c488c813b3b298 100644 --- a/homeassistant/components/brunt/__init__.py +++ b/homeassistant/components/brunt/__init__.py @@ -2,79 +2,22 @@ from __future__ import annotations -from asyncio import timeout -import logging - -from aiohttp.client_exceptions import ClientResponseError, ServerDisconnectedError -from brunt import BruntClientAsync, Thing - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed - -from .const import DATA_BAPI, DATA_COOR, DOMAIN, PLATFORMS, REGULAR_INTERVAL -_LOGGER = logging.getLogger(__name__) +from .const import PLATFORMS +from .coordinator import BruntConfigEntry, BruntCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BruntConfigEntry) -> bool: """Set up Brunt using config flow.""" - session = async_get_clientsession(hass) - bapi = BruntClientAsync( - username=entry.data[CONF_USERNAME], - 
password=entry.data[CONF_PASSWORD], - session=session, - ) - try: - await bapi.async_login() - except ServerDisconnectedError as exc: - raise ConfigEntryNotReady("Brunt not ready to connect.") from exc - except ClientResponseError as exc: - raise ConfigEntryAuthFailed( - f"Brunt could not connect with username: {entry.data[CONF_USERNAME]}." - ) from exc - - async def async_update_data() -> dict[str | None, Thing]: - """Fetch data from the Brunt endpoint for all Things. - - Error 403 is the API response for any kind of authentication error (failed password or email) - Error 401 is the API response for things that are not part of the account, could happen when a device is deleted from the account. - """ - try: - async with timeout(10): - things = await bapi.async_get_things(force=True) - return {thing.serial: thing for thing in things} - except ServerDisconnectedError as err: - raise UpdateFailed(f"Error communicating with API: {err}") from err - except ClientResponseError as err: - if err.status == 403: - raise ConfigEntryAuthFailed from err - if err.status == 401: - _LOGGER.warning("Device not found, will reload Brunt integration") - await hass.config_entries.async_reload(entry.entry_id) - raise UpdateFailed from err - - coordinator = DataUpdateCoordinator( - hass, - _LOGGER, - name="brunt", - update_method=async_update_data, - update_interval=REGULAR_INTERVAL, - ) + coordinator = BruntCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = {DATA_BAPI: bapi, DATA_COOR: coordinator} + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BruntConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/brunt/config_flow.py b/homeassistant/components/brunt/config_flow.py index dd119a402d8654..3baea9b98cc5a7 100644 --- a/homeassistant/components/brunt/config_flow.py +++ b/homeassistant/components/brunt/config_flow.py @@ -12,7 +12,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN @@ -92,7 +92,10 @@ async def async_step_reauth_confirm( return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, - description_placeholders={"username": username}, + description_placeholders={ + CONF_USERNAME: username, + CONF_NAME: reauth_entry.title, + }, ) user_input[CONF_USERNAME] = username errors = await validate_input(user_input) @@ -101,7 +104,10 @@ async def async_step_reauth_confirm( step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, errors=errors, - description_placeholders={"username": username}, + description_placeholders={ + CONF_USERNAME: username, + CONF_NAME: reauth_entry.title, + }, ) return self.async_update_reload_and_abort(reauth_entry, data=user_input) diff --git a/homeassistant/components/brunt/const.py b/homeassistant/components/brunt/const.py index 4c246d28d64601..0d9323cbf0716b 100644 --- 
a/homeassistant/components/brunt/const.py +++ b/homeassistant/components/brunt/const.py @@ -10,8 +10,6 @@ NOTIFICATION_TITLE = "Brunt Cover Setup" ATTRIBUTION = "Based on an unofficial Brunt SDK." PLATFORMS = [Platform.COVER] -DATA_BAPI = "bapi" -DATA_COOR = "coordinator" CLOSED_POSITION = 0 OPEN_POSITION = 100 diff --git a/homeassistant/components/brunt/coordinator.py b/homeassistant/components/brunt/coordinator.py new file mode 100644 index 00000000000000..b07ec2c0c8873a --- /dev/null +++ b/homeassistant/components/brunt/coordinator.py @@ -0,0 +1,80 @@ +"""The brunt component.""" + +from __future__ import annotations + +from asyncio import timeout +import logging + +from aiohttp.client_exceptions import ClientResponseError, ServerDisconnectedError +from brunt import BruntClientAsync, Thing + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import REGULAR_INTERVAL + +_LOGGER = logging.getLogger(__name__) + +type BruntConfigEntry = ConfigEntry[BruntCoordinator] + + +class BruntCoordinator(DataUpdateCoordinator[dict[str | None, Thing]]): + """Brunt data update coordinator.""" + + bapi: BruntClientAsync + config_entry: BruntConfigEntry + + def __init__( + self, + hass: HomeAssistant, + config_entry: BruntConfigEntry, + ) -> None: + """Initialize the Brunt coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=config_entry, + name="brunt", + update_interval=REGULAR_INTERVAL, + ) + + async def _async_setup(self) -> None: + session = async_get_clientsession(self.hass) + + self.bapi = BruntClientAsync( + username=self.config_entry.data[CONF_USERNAME], + password=self.config_entry.data[CONF_PASSWORD], + session=session, + ) + try: + await self.bapi.async_login() + except ServerDisconnectedError as exc: + raise ConfigEntryNotReady("Brunt not ready to connect.") from exc + except ClientResponseError as exc: + raise ConfigEntryAuthFailed( + f"Brunt could not connect with username: {self.config_entry.data[CONF_USERNAME]}." + ) from exc + + async def _async_update_data(self) -> dict[str | None, Thing]: + """Fetch data from the Brunt endpoint for all Things. + + Error 403 is the API response for any kind of authentication error (failed password or email). + Error 401 is the API response for things that are not part of the account; this can happen when a device is deleted from the account.
+ """ + try: + async with timeout(10): + things = await self.bapi.async_get_things(force=True) + return {thing.serial: thing for thing in things} + except ServerDisconnectedError as err: + raise UpdateFailed(f"Error communicating with API: {err}") from err + except ClientResponseError as err: + if err.status == 403: + raise ConfigEntryAuthFailed from err + if err.status == 401: + _LOGGER.warning("Device not found, will reload Brunt integration") + await self.hass.config_entries.async_reload(self.config_entry.entry_id) + raise UpdateFailed from err diff --git a/homeassistant/components/brunt/cover.py b/homeassistant/components/brunt/cover.py index 519885fe54248a..bb97f42bd369be 100644 --- a/homeassistant/components/brunt/cover.py +++ b/homeassistant/components/brunt/cover.py @@ -5,7 +5,7 @@ from typing import Any from aiohttp.client_exceptions import ClientResponseError -from brunt import BruntClientAsync, Thing +from brunt import Thing from homeassistant.components.cover import ( ATTR_POSITION, @@ -13,49 +13,39 @@ CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( ATTR_REQUEST_POSITION, ATTRIBUTION, CLOSED_POSITION, - DATA_BAPI, - DATA_COOR, DOMAIN, FAST_INTERVAL, OPEN_POSITION, REGULAR_INTERVAL, ) +from .coordinator import BruntConfigEntry, BruntCoordinator async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: BruntConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the brunt platform.""" - bapi: BruntClientAsync = hass.data[DOMAIN][entry.entry_id][DATA_BAPI] - coordinator: DataUpdateCoordinator[dict[str | None, Thing]] = hass.data[DOMAIN][ - entry.entry_id - ][DATA_COOR] + coordinator = entry.runtime_data async_add_entities( - BruntDevice(coordinator, serial, thing, bapi, entry.entry_id) + BruntDevice(coordinator, serial, thing, entry.entry_id) for serial, thing in coordinator.data.items() ) -class BruntDevice( - CoordinatorEntity[DataUpdateCoordinator[dict[str | None, Thing]]], CoverEntity -): +class BruntDevice(CoordinatorEntity[BruntCoordinator], CoverEntity): """Representation of a Brunt cover device. Contains the common logic for all Brunt devices. 
@@ -73,16 +63,14 @@ class BruntDevice( def __init__( self, - coordinator: DataUpdateCoordinator[dict[str | None, Thing]], + coordinator: BruntCoordinator, serial: str | None, thing: Thing, - bapi: BruntClientAsync, entry_id: str, ) -> None: """Init the Brunt device.""" super().__init__(coordinator) self._attr_unique_id = serial - self._bapi = bapi self._thing = thing self._entry_id = entry_id @@ -167,7 +155,7 @@ async def async_set_cover_position(self, **kwargs: Any) -> None: async def _async_update_cover(self, position: int) -> None: """Set the cover to the new position and wait for the update to be reflected.""" try: - await self._bapi.async_change_request_position( + await self.coordinator.bapi.async_change_request_position( position, thing_uri=self._thing.thing_uri ) except ClientResponseError as exc: @@ -182,7 +170,7 @@ def _brunt_update_listener(self) -> None: """Update the update interval after each refresh.""" if ( self.request_cover_position - == self._bapi.last_requested_positions[self._thing.thing_uri] + == self.coordinator.bapi.last_requested_positions[self._thing.thing_uri] and self.move_state == 0 ): self.coordinator.update_interval = REGULAR_INTERVAL diff --git a/homeassistant/components/bryant_evolution/config_flow.py b/homeassistant/components/bryant_evolution/config_flow.py index 9e115bd69eee84..2e5a094948d887 100644 --- a/homeassistant/components/bryant_evolution/config_flow.py +++ b/homeassistant/components/bryant_evolution/config_flow.py @@ -61,12 +61,6 @@ async def async_step_user( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle integration reconfiguration.""" - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle integration reconfiguration.""" errors: dict[str, str] = {} @@ -82,7 +76,7 @@ async def async_step_reconfigure_confirm( ) errors["base"] = "cannot_connect" return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=STEP_USER_DATA_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/bsblan/__init__.py b/homeassistant/components/bsblan/__init__.py index 79447c6cff5e21..4d3c6ee2073804 100644 --- a/homeassistant/components/bsblan/__init__.py +++ b/homeassistant/components/bsblan/__init__.py @@ -15,11 +15,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import CONF_PASSKEY, DOMAIN +from .const import CONF_PASSKEY from .coordinator import BSBLanUpdateCoordinator PLATFORMS = [Platform.CLIMATE, Platform.SENSOR] +type BSBLanConfigEntry = ConfigEntry[BSBLanData] + @dataclasses.dataclass class BSBLanData: @@ -32,7 +34,7 @@ class BSBLanData: static: StaticState -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bool: """Set up BSB-Lan from a config entry.""" # create config using BSBLANConfig @@ -57,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: info = await bsblan.info() static = await bsblan.static_values() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = BSBLanData( + entry.runtime_data = BSBLanData( client=bsblan, coordinator=coordinator, device=device, @@ -70,11 +72,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bool: """Unload BSBLAN config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - # Cleanup - del hass.data[DOMAIN][entry.entry_id] - if not hass.data[DOMAIN]: - del hass.data[DOMAIN] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index 3a204a9e0c244f..fcbe88f2face5b 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -15,7 +15,6 @@ ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -23,7 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.enum import try_parse_enum -from . import BSBLanData +from . import BSBLanConfigEntry, BSBLanData from .const import ATTR_TARGET_TEMPERATURE, DOMAIN from .entity import BSBLanEntity @@ -43,18 +42,12 @@ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: BSBLanConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up BSBLAN device based on a config entry.""" - data: BSBLanData = hass.data[DOMAIN][entry.entry_id] - async_add_entities( - [ - BSBLANClimate( - data, - ) - ] - ) + data = entry.runtime_data + async_add_entities([BSBLANClimate(data)]) class BSBLANClimate(BSBLanEntity, ClimateEntity): diff --git a/homeassistant/components/bsblan/coordinator.py b/homeassistant/components/bsblan/coordinator.py index 508f2c898c305f..1a4299fe72f763 100644 --- a/homeassistant/components/bsblan/coordinator.py +++ b/homeassistant/components/bsblan/coordinator.py @@ -54,6 +54,9 @@ def _get_update_interval(self) -> timedelta: async def _async_update_data(self) -> BSBLanCoordinatorData: """Get state and sensor data from BSB-Lan device.""" try: + # initialize the client, this is cached and will only be called once + await self.client.initialize() + state = await self.client.state() sensor = await self.client.sensor() except BSBLANConnectionError as err: diff --git a/homeassistant/components/bsblan/diagnostics.py b/homeassistant/components/bsblan/diagnostics.py index 88418f306c82be..5a8e5c1c4c5e63 100644 --- a/homeassistant/components/bsblan/diagnostics.py +++ b/homeassistant/components/bsblan/diagnostics.py @@ -4,18 +4,16 @@ from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import BSBLanData -from .const import DOMAIN +from . 
import BSBLanConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: BSBLanConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: BSBLanData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data return { "info": data.info.to_dict(), diff --git a/homeassistant/components/bsblan/manifest.json b/homeassistant/components/bsblan/manifest.json index 6cd8608c42db24..aa9c03abf4ad2d 100644 --- a/homeassistant/components/bsblan/manifest.json +++ b/homeassistant/components/bsblan/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["bsblan"], - "requirements": ["python-bsblan==0.6.2"] + "requirements": ["python-bsblan==1.2.1"] } diff --git a/homeassistant/components/bsblan/sensor.py b/homeassistant/components/bsblan/sensor.py index 346f972ea9ae58..eab03d7a50cb6d 100644 --- a/homeassistant/components/bsblan/sensor.py +++ b/homeassistant/components/bsblan/sensor.py @@ -11,14 +11,12 @@ SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import BSBLanData -from .const import DOMAIN +from . import BSBLanConfigEntry, BSBLanData from .coordinator import BSBLanCoordinatorData from .entity import BSBLanEntity @@ -52,11 +50,11 @@ class BSBLanSensorEntityDescription(SensorEntityDescription): async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: BSBLanConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up BSB-Lan sensor based on a config entry.""" - data: BSBLanData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities(BSBLanSensor(data, description) for description in SENSOR_TYPES) diff --git a/homeassistant/components/bthome/sensor.py b/homeassistant/components/bthome/sensor.py index 64e6d61cefb8a9..417df9f5068eae 100644 --- a/homeassistant/components/bthome/sensor.py +++ b/homeassistant/components/bthome/sensor.py @@ -364,7 +364,7 @@ ): SensorEntityDescription( key=f"{BTHomeSensorDeviceClass.CONDUCTIVITY}_{Units.CONDUCTIVITY}", device_class=SensorDeviceClass.CONDUCTIVITY, - native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM, state_class=SensorStateClass.MEASUREMENT, ), } diff --git a/homeassistant/components/buienradar/__init__.py b/homeassistant/components/buienradar/__init__.py index 3bf593b2dabf98..bea0102be40f3c 100644 --- a/homeassistant/components/buienradar/__init__.py +++ b/homeassistant/components/buienradar/__init__.py @@ -6,25 +6,26 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .util import BrData PLATFORMS = [Platform.CAMERA, Platform.SENSOR, Platform.WEATHER] +type BuienRadarConfigEntry = ConfigEntry[dict[Platform, BrData]] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: BuienRadarConfigEntry) -> bool: """Set up buienradar from a config entry.""" - hass.data.setdefault(DOMAIN, {}).setdefault(entry.entry_id, {}) + entry.runtime_data = {} await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) 
entry.async_on_unload(entry.add_update_listener(async_update_options)) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BuienRadarConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - entry_data = hass.data[DOMAIN].pop(entry.entry_id) for platform in PLATFORMS: - if (data := entry_data.get(platform)) and ( + if (data := entry.runtime_data.get(platform)) and ( unsub := data.unsub_schedule_update ): unsub() @@ -32,6 +33,8 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -async def async_update_options(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def async_update_options( + hass: HomeAssistant, config_entry: BuienRadarConfigEntry +) -> None: """Update options.""" await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/buienradar/camera.py b/homeassistant/components/buienradar/camera.py index e9a7d2517cb66d..45ff2d6de52cbe 100644 --- a/homeassistant/components/buienradar/camera.py +++ b/homeassistant/components/buienradar/camera.py @@ -10,13 +10,13 @@ import voluptuous as vol from homeassistant.components.camera import Camera -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_COUNTRY_CODE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util +from . import BuienRadarConfigEntry from .const import CONF_DELTA, DEFAULT_COUNTRY, DEFAULT_DELTA, DEFAULT_DIMENSION _LOGGER = logging.getLogger(__name__) @@ -29,7 +29,9 @@ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BuienRadarConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up buienradar radar-loop camera component.""" config = entry.data diff --git a/homeassistant/components/buienradar/sensor.py b/homeassistant/components/buienradar/sensor.py index c61d8e10b852a2..afce293402e05e 100644 --- a/homeassistant/components/buienradar/sensor.py +++ b/homeassistant/components/buienradar/sensor.py @@ -28,7 +28,6 @@ SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_LATITUDE, @@ -49,10 +48,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util +from . 
import BuienRadarConfigEntry from .const import ( CONF_TIMEFRAME, DEFAULT_TIMEFRAME, - DOMAIN, STATE_CONDITION_CODES, STATE_CONDITIONS, STATE_DETAILED_CONDITIONS, @@ -690,7 +689,9 @@ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BuienRadarConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Create the buienradar sensor.""" config = entry.data @@ -723,7 +724,7 @@ async def async_setup_entry( # create weather data: data = BrData(hass, coordinates, timeframe, entities) - hass.data[DOMAIN][entry.entry_id][Platform.SENSOR] = data + entry.runtime_data[Platform.SENSOR] = data await data.async_update() async_add_entities(entities) diff --git a/homeassistant/components/buienradar/weather.py b/homeassistant/components/buienradar/weather.py index 2af66982fab640..8b71032bace603 100644 --- a/homeassistant/components/buienradar/weather.py +++ b/homeassistant/components/buienradar/weather.py @@ -39,7 +39,6 @@ WeatherEntity, WeatherEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -54,8 +53,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -# Reuse data and API logic from the sensor implementation -from .const import DEFAULT_TIMEFRAME, DOMAIN +from . import BuienRadarConfigEntry +from .const import DEFAULT_TIMEFRAME from .util import BrData _LOGGER = logging.getLogger(__name__) @@ -93,7 +92,9 @@ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: BuienRadarConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the buienradar platform.""" config = entry.data @@ -113,7 +114,7 @@ async def async_setup_entry( # create weather data: data = BrData(hass, coordinates, DEFAULT_TIMEFRAME, entities) - hass.data[DOMAIN][entry.entry_id][Platform.WEATHER] = data + entry.runtime_data[Platform.WEATHER] = data await data.async_update() async_add_entities(entities) diff --git a/homeassistant/components/caldav/__init__.py b/homeassistant/components/caldav/__init__.py index beb03cec554743..1d50e6d309a3dc 100644 --- a/homeassistant/components/caldav/__init__.py +++ b/homeassistant/components/caldav/__init__.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from .const import DOMAIN +type CalDavConfigEntry = ConfigEntry[caldav.DAVClient] _LOGGER = logging.getLogger(__name__) @@ -25,10 +25,8 @@ PLATFORMS: list[Platform] = [Platform.CALENDAR, Platform.TODO] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: CalDavConfigEntry) -> bool: """Set up CalDAV from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - client = caldav.DAVClient( entry.data[CONF_URL], username=entry.data[CONF_USERNAME], @@ -50,7 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except DAVError as err: raise ConfigEntryNotReady("CalDAV client error") from err - hass.data[DOMAIN][entry.entry_id] = client + entry.runtime_data = client await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/caldav/calendar.py b/homeassistant/components/caldav/calendar.py index 7591722b1abe5c..fb53947a7237c1 100644 --- 
a/homeassistant/components/caldav/calendar.py +++ b/homeassistant/components/caldav/calendar.py @@ -15,7 +15,6 @@ CalendarEvent, is_offset_reached, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_NAME, CONF_PASSWORD, @@ -30,8 +29,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import CalDavConfigEntry from .api import async_get_calendars -from .const import DOMAIN from .coordinator import CalDavUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -110,6 +109,7 @@ async def async_setup_platform( entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) coordinator = CalDavUpdateCoordinator( hass, + None, calendar=calendar, days=days, include_all_day=True, @@ -127,6 +127,7 @@ async def async_setup_platform( entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) coordinator = CalDavUpdateCoordinator( hass, + None, calendar=calendar, days=days, include_all_day=False, @@ -141,12 +142,11 @@ async def async_setup_platform( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CalDavConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CalDav calendar platform for a config entry.""" - client: caldav.DAVClient = hass.data[DOMAIN][entry.entry_id] - calendars = await async_get_calendars(hass, client, SUPPORTED_COMPONENT) + calendars = await async_get_calendars(hass, entry.runtime_data, SUPPORTED_COMPONENT) async_add_entities( ( WebDavCalendarEntity( @@ -154,6 +154,7 @@ async def async_setup_entry( async_generate_entity_id(ENTITY_ID_FORMAT, calendar.name, hass=hass), CalDavUpdateCoordinator( hass, + entry, calendar=calendar, days=CONFIG_ENTRY_DEFAULT_DAYS, include_all_day=True, @@ -206,7 +207,8 @@ def _handle_coordinator_update(self) -> None: if self._supports_offset: self._attr_extra_state_attributes = { "offset_reached": is_offset_reached( - self._event.start_datetime_local, self.coordinator.offset + self._event.start_datetime_local, + self.coordinator.offset, # type: ignore[arg-type] ) if self._event else False diff --git a/homeassistant/components/caldav/coordinator.py b/homeassistant/components/caldav/coordinator.py index 3a10b567167100..eb09e3f5452226 100644 --- a/homeassistant/components/caldav/coordinator.py +++ b/homeassistant/components/caldav/coordinator.py @@ -6,6 +6,9 @@ from functools import partial import logging import re +from typing import TYPE_CHECKING + +import caldav from homeassistant.components.calendar import CalendarEvent, extract_offset from homeassistant.core import HomeAssistant @@ -14,6 +17,9 @@ from .api import get_attr_value +if TYPE_CHECKING: + from . 
import CalDavConfigEntry + _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) @@ -23,11 +29,20 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]): """Class to utilize the calendar dav client object to get next event.""" - def __init__(self, hass, calendar, days, include_all_day, search): + def __init__( + self, + hass: HomeAssistant, + entry: CalDavConfigEntry | None, + calendar: caldav.Calendar, + days: int, + include_all_day: bool, + search: str | None, + ) -> None: """Set up how we are going to search the WebDav calendar.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=f"CalDAV {calendar.name}", update_interval=MIN_TIME_BETWEEN_UPDATES, ) @@ -35,7 +50,7 @@ def __init__(self, hass, calendar, days, include_all_day, search): self.days = days self.include_all_day = include_all_day self.search = search - self.offset = None + self.offset: timedelta | None = None async def async_get_events( self, hass: HomeAssistant, start_date: datetime, end_date: datetime @@ -109,7 +124,7 @@ async def _async_update_data(self) -> CalendarEvent | None: _start_of_tomorrow = start_of_tomorrow if _start_of_today <= start_dt < _start_of_tomorrow: new_event = event.copy() - new_vevent = new_event.instance.vevent + new_vevent = new_event.instance.vevent # type: ignore[attr-defined] if hasattr(new_vevent, "dtend"): dur = new_vevent.dtend.value - new_vevent.dtstart.value new_vevent.dtend.value = start_dt + dur diff --git a/homeassistant/components/caldav/todo.py b/homeassistant/components/caldav/todo.py index e8cd4fc9334b87..cbd7963b595a21 100644 --- a/homeassistant/components/caldav/todo.py +++ b/homeassistant/components/caldav/todo.py @@ -18,14 +18,13 @@ TodoListEntity, TodoListEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util +from . import CalDavConfigEntry from .api import async_get_calendars, get_attr_value -from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -46,12 +45,11 @@ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: CalDavConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the CalDav todo platform for a config entry.""" - client: caldav.DAVClient = hass.data[DOMAIN][entry.entry_id] - calendars = await async_get_calendars(hass, client, SUPPORTED_COMPONENT) + calendars = await async_get_calendars(hass, entry.runtime_data, SUPPORTED_COMPONENT) async_add_entities( ( WebDavTodoListEntity( diff --git a/homeassistant/components/calendar/icons.json b/homeassistant/components/calendar/icons.json index 9b8df3ec6d3f0a..a28adcf317e321 100644 --- a/homeassistant/components/calendar/icons.json +++ b/homeassistant/components/calendar/icons.json @@ -14,9 +14,6 @@ }, "get_events": { "service": "mdi:calendar-month" - }, - "list_events": { - "service": "mdi:calendar-month" } } } diff --git a/homeassistant/components/calendar/services.yaml b/homeassistant/components/calendar/services.yaml index 2e926fbdeed3a8..9701293c0bea2d 100644 --- a/homeassistant/components/calendar/services.yaml +++ b/homeassistant/components/calendar/services.yaml @@ -36,22 +36,6 @@ create_event: example: "Conference Room - F123, Bldg. 
002" selector: text: -list_events: - target: - entity: - domain: calendar - fields: - start_date_time: - example: "2022-03-22 20:00:00" - selector: - datetime: - end_date_time: - example: "2022-03-22 22:00:00" - selector: - datetime: - duration: - selector: - duration: get_events: target: entity: diff --git a/homeassistant/components/calendar/strings.json b/homeassistant/components/calendar/strings.json index 5b76a33f7c3745..76e6c42b6663d8 100644 --- a/homeassistant/components/calendar/strings.json +++ b/homeassistant/components/calendar/strings.json @@ -89,24 +89,6 @@ "description": "Returns active events from start_date_time until the specified duration." } } - }, - "list_events": { - "name": "List event", - "description": "Lists events on a calendar within a time range.", - "fields": { - "start_date_time": { - "name": "[%key:component::calendar::services::get_events::fields::start_date_time::name%]", - "description": "[%key:component::calendar::services::get_events::fields::start_date_time::description%]" - }, - "end_date_time": { - "name": "[%key:component::calendar::services::get_events::fields::end_date_time::name%]", - "description": "[%key:component::calendar::services::get_events::fields::end_date_time::description%]" - }, - "duration": { - "name": "[%key:component::calendar::services::get_events::fields::duration::name%]", - "description": "[%key:component::calendar::services::get_events::fields::duration::description%]" - } - } } }, "issues": { diff --git a/homeassistant/components/cambridge_audio/__init__.py b/homeassistant/components/cambridge_audio/__init__.py index 5060d12cfe1816..a584f0db6c19f4 100644 --- a/homeassistant/components/cambridge_audio/__init__.py +++ b/homeassistant/components/cambridge_audio/__init__.py @@ -13,9 +13,9 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import CONNECT_TIMEOUT, STREAM_MAGIC_EXCEPTIONS +from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS -PLATFORMS: list[Platform] = [Platform.MEDIA_PLAYER] +PLATFORMS: list[Platform] = [Platform.MEDIA_PLAYER, Platform.SELECT, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) @@ -45,7 +45,13 @@ async def _connection_update_callback( async with asyncio.timeout(CONNECT_TIMEOUT): await client.connect() except STREAM_MAGIC_EXCEPTIONS as err: - raise ConfigEntryNotReady(f"Error while connecting to {client.host}") from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="entry_cannot_connect", + translation_placeholders={ + "host": client.host, + }, + ) from err entry.runtime_data = client await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/cambridge_audio/const.py b/homeassistant/components/cambridge_audio/const.py index 5a4e5a1f2e053c..eae417ffe39cc7 100644 --- a/homeassistant/components/cambridge_audio/const.py +++ b/homeassistant/components/cambridge_audio/const.py @@ -17,3 +17,7 @@ ) CONNECT_TIMEOUT = 5 + +CAMBRIDGE_MEDIA_TYPE_PRESET = "preset" +CAMBRIDGE_MEDIA_TYPE_AIRABLE = "airable" +CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO = "internet_radio" diff --git a/homeassistant/components/cambridge_audio/diagnostics.py b/homeassistant/components/cambridge_audio/diagnostics.py index b4295e7c885a1c..a670b1f32eb6c6 100644 --- a/homeassistant/components/cambridge_audio/diagnostics.py +++ b/homeassistant/components/cambridge_audio/diagnostics.py @@ -2,20 +2,22 @@ from typing import Any -from homeassistant.const import CONF_HOST from 
homeassistant.core import HomeAssistant -from homeassistant.helpers.redact import async_redact_data from . import CambridgeAudioConfigEntry -TO_REDACT = {CONF_HOST} - async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: CambridgeAudioConfigEntry ) -> dict[str, Any]: """Return diagnostics for the provided config entry.""" client = entry.runtime_data - return async_redact_data( - {"info": client.info, "sources": client.sources}, TO_REDACT - ) + return { + "display": client.display.to_dict(), + "info": client.info.to_dict(), + "now_playing": client.now_playing.to_dict(), + "play_state": client.play_state.to_dict(), + "presets_list": client.preset_list.to_dict(), + "sources": [s.to_dict() for s in client.sources], + "update": client.update.to_dict(), + } diff --git a/homeassistant/components/cambridge_audio/entity.py b/homeassistant/components/cambridge_audio/entity.py index ac43a6737250a5..de7a3e31765ddc 100644 --- a/homeassistant/components/cambridge_audio/entity.py +++ b/homeassistant/components/cambridge_audio/entity.py @@ -26,7 +26,12 @@ async def decorator(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None await func(self, *args, **kwargs) except STREAM_MAGIC_EXCEPTIONS as exc: raise HomeAssistantError( - f"Error executing {func.__name__} on entity {self.entity_id}," + translation_domain=DOMAIN, + translation_key="command_error", + translation_placeholders={ + "function_name": func.__name__, + "entity_id": self.entity_id, + }, ) from exc return decorator @@ -62,4 +67,4 @@ async def async_added_to_hass(self) -> None: async def async_will_remove_from_hass(self) -> None: """Remove callbacks.""" - await self.client.unregister_state_update_callbacks(self._state_update_callback) + self.client.unregister_state_update_callbacks(self._state_update_callback) diff --git a/homeassistant/components/cambridge_audio/icons.json b/homeassistant/components/cambridge_audio/icons.json new file mode 100644 index 00000000000000..b4346a7fe8e517 --- /dev/null +++ b/homeassistant/components/cambridge_audio/icons.json @@ -0,0 +1,28 @@ +{ + "entity": { + "select": { + "display_brightness": { + "default": "mdi:brightness-7", + "state": { + "bright": "mdi:brightness-7", + "dim": "mdi:brightness-6", + "off": "mdi:brightness-3" + } + }, + "audio_output": { + "default": "mdi:audio-input-stereo-minijack" + } + }, + "switch": { + "pre_amp": { + "default": "mdi:volume-high", + "state": { + "off": "mdi:volume-low" + } + }, + "early_update": { + "default": "mdi:update" + } + } + } +} diff --git a/homeassistant/components/cambridge_audio/manifest.json b/homeassistant/components/cambridge_audio/manifest.json index 232e3d8e2aa020..edacd17f54dd20 100644 --- a/homeassistant/components/cambridge_audio/manifest.json +++ b/homeassistant/components/cambridge_audio/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aiostreammagic"], - "requirements": ["aiostreammagic==2.5.0"], + "requirements": ["aiostreammagic==2.8.4"], "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."] } diff --git a/homeassistant/components/cambridge_audio/media_player.py b/homeassistant/components/cambridge_audio/media_player.py index 1c490cd6ac9e06..5e340cdd21ecca 100644 --- a/homeassistant/components/cambridge_audio/media_player.py +++ b/homeassistant/components/cambridge_audio/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import datetime +from typing import Any from aiostreammagic import ( RepeatMode as CambridgeRepeatMode, @@ -21,14 
+22,22 @@ ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .const import ( + CAMBRIDGE_MEDIA_TYPE_AIRABLE, + CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO, + CAMBRIDGE_MEDIA_TYPE_PRESET, + DOMAIN, +) from .entity import CambridgeAudioEntity, command BASE_FEATURES = ( MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON + | MediaPlayerEntityFeature.PLAY_MEDIA ) PREAMP_FEATURES = ( @@ -168,12 +177,9 @@ def volume_level(self) -> float | None: return volume / 100 @property - def shuffle(self) -> bool | None: + def shuffle(self) -> bool: """Current shuffle configuration.""" - mode_shuffle = self.client.play_state.mode_shuffle - if not mode_shuffle: - return False - return mode_shuffle != ShuffleMode.OFF + return self.client.play_state.mode_shuffle != ShuffleMode.OFF @property def repeat(self) -> RepeatMode | None: @@ -285,3 +291,48 @@ async def async_set_repeat(self, repeat: RepeatMode) -> None: if repeat in {RepeatMode.ALL, RepeatMode.ONE}: repeat_mode = CambridgeRepeatMode.ALL await self.client.set_repeat(repeat_mode) + + @command + async def async_play_media( + self, media_type: MediaType | str, media_id: str, **kwargs: Any + ) -> None: + """Play media on the Cambridge Audio device.""" + + if media_type not in { + CAMBRIDGE_MEDIA_TYPE_PRESET, + CAMBRIDGE_MEDIA_TYPE_AIRABLE, + CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO, + }: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unsupported_media_type", + translation_placeholders={"media_type": media_type}, + ) + + if media_type == CAMBRIDGE_MEDIA_TYPE_PRESET: + try: + preset_id = int(media_id) + except ValueError as ve: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="preset_non_integer", + translation_placeholders={"preset_id": media_id}, + ) from ve + preset = None + for _preset in self.client.preset_list.presets: + if _preset.preset_id == preset_id: + preset = _preset + if not preset: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_preset", + translation_placeholders={"preset_id": media_id}, + ) + await self.client.recall_preset(preset.preset_id) + + if media_type == CAMBRIDGE_MEDIA_TYPE_AIRABLE: + preset_id = int(media_id) + await self.client.play_radio_airable("Radio", preset_id) + + if media_type == CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO: + await self.client.play_radio_url("Radio", media_id) diff --git a/homeassistant/components/cambridge_audio/select.py b/homeassistant/components/cambridge_audio/select.py new file mode 100644 index 00000000000000..ca6eebdec6b8cc --- /dev/null +++ b/homeassistant/components/cambridge_audio/select.py @@ -0,0 +1,116 @@ +"""Support for Cambridge Audio select entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass, field + +from aiostreammagic import StreamMagicClient +from aiostreammagic.models import DisplayBrightness + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .entity import CambridgeAudioEntity + + +@dataclass(frozen=True, kw_only=True) +class 
CambridgeAudioSelectEntityDescription(SelectEntityDescription): + """Describes Cambridge Audio select entity.""" + + options_fn: Callable[[StreamMagicClient], list[str]] = field(default=lambda _: []) + load_fn: Callable[[StreamMagicClient], bool] = field(default=lambda _: True) + value_fn: Callable[[StreamMagicClient], str | None] + set_value_fn: Callable[[StreamMagicClient, str], Awaitable[None]] + + +async def _audio_output_set_value_fn(client: StreamMagicClient, value: str) -> None: + """Set the audio output using the display name.""" + audio_output_id = next( + (output.id for output in client.audio_output.outputs if value == output.name), + None, + ) + assert audio_output_id is not None + await client.set_audio_output(audio_output_id) + + +def _audio_output_value_fn(client: StreamMagicClient) -> str | None: + """Convert the current audio output id to name.""" + return next( + ( + output.name + for output in client.audio_output.outputs + if client.state.audio_output == output.id + ), + None, + ) + + +CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] = ( + CambridgeAudioSelectEntityDescription( + key="display_brightness", + translation_key="display_brightness", + options=[x.value for x in DisplayBrightness], + entity_category=EntityCategory.CONFIG, + value_fn=lambda client: client.display.brightness, + set_value_fn=lambda client, value: client.set_display_brightness( + DisplayBrightness(value) + ), + ), + CambridgeAudioSelectEntityDescription( + key="audio_output", + translation_key="audio_output", + entity_category=EntityCategory.CONFIG, + options_fn=lambda client: [ + output.name for output in client.audio_output.outputs + ], + load_fn=lambda client: len(client.audio_output.outputs) > 0, + value_fn=_audio_output_value_fn, + set_value_fn=_audio_output_set_value_fn, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Cambridge Audio select entities based on a config entry.""" + + client: StreamMagicClient = entry.runtime_data + entities: list[CambridgeAudioSelect] = [ + CambridgeAudioSelect(client, description) + for description in CONTROL_ENTITIES + if description.load_fn(client) + ] + async_add_entities(entities) + + +class CambridgeAudioSelect(CambridgeAudioEntity, SelectEntity): + """Defines a Cambridge Audio select entity.""" + + entity_description: CambridgeAudioSelectEntityDescription + + def __init__( + self, + client: StreamMagicClient, + description: CambridgeAudioSelectEntityDescription, + ) -> None: + """Initialize Cambridge Audio select.""" + super().__init__(client) + self.entity_description = description + self._attr_unique_id = f"{client.info.unit_id}-{description.key}" + options_fn = description.options_fn(client) + if options_fn: + self._attr_options = options_fn + + @property + def current_option(self) -> str | None: + """Return the state of the select.""" + return self.entity_description.value_fn(self.client) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.set_value_fn(self.client, option) diff --git a/homeassistant/components/cambridge_audio/strings.json b/homeassistant/components/cambridge_audio/strings.json index fa27dc452de552..c368ba060a7848 100644 --- a/homeassistant/components/cambridge_audio/strings.json +++ b/homeassistant/components/cambridge_audio/strings.json @@ -22,5 +22,45 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", 
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } + }, + "entity": { + "select": { + "display_brightness": { + "name": "Display brightness", + "state": { + "bright": "Bright", + "dim": "Dim", + "off": "[%key:common::state::off%]" + } + }, + "audio_output": { + "name": "Audio output" + } + }, + "switch": { + "pre_amp": { + "name": "Pre-Amp" + }, + "early_update": { + "name": "Early update" + } + } + }, + "exceptions": { + "unsupported_media_type": { + "message": "Unsupported media type for Cambridge Audio device: {media_type}" + }, + "missing_preset": { + "message": "Missing preset for media_id: {preset_id}" + }, + "preset_non_integer": { + "message": "Preset must be an integer, got: {preset_id}" + }, + "entry_cannot_connect": { + "message": "Error while connecting to {host}" + }, + "command_error": { + "message": "Error executing {function_name} on entity {entity_id}" + } } } diff --git a/homeassistant/components/cambridge_audio/switch.py b/homeassistant/components/cambridge_audio/switch.py new file mode 100644 index 00000000000000..3209b275d4689f --- /dev/null +++ b/homeassistant/components/cambridge_audio/switch.py @@ -0,0 +1,82 @@ +"""Support for Cambridge Audio switch entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from aiostreammagic import StreamMagicClient + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .entity import CambridgeAudioEntity + + +@dataclass(frozen=True, kw_only=True) +class CambridgeAudioSwitchEntityDescription(SwitchEntityDescription): + """Describes Cambridge Audio switch entity.""" + + value_fn: Callable[[StreamMagicClient], bool] + set_value_fn: Callable[[StreamMagicClient, bool], Awaitable[None]] + + +CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] 
= ( + CambridgeAudioSwitchEntityDescription( + key="pre_amp", + translation_key="pre_amp", + entity_category=EntityCategory.CONFIG, + value_fn=lambda client: client.state.pre_amp_mode, + set_value_fn=lambda client, value: client.set_pre_amp_mode(value), + ), + CambridgeAudioSwitchEntityDescription( + key="early_update", + translation_key="early_update", + entity_category=EntityCategory.CONFIG, + value_fn=lambda client: client.update.early_update, + set_value_fn=lambda client, value: client.set_early_update(value), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Cambridge Audio switch entities based on a config entry.""" + async_add_entities( + CambridgeAudioSwitch(entry.runtime_data, description) + for description in CONTROL_ENTITIES + ) + + +class CambridgeAudioSwitch(CambridgeAudioEntity, SwitchEntity): + """Defines a Cambridge Audio switch entity.""" + + entity_description: CambridgeAudioSwitchEntityDescription + + def __init__( + self, + client: StreamMagicClient, + description: CambridgeAudioSwitchEntityDescription, + ) -> None: + """Initialize Cambridge Audio switch.""" + super().__init__(client) + self.entity_description = description + self._attr_unique_id = f"{client.info.unit_id}-{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.entity_description.value_fn(self.client) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.entity_description.set_value_fn(self.client, True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.entity_description.set_value_fn(self.client, False) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index e0d3ce1e4c2220..d31d21d424c849 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -4,9 +4,9 @@ import asyncio import collections -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Coroutine from contextlib import suppress -from dataclasses import asdict +from dataclasses import asdict, dataclass from datetime import datetime, timedelta from enum import IntFlag from functools import partial @@ -18,8 +18,9 @@ from aiohttp import hdrs, web import attr -from propcache import cached_property +from propcache import cached_property, under_cached_property import voluptuous as vol +from webrtc_models import RTCIceCandidate, RTCIceServer from homeassistant.components import websocket_api from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView @@ -49,7 +50,7 @@ ) from homeassistant.core import Event, HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.deprecation import ( DeprecatedConstantEnum, all_with_deprecated_constants, @@ -85,13 +86,20 @@ from .prefs import CameraPreferences, DynamicStreamSettings # noqa: F401 from .webrtc import ( DATA_ICE_SERVERS, + CameraWebRTCLegacyProvider, CameraWebRTCProvider, - RTCIceServer, + WebRTCAnswer, + WebRTCCandidate, # noqa: F401 WebRTCClientConfiguration, - async_get_supported_providers, + WebRTCError, + WebRTCMessage, # noqa: F401 + WebRTCSendMessage, + 
async_get_supported_legacy_provider, + async_get_supported_provider, + async_register_ice_servers, async_register_rtsp_to_web_rtc_provider, # noqa: F401 - register_ice_server, - ws_get_client_config, + async_register_webrtc_provider, # noqa: F401 + async_register_ws, ) _LOGGER = logging.getLogger(__name__) @@ -169,6 +177,13 @@ class Image: content: bytes = attr.ib() +@dataclass(frozen=True) +class CameraCapabilities: + """Camera capabilities.""" + + frontend_stream_types: set[StreamType] + + @bind_hass async def async_request_stream(hass: HomeAssistant, entity_id: str, fmt: str) -> str: """Request a stream for a camera entity.""" @@ -342,10 +357,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.http.register_view(CameraMjpegStream(component)) websocket_api.async_register_command(hass, ws_camera_stream) - websocket_api.async_register_command(hass, ws_camera_web_rtc_offer) websocket_api.async_register_command(hass, websocket_get_prefs) websocket_api.async_register_command(hass, websocket_update_prefs) - websocket_api.async_register_command(hass, ws_get_client_config) + websocket_api.async_register_command(hass, ws_camera_capabilities) + async_register_ws(hass) await component.async_setup(config) @@ -401,12 +416,20 @@ def unsub_track_time_interval(_event: Event) -> None: SERVICE_RECORD, CAMERA_SERVICE_RECORD, async_handle_record_service ) - async def get_ice_server() -> RTCIceServer: - # The following servers will replaced before the next stable release with - # STUN server provided by Home Assistant. Used Google ones for testing purposes. - return RTCIceServer(urls="stun:stun.l.google.com:19302") + @callback + def get_ice_servers() -> list[RTCIceServer]: + if hass.config.webrtc.ice_servers: + return hass.config.webrtc.ice_servers + return [ + RTCIceServer( + urls=[ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + ), + ] - register_ice_server(hass, get_ice_server) + async_register_ice_servers(hass, get_ice_servers) return True @@ -453,8 +476,11 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_state: None = None # State is determined by is_on _attr_supported_features: CameraEntityFeature = CameraEntityFeature(0) + __supports_stream: CameraEntityFeature | None = None + def __init__(self) -> None: """Initialize a camera.""" + self._cache: dict[str, Any] = {} self.stream: Stream | None = None self.stream_options: dict[str, str | bool | float] = {} self.content_type: str = DEFAULT_CONTENT_TYPE @@ -462,7 +488,15 @@ def __init__(self) -> None: self._warned_old_signature = False self.async_update_token() self._create_stream_lock: asyncio.Lock | None = None - self._webrtc_providers: list[CameraWebRTCProvider] = [] + self._webrtc_provider: CameraWebRTCProvider | None = None + self._legacy_webrtc_provider: CameraWebRTCLegacyProvider | None = None + self._supports_native_sync_webrtc = ( + type(self).async_handle_web_rtc_offer != Camera.async_handle_web_rtc_offer + ) + self._supports_native_async_webrtc = ( + type(self).async_handle_async_webrtc_offer + != Camera.async_handle_async_webrtc_offer + ) @cached_property def entity_picture(self) -> str: @@ -536,7 +570,7 @@ def frontend_stream_type(self) -> StreamType | None: return self._attr_frontend_stream_type if CameraEntityFeature.STREAM not in self.supported_features_compat: return None - if self._webrtc_providers: + if self._webrtc_provider or self._legacy_webrtc_provider: return StreamType.WEB_RTC return StreamType.HLS @@ -586,12 +620,66 @@ async def 
async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: Integrations can override with a native WebRTC implementation. """ - for provider in self._webrtc_providers: - if answer := await provider.async_handle_web_rtc_offer(self, offer_sdp): - return answer - raise HomeAssistantError( - "WebRTC offer was not accepted by the supported providers" - ) + + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + """Handle the async WebRTC offer. + + Async means that it could take some time to process the offer and responses/message + will be sent with the send_message callback. + This method is used by cameras with CameraEntityFeature.STREAM and StreamType.WEB_RTC. + An integration overriding this method must also implement async_on_webrtc_candidate. + + Integrations can override with a native WebRTC implementation. + """ + if self._supports_native_sync_webrtc: + try: + answer = await self.async_handle_web_rtc_offer(offer_sdp) + except ValueError as ex: + _LOGGER.error("Error handling WebRTC offer: %s", ex) + send_message( + WebRTCError( + "webrtc_offer_failed", + str(ex), + ) + ) + except TimeoutError: + # This catch was already here and should stay through the deprecation + _LOGGER.error("Timeout handling WebRTC offer") + send_message( + WebRTCError( + "webrtc_offer_failed", + "Timeout handling WebRTC offer", + ) + ) + else: + if answer: + send_message(WebRTCAnswer(answer)) + else: + _LOGGER.error("Error handling WebRTC offer: No answer") + send_message( + WebRTCError( + "webrtc_offer_failed", + "No answer on WebRTC offer", + ) + ) + return + + if self._webrtc_provider: + await self._webrtc_provider.async_handle_async_webrtc_offer( + self, offer_sdp, session_id, send_message + ) + return + + if self._legacy_webrtc_provider and ( + answer := await self._legacy_webrtc_provider.async_handle_web_rtc_offer( + self, offer_sdp + ) + ): + send_message(WebRTCAnswer(answer)) + else: + raise HomeAssistantError("Camera does not support WebRTC") def camera_image( self, width: int | None = None, height: int | None = None @@ -701,55 +789,133 @@ def async_update_token(self) -> None: async def async_internal_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_internal_added_to_hass() - # Avoid calling async_refresh_providers() in here because it - # it will write state a second time since state is always - # written when an entity is added to hass. - self._webrtc_providers = await self._async_get_supported_webrtc_providers() + self.__supports_stream = ( + self.supported_features_compat & CameraEntityFeature.STREAM + ) + await self.async_refresh_providers(write_state=False) - async def async_refresh_providers(self) -> None: + async def async_refresh_providers(self, *, write_state: bool = True) -> None: """Determine if any of the registered providers are suitable for this entity. This affects state attributes, so it should be invoked any time the registered providers or inputs to the state attributes change. 
- - Returns True if any state was updated (and needs to be written) """ - old_providers = self._webrtc_providers - new_providers = await self._async_get_supported_webrtc_providers() - self._webrtc_providers = new_providers - if old_providers != new_providers: - self.async_write_ha_state() - - async def _async_get_supported_webrtc_providers( - self, - ) -> list[CameraWebRTCProvider]: - """Get the all providers that supports this camera.""" - if CameraEntityFeature.STREAM not in self.supported_features_compat: - return [] + old_provider = self._webrtc_provider + old_legacy_provider = self._legacy_webrtc_provider + new_provider = None + new_legacy_provider = None + + # Skip all providers if the camera has a native WebRTC implementation + if not ( + self._supports_native_sync_webrtc or self._supports_native_async_webrtc + ): + # Camera doesn't have a native WebRTC implementation + new_provider = await self._async_get_supported_webrtc_provider( + async_get_supported_provider + ) + + if new_provider is None: + # Only add the legacy provider if the new provider is not available + new_legacy_provider = await self._async_get_supported_webrtc_provider( + async_get_supported_legacy_provider + ) - return await async_get_supported_providers(self.hass, self) + if old_provider != new_provider or old_legacy_provider != new_legacy_provider: + self._webrtc_provider = new_provider + self._legacy_webrtc_provider = new_legacy_provider + self._invalidate_camera_capabilities_cache() + if write_state: + self.async_write_ha_state() + + async def _async_get_supported_webrtc_provider[_T]( + self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]] + ) -> _T | None: + """Get first provider that supports this camera.""" + if CameraEntityFeature.STREAM not in self.supported_features_compat: + return None - @property - def webrtc_providers(self) -> list[CameraWebRTCProvider]: - """Return the WebRTC providers.""" - return self._webrtc_providers + return await fn(self.hass, self) - async def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: + @callback + def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: """Return the WebRTC client configuration adjustable per integration.""" return WebRTCClientConfiguration() @final - async def async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: + @callback + def async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: """Return the WebRTC client configuration and extend it with the registered ice servers.""" - config = await self._async_get_webrtc_client_configuration() - - ice_servers = await asyncio.gather( - *[server() for server in self.hass.data.get(DATA_ICE_SERVERS, [])] + config = self._async_get_webrtc_client_configuration() + + if not self._supports_native_sync_webrtc: + # Until 2024.11, the frontend was not resolving any ice servers + # The async approach was added 2024.11 and new integrations need to use it + ice_servers = [ + server + for servers in self.hass.data.get(DATA_ICE_SERVERS, []) + for server in servers() + ] + config.configuration.ice_servers.extend(ice_servers) + + config.get_candidates_upfront = ( + self._supports_native_sync_webrtc + or self._legacy_webrtc_provider is not None ) - config.configuration.ice_servers.extend(ice_servers) return config + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle a WebRTC candidate.""" + if self._webrtc_provider: + await 
self._webrtc_provider.async_on_webrtc_candidate(session_id, candidate) + else: + raise HomeAssistantError("Cannot handle WebRTC candidate") + + @callback + def close_webrtc_session(self, session_id: str) -> None: + """Close a WebRTC session.""" + if self._webrtc_provider: + self._webrtc_provider.async_close_session(session_id) + + @callback + def _invalidate_camera_capabilities_cache(self) -> None: + """Invalidate the camera capabilities cache.""" + self._cache.pop("camera_capabilities", None) + + @final + @under_cached_property + def camera_capabilities(self) -> CameraCapabilities: + """Return the camera capabilities.""" + frontend_stream_types = set() + if CameraEntityFeature.STREAM in self.supported_features_compat: + if self._supports_native_sync_webrtc or self._supports_native_async_webrtc: + # The camera has a native WebRTC implementation + frontend_stream_types.add(StreamType.WEB_RTC) + else: + frontend_stream_types.add(StreamType.HLS) + + if self._webrtc_provider: + frontend_stream_types.add(StreamType.WEB_RTC) + + return CameraCapabilities(frontend_stream_types) + + @callback + def async_write_ha_state(self) -> None: + """Write the state to the state machine. + + Schedules async_refresh_providers if support of streams have changed. + """ + super().async_write_ha_state() + if self.__supports_stream != ( + supports_stream := self.supported_features_compat + & CameraEntityFeature.STREAM + ): + self.__supports_stream = supports_stream + self._invalidate_camera_capabilities_cache() + self.hass.async_create_task(self.async_refresh_providers()) + class CameraView(HomeAssistantView): """Base CameraView.""" @@ -842,79 +1008,50 @@ async def handle(self, request: web.Request, camera: Camera) -> web.StreamRespon @websocket_api.websocket_command( { - vol.Required("type"): "camera/stream", + vol.Required("type"): "camera/capabilities", vol.Required("entity_id"): cv.entity_id, - vol.Optional("format", default="hls"): vol.In(OUTPUT_FORMATS), } ) @websocket_api.async_response -async def ws_camera_stream( +async def ws_camera_capabilities( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: - """Handle get camera stream websocket command. + """Handle get camera capabilities websocket command. Async friendly. """ - try: - entity_id = msg["entity_id"] - camera = get_camera_from_entity_id(hass, entity_id) - url = await _async_stream_endpoint_url(hass, camera, fmt=msg["format"]) - connection.send_result(msg["id"], {"url": url}) - except HomeAssistantError as ex: - _LOGGER.error("Error requesting stream: %s", ex) - connection.send_error(msg["id"], "start_stream_failed", str(ex)) - except TimeoutError: - _LOGGER.error("Timeout getting stream source") - connection.send_error( - msg["id"], "start_stream_failed", "Timeout getting stream source" - ) + camera = get_camera_from_entity_id(hass, msg["entity_id"]) + connection.send_result(msg["id"], asdict(camera.camera_capabilities)) @websocket_api.websocket_command( { - vol.Required("type"): "camera/web_rtc_offer", + vol.Required("type"): "camera/stream", vol.Required("entity_id"): cv.entity_id, - vol.Required("offer"): str, + vol.Optional("format", default="hls"): vol.In(OUTPUT_FORMATS), } ) @websocket_api.async_response -async def ws_camera_web_rtc_offer( +async def ws_camera_stream( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: - """Handle the signal path for a WebRTC stream. 
- - This signal path is used to route the offer created by the client to the - camera device through the integration for negotiation on initial setup, - which returns an answer. The actual streaming is handled entirely between - the client and camera device. + """Handle get camera stream websocket command. Async friendly. """ - entity_id = msg["entity_id"] - offer = msg["offer"] - camera = get_camera_from_entity_id(hass, entity_id) - if camera.frontend_stream_type != StreamType.WEB_RTC: - connection.send_error( - msg["id"], - "web_rtc_offer_failed", - ( - "Camera does not support WebRTC," - f" frontend_stream_type={camera.frontend_stream_type}" - ), - ) - return try: - answer = await camera.async_handle_web_rtc_offer(offer) - except (HomeAssistantError, ValueError) as ex: - _LOGGER.error("Error handling WebRTC offer: %s", ex) - connection.send_error(msg["id"], "web_rtc_offer_failed", str(ex)) + entity_id = msg["entity_id"] + camera = get_camera_from_entity_id(hass, entity_id) + url = await _async_stream_endpoint_url(hass, camera, fmt=msg["format"]) + connection.send_result(msg["id"], {"url": url}) + except HomeAssistantError as ex: + _LOGGER.error("Error requesting stream: %s", ex) + connection.send_error(msg["id"], "start_stream_failed", str(ex)) except TimeoutError: - _LOGGER.error("Timeout handling WebRTC offer") + _LOGGER.error("Timeout getting stream source") connection.send_error( - msg["id"], "web_rtc_offer_failed", "Timeout handling WebRTC offer" + msg["id"], "start_stream_failed", "Timeout getting stream source" ) - else: - connection.send_result(msg["id"], {"answer": answer}) @websocket_api.websocket_command( @@ -959,6 +1096,46 @@ async def websocket_update_prefs( connection.send_result(msg["id"], entity_prefs) +class _TemplateCameraEntity: + """Class to warn when the `entity_id` template variable is accessed. + + Can be removed in HA Core 2025.6. 
+ """ + + def __init__(self, camera: Camera, service: str) -> None: + """Initialize.""" + self._camera = camera + self._entity_id = camera.entity_id + self._hass = camera.hass + self._service = service + + def _report_issue(self) -> None: + """Create a repair issue.""" + ir.async_create_issue( + self._hass, + DOMAIN, + f"deprecated_filename_template_{self._entity_id}_{self._service}", + breaks_in_ha_version="2025.6.0", + is_fixable=True, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_filename_template", + translation_placeholders={ + "entity_id": self._entity_id, + "service": f"{DOMAIN}.{self._service}", + }, + ) + + def __getattr__(self, name: str) -> Any: + """Forward to the camera entity.""" + self._report_issue() + return getattr(self._camera, name) + + def __str__(self) -> str: + """Forward to the camera entity.""" + self._report_issue() + return str(self._camera) + + async def async_handle_snapshot_service( camera: Camera, service_call: ServiceCall ) -> None: @@ -966,7 +1143,9 @@ async def async_handle_snapshot_service( hass = camera.hass filename: Template = service_call.data[ATTR_FILENAME] - snapshot_file = filename.async_render(variables={ATTR_ENTITY_ID: camera}) + snapshot_file = filename.async_render( + variables={ATTR_ENTITY_ID: _TemplateCameraEntity(camera, SERVICE_SNAPSHOT)} + ) # check if we allow to access to that file if not hass.config.is_allowed_path(snapshot_file): @@ -1042,7 +1221,9 @@ async def async_handle_record_service( raise HomeAssistantError(f"{camera.entity_id} does not support record service") filename = service_call.data[CONF_FILENAME] - video_path = filename.async_render(variables={ATTR_ENTITY_ID: camera}) + video_path = filename.async_render( + variables={ATTR_ENTITY_ID: _TemplateCameraEntity(camera, SERVICE_RECORD)} + ) await stream.async_record( video_path, diff --git a/homeassistant/components/camera/strings.json b/homeassistant/components/camera/strings.json index 90b053ec0877d4..4a7e9aafc6e4ab 100644 --- a/homeassistant/components/camera/strings.json +++ b/homeassistant/components/camera/strings.json @@ -35,6 +35,23 @@ } } }, + "issues": { + "deprecated_filename_template": { + "title": "Detected use of deprecated template variable", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::camera::issues::deprecated_filename_template::title%]", + "description": "The pre-defined template variable `entity_id` was used when performing action `{service}` targeting camera entity `{entity_id}`. The pre-defined template variable `entity_id` is being removed from the `filename` parameter of `{service}`.\n\nPlease update your automations and scripts to use a manually defined variable instead and select **Submit** to close this issue." + } + } + } + }, + "legacy_webrtc_provider": { + "title": "Detected use of legacy WebRTC provider registered by {legacy_integration}", + "description": "The {legacy_integration} integration has registered a legacy WebRTC provider. Home Assistant prefers using the built-in modern WebRTC provider registered by the {builtin_integration} integration.\n\nBenefits of the built-in integration are:\n\n- The camera stream is started faster.\n- More camera devices are supported.\n\nTo fix this issue, you can either keep using the built-in modern WebRTC provider and remove the {legacy_integration} integration or remove the {builtin_integration} integration to use the legacy provider, and then restart Home Assistant." 
+ } + }, "services": { "turn_off": { "name": "[%key:common::action::turn_off%]", @@ -58,7 +75,7 @@ "fields": { "filename": { "name": "Filename", - "description": "Template of a filename. Variable available is `entity_id`." + "description": "Full path to filename." } } }, @@ -82,7 +99,7 @@ "fields": { "filename": { "name": "[%key:component::camera::services::snapshot::fields::filename::name%]", - "description": "Template of a filename. Variable available is `entity_id`. Must be mp4." + "description": "Full path to filename. Must be mp4." }, "duration": { "name": "Duration", diff --git a/homeassistant/components/camera/webrtc.py b/homeassistant/components/camera/webrtc.py index 05924855bc4caa..0612c96e40c8a9 100644 --- a/homeassistant/components/camera/webrtc.py +++ b/homeassistant/components/camera/webrtc.py @@ -2,20 +2,23 @@ from __future__ import annotations +from abc import ABC, abstractmethod import asyncio -from collections.abc import Awaitable, Callable, Coroutine -from dataclasses import dataclass, field +from collections.abc import Awaitable, Callable, Iterable +from dataclasses import asdict, dataclass, field +from functools import cache, partial +import logging from typing import TYPE_CHECKING, Any, Protocol -from mashumaro import field_options -from mashumaro.config import BaseConfig -from mashumaro.mixins.dict import DataClassDictMixin import voluptuous as vol +from webrtc_models import RTCConfiguration, RTCIceCandidate, RTCIceServer from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import config_validation as cv +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.util.hass_dict import HassKey +from homeassistant.util.ulid import ulid from .const import DATA_COMPONENT, DOMAIN, StreamType from .helper import get_camera_from_entity_id @@ -23,64 +26,137 @@ if TYPE_CHECKING: from . import Camera +_LOGGER = logging.getLogger(__name__) + DATA_WEBRTC_PROVIDERS: HassKey[set[CameraWebRTCProvider]] = HassKey( - "camera_web_rtc_providers" + "camera_webrtc_providers" +) +DATA_WEBRTC_LEGACY_PROVIDERS: HassKey[dict[str, CameraWebRTCLegacyProvider]] = HassKey( + "camera_webrtc_legacy_providers" ) -DATA_ICE_SERVERS: HassKey[list[Callable[[], Coroutine[Any, Any, RTCIceServer]]]] = ( - HassKey("camera_web_rtc_ice_servers") +DATA_ICE_SERVERS: HassKey[list[Callable[[], Iterable[RTCIceServer]]]] = HassKey( + "camera_webrtc_ice_servers" ) -class _RTCBaseModel(DataClassDictMixin): - """Base class for RTC models.""" +_WEBRTC = "WebRTC" - class Config(BaseConfig): - """Mashumaro config.""" - # Serialize to spec conform names and omit default values - omit_default = True - serialize_by_alias = True +@dataclass(frozen=True) +class WebRTCMessage: + """Base class for WebRTC messages.""" + @classmethod + @cache + def _get_type(cls) -> str: + _, _, name = cls.__name__.partition(_WEBRTC) + return name.lower() -@dataclass -class RTCIceServer(_RTCBaseModel): - """RTC Ice Server. 
+ def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the message.""" + data = asdict(self) + data["type"] = self._get_type() + return data - See https://www.w3.org/TR/webrtc/#rtciceserver-dictionary - """ - urls: list[str] | str - username: str | None = None - credential: str | None = None +@dataclass(frozen=True) +class WebRTCSession(WebRTCMessage): + """WebRTC session.""" + session_id: str -@dataclass -class RTCConfiguration(_RTCBaseModel): - """RTC Configuration. - See https://www.w3.org/TR/webrtc/#rtcconfiguration-dictionary - """ +@dataclass(frozen=True) +class WebRTCAnswer(WebRTCMessage): + """WebRTC answer.""" - ice_servers: list[RTCIceServer] = field( - metadata=field_options(alias="iceServers"), default_factory=list - ) + answer: str + + +@dataclass(frozen=True) +class WebRTCCandidate(WebRTCMessage): + """WebRTC candidate.""" + + candidate: RTCIceCandidate + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the message.""" + return { + "type": self._get_type(), + "candidate": self.candidate.candidate, + } + + +@dataclass(frozen=True) +class WebRTCError(WebRTCMessage): + """WebRTC error.""" + + code: str + message: str + + +type WebRTCSendMessage = Callable[[WebRTCMessage], None] @dataclass(kw_only=True) -class WebRTCClientConfiguration(_RTCBaseModel): +class WebRTCClientConfiguration: """WebRTC configuration for the client. Not part of the spec, but required to configure client. """ configuration: RTCConfiguration = field(default_factory=RTCConfiguration) - data_channel: str | None = field( - metadata=field_options(alias="dataChannel"), default=None - ) + data_channel: str | None = None + get_candidates_upfront: bool = False + + def to_frontend_dict(self) -> dict[str, Any]: + """Return a dict that can be used by the frontend.""" + data: dict[str, Any] = { + "configuration": self.configuration.to_dict(), + "getCandidatesUpfront": self.get_candidates_upfront, + } + if self.data_channel is not None: + data["dataChannel"] = self.data_channel + return data + + +class CameraWebRTCProvider(ABC): + """WebRTC provider.""" + + @property + @abstractmethod + def domain(self) -> str: + """Return the integration domain of the provider.""" + + @callback + @abstractmethod + def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + @abstractmethod + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback.""" + + @abstractmethod + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle the WebRTC candidate.""" + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" + return ## This is an optional method so we need a default here. 
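# --- Illustrative sketch, not part of this patch -------------------------------
# A minimal example of how an integration *might* implement the new
# CameraWebRTCProvider ABC defined above and register it with
# async_register_webrtc_provider(). "EchoProvider", the "echo_webrtc" domain and
# the echoed SDP are hypothetical placeholders; a real provider would negotiate
# the answer with an actual WebRTC backend.
from webrtc_models import RTCIceCandidate

from homeassistant.components.camera import Camera
from homeassistant.components.camera.webrtc import (
    CameraWebRTCProvider,
    WebRTCAnswer,
    WebRTCSendMessage,
    async_register_webrtc_provider,
)
from homeassistant.core import HomeAssistant, callback


class EchoProvider(CameraWebRTCProvider):
    """Hypothetical provider that only claims RTSP stream sources."""

    @property
    def domain(self) -> str:
        # Assumed integration domain for this sketch.
        return "echo_webrtc"

    @callback
    def async_is_supported(self, stream_source: str) -> bool:
        # Only offer to handle plain RTSP sources in this example.
        return stream_source.startswith("rtsp://")

    async def async_handle_async_webrtc_offer(
        self,
        camera: Camera,
        offer_sdp: str,
        session_id: str,
        send_message: WebRTCSendMessage,
    ) -> None:
        # A real backend would return a negotiated SDP answer; echoing the offer
        # back only illustrates the send_message/WebRTCAnswer flow.
        send_message(WebRTCAnswer(offer_sdp))

    async def async_on_webrtc_candidate(
        self, session_id: str, candidate: RTCIceCandidate
    ) -> None:
        # A real provider would forward trickle ICE candidates to its backend.
        return


def register_echo_provider(hass: HomeAssistant) -> None:
    """Register the sketch provider; keep the returned callable to unregister later."""
    unregister = async_register_webrtc_provider(hass, EchoProvider())
    _ = unregister
# --------------------------------------------------------------------------------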
-class CameraWebRTCProvider(Protocol): + +class CameraWebRTCLegacyProvider(Protocol): """WebRTC provider.""" async def async_is_supported(self, stream_source: str) -> bool: @@ -92,6 +168,7 @@ async def async_handle_web_rtc_offer( """Handle the WebRTC offer and return an answer.""" +@callback def async_register_webrtc_provider( hass: HomeAssistant, provider: CameraWebRTCProvider, @@ -103,9 +180,7 @@ def async_register_webrtc_provider( if DOMAIN not in hass.data: raise ValueError("Unexpected state, camera not loaded") - providers: set[CameraWebRTCProvider] = hass.data.setdefault( - DATA_WEBRTC_PROVIDERS, set() - ) + providers = hass.data.setdefault(DATA_WEBRTC_PROVIDERS, set()) @callback def remove_provider() -> None: @@ -122,6 +197,7 @@ def remove_provider() -> None: async def _async_refresh_providers(hass: HomeAssistant) -> None: """Check all cameras for any state changes for registered providers.""" + _async_check_conflicting_legacy_provider(hass) component = hass.data[DATA_COMPONENT] await asyncio.gather( @@ -129,6 +205,72 @@ async def _async_refresh_providers(hass: HomeAssistant) -> None: ) +@websocket_api.websocket_command( + { + vol.Required("type"): "camera/webrtc/offer", + vol.Required("entity_id"): cv.entity_id, + vol.Required("offer"): str, + } +) +@websocket_api.async_response +async def ws_webrtc_offer( + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] +) -> None: + """Handle the signal path for a WebRTC stream. + + This signal path is used to route the offer created by the client to the + camera device through the integration for negotiation on initial setup. + The ws endpoint returns a subscription id, where ice candidates and the + final answer will be returned. + The actual streaming is handled entirely between the client and camera device. + + Async friendly. 
+ """ + entity_id = msg["entity_id"] + offer = msg["offer"] + camera = get_camera_from_entity_id(hass, entity_id) + if camera.frontend_stream_type != StreamType.WEB_RTC: + connection.send_error( + msg["id"], + "webrtc_offer_failed", + ( + "Camera does not support WebRTC," + f" frontend_stream_type={camera.frontend_stream_type}" + ), + ) + return + + session_id = ulid() + connection.subscriptions[msg["id"]] = partial( + camera.close_webrtc_session, session_id + ) + + connection.send_message(websocket_api.result_message(msg["id"])) + + @callback + def send_message(message: WebRTCMessage) -> None: + """Push a value to websocket.""" + connection.send_message( + websocket_api.event_message( + msg["id"], + message.as_dict(), + ) + ) + + send_message(WebRTCSession(session_id)) + + try: + await camera.async_handle_async_webrtc_offer(offer, session_id, send_message) + except HomeAssistantError as ex: + _LOGGER.error("Error handling WebRTC offer: %s", ex) + send_message( + WebRTCError( + "webrtc_offer_failed", + str(ex), + ) + ) + + @websocket_api.websocket_command( { vol.Required("type"): "camera/webrtc/get_client_config", @@ -145,7 +287,7 @@ async def ws_get_client_config( if camera.frontend_stream_type != StreamType.WEB_RTC: connection.send_error( msg["id"], - "web_rtc_offer_failed", + "webrtc_get_client_config_failed", ( "Camera does not support WebRTC," f" frontend_stream_type={camera.frontend_stream_type}" @@ -153,32 +295,88 @@ async def ws_get_client_config( ) return - config = (await camera.async_get_webrtc_client_configuration()).to_dict() + config = camera.async_get_webrtc_client_configuration().to_frontend_dict() connection.send_result( msg["id"], config, ) -async def async_get_supported_providers( +@websocket_api.websocket_command( + { + vol.Required("type"): "camera/webrtc/candidate", + vol.Required("entity_id"): cv.entity_id, + vol.Required("session_id"): str, + vol.Required("candidate"): str, + } +) +@websocket_api.async_response +async def ws_candidate( + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] +) -> None: + """Handle WebRTC candidate websocket command.""" + entity_id = msg["entity_id"] + camera = get_camera_from_entity_id(hass, entity_id) + if camera.frontend_stream_type != StreamType.WEB_RTC: + connection.send_error( + msg["id"], + "webrtc_candidate_failed", + ( + "Camera does not support WebRTC," + f" frontend_stream_type={camera.frontend_stream_type}" + ), + ) + return + + await camera.async_on_webrtc_candidate( + msg["session_id"], RTCIceCandidate(msg["candidate"]) + ) + connection.send_message(websocket_api.result_message(msg["id"])) + + +@callback +def async_register_ws(hass: HomeAssistant) -> None: + """Register camera webrtc ws endpoints.""" + + websocket_api.async_register_command(hass, ws_webrtc_offer) + websocket_api.async_register_command(hass, ws_get_client_config) + websocket_api.async_register_command(hass, ws_candidate) + + +async def async_get_supported_provider( hass: HomeAssistant, camera: Camera -) -> list[CameraWebRTCProvider]: - """Return a list of supported providers for the camera.""" +) -> CameraWebRTCProvider | None: + """Return the first supported provider for the camera.""" providers = hass.data.get(DATA_WEBRTC_PROVIDERS) if not providers or not (stream_source := await camera.stream_source()): - return [] + return None + + for provider in providers: + if provider.async_is_supported(stream_source): + return provider + + return None + + +async def async_get_supported_legacy_provider( + hass: HomeAssistant, 
camera: Camera +) -> CameraWebRTCLegacyProvider | None: + """Return the first supported provider for the camera.""" + providers = hass.data.get(DATA_WEBRTC_LEGACY_PROVIDERS) + if not providers or not (stream_source := await camera.stream_source()): + return None - return [ - provider - for provider in providers - if await provider.async_is_supported(stream_source) - ] + for provider in providers.values(): + if await provider.async_is_supported(stream_source): + return provider + + return None @callback -def register_ice_server( +def async_register_ice_servers( hass: HomeAssistant, - get_ice_server_fn: Callable[[], Coroutine[Any, Any, RTCIceServer]], + get_ice_server_fn: Callable[[], Iterable[RTCIceServer]], ) -> Callable[[], None]: """Register a ICE server. @@ -207,7 +405,7 @@ def remove() -> None: type RtspToWebRtcProviderType = Callable[[str, str, str], Awaitable[str | None]] -class _CameraRtspToWebRTCProvider(CameraWebRTCProvider): +class _CameraRtspToWebRTCProvider(CameraWebRTCLegacyProvider): def __init__(self, fn: RtspToWebRtcProviderType) -> None: """Initialize the RTSP to WebRTC provider.""" self._fn = fn @@ -235,5 +433,49 @@ def async_register_rtsp_to_web_rtc_provider( The first provider to satisfy the offer will be used. """ + if DOMAIN not in hass.data: + raise ValueError("Unexpected state, camera not loaded") + + legacy_providers = hass.data.setdefault(DATA_WEBRTC_LEGACY_PROVIDERS, {}) + + if domain in legacy_providers: + raise ValueError("Provider already registered") + provider_instance = _CameraRtspToWebRTCProvider(provider) - return async_register_webrtc_provider(hass, provider_instance) + + @callback + def remove_provider() -> None: + legacy_providers.pop(domain) + hass.async_create_task(_async_refresh_providers(hass)) + + legacy_providers[domain] = provider_instance + hass.async_create_task(_async_refresh_providers(hass)) + + return remove_provider + + +@callback +def _async_check_conflicting_legacy_provider(hass: HomeAssistant) -> None: + """Check if a legacy provider is registered together with the builtin provider.""" + builtin_provider_domain = "go2rtc" + if ( + (legacy_providers := hass.data.get(DATA_WEBRTC_LEGACY_PROVIDERS)) + and (providers := hass.data.get(DATA_WEBRTC_PROVIDERS)) + and any(provider.domain == builtin_provider_domain for provider in providers) + ): + for domain in legacy_providers: + ir.async_create_issue( + hass, + DOMAIN, + f"legacy_webrtc_provider_{domain}", + is_fixable=False, + is_persistent=False, + issue_domain=domain, + learn_more_url="https://www.home-assistant.io/integrations/go2rtc/", + severity=ir.IssueSeverity.WARNING, + translation_key="legacy_webrtc_provider", + translation_placeholders={ + "legacy_integration": domain, + "builtin_integration": builtin_provider_domain, + }, + ) diff --git a/homeassistant/components/canary/alarm_control_panel.py b/homeassistant/components/canary/alarm_control_panel.py index a7d5dc8ab98c7f..69600e4bbc7dc8 100644 --- a/homeassistant/components/canary/alarm_control_panel.py +++ b/homeassistant/components/canary/alarm_control_panel.py @@ -10,14 +10,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from 
homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -70,18 +65,18 @@ def location(self) -> Location: return self.coordinator.data["locations"][self._location_id] @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if self.location.is_private: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED mode = self.location.mode if mode.name == LOCATION_MODE_AWAY: - return STATE_ALARM_ARMED_AWAY + return AlarmControlPanelState.ARMED_AWAY if mode.name == LOCATION_MODE_HOME: - return STATE_ALARM_ARMED_HOME + return AlarmControlPanelState.ARMED_HOME if mode.name == LOCATION_MODE_NIGHT: - return STATE_ALARM_ARMED_NIGHT + return AlarmControlPanelState.ARMED_NIGHT return None diff --git a/homeassistant/components/canary/config_flow.py b/homeassistant/components/canary/config_flow.py index 5af7142af8fa2b..2dd3a678b5da1b 100644 --- a/homeassistant/components/canary/config_flow.py +++ b/homeassistant/components/canary/config_flow.py @@ -52,7 +52,7 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return CanaryOptionsFlowHandler(config_entry) + return CanaryOptionsFlowHandler() async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" @@ -104,10 +104,6 @@ async def async_step_user( class CanaryOptionsFlowHandler(OptionsFlow): """Handle Canary client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 4f7dd59e83ed88..03a3f2ea1f84f9 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -41,24 +41,18 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> CastOptionsFlowHandler: """Get the options flow for this handler.""" - return CastOptionsFlowHandler(config_entry) + return CastOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return await self.async_step_config() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle a flow initialized by zeroconf discovery.""" - if self._async_in_progress() or self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - await self.async_set_unique_id(DOMAIN) return await self.async_step_confirm() @@ -115,9 +109,8 @@ def _get_data(self): class CastOptionsFlowHandler(OptionsFlow): """Handle Google Cast options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize Google Cast options flow.""" - self.config_entry = config_entry self.updated_config: dict[str, Any] = {} async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: diff --git a/homeassistant/components/cast/manifest.json b/homeassistant/components/cast/manifest.json index 72b2f799d184fa..0650f267544a5d 100644 --- a/homeassistant/components/cast/manifest.json +++ 
b/homeassistant/components/cast/manifest.json @@ -14,6 +14,7 @@ "documentation": "https://www.home-assistant.io/integrations/cast", "iot_class": "local_polling", "loggers": ["casttube", "pychromecast"], - "requirements": ["PyChromecast==14.0.4"], + "requirements": ["PyChromecast==14.0.5"], + "single_config_entry": true, "zeroconf": ["_googlecast._tcp.local."] } diff --git a/homeassistant/components/cast/strings.json b/homeassistant/components/cast/strings.json index ce622e48aaec3f..12f2edeee9a982 100644 --- a/homeassistant/components/cast/strings.json +++ b/homeassistant/components/cast/strings.json @@ -12,9 +12,6 @@ } } }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" - }, "error": { "invalid_known_hosts": "Known hosts must be a comma separated list of hosts." } diff --git a/homeassistant/components/cisco_webex_teams/__init__.py b/homeassistant/components/cisco_webex_teams/__init__.py index 0a8714806a18f3..5932f2ed680756 100644 --- a/homeassistant/components/cisco_webex_teams/__init__.py +++ b/homeassistant/components/cisco_webex_teams/__init__.py @@ -1 +1 @@ -"""Component to integrate the Cisco Webex Teams cloud.""" +"""Component to integrate the Cisco Webex cloud.""" diff --git a/homeassistant/components/cisco_webex_teams/manifest.json b/homeassistant/components/cisco_webex_teams/manifest.json index 822919213c2791..3da31a0b453a06 100644 --- a/homeassistant/components/cisco_webex_teams/manifest.json +++ b/homeassistant/components/cisco_webex_teams/manifest.json @@ -2,9 +2,8 @@ "domain": "cisco_webex_teams", "name": "Cisco Webex Teams", "codeowners": ["@fbradyirl"], - "disabled": "Integration library not compatible with Python 3.12", "documentation": "https://www.home-assistant.io/integrations/cisco_webex_teams", "iot_class": "cloud_push", - "loggers": ["webexteamssdk"], - "requirements": ["webexteamssdk==1.1.1;python_version<'3.12'"] + "loggers": ["webexpythonsdk"], + "requirements": ["webexpythonsdk==2.0.1"] } diff --git a/homeassistant/components/cisco_webex_teams/notify.py b/homeassistant/components/cisco_webex_teams/notify.py index b93ebb273dd9df..74d033c62d4edb 100644 --- a/homeassistant/components/cisco_webex_teams/notify.py +++ b/homeassistant/components/cisco_webex_teams/notify.py @@ -1,11 +1,11 @@ -"""Cisco Webex Teams notify component.""" +"""Cisco Webex notify component.""" from __future__ import annotations import logging import voluptuous as vol -from webexteamssdk import ApiError, WebexTeamsAPI, exceptions +from webexpythonsdk import ApiError, WebexAPI, exceptions from homeassistant.components.notify import ( ATTR_TITLE, @@ -30,9 +30,9 @@ def get_service( hass: HomeAssistant, config: ConfigType, discovery_info: DiscoveryInfoType | None = None, -) -> CiscoWebexTeamsNotificationService | None: - """Get the CiscoWebexTeams notification service.""" - client = WebexTeamsAPI(access_token=config[CONF_TOKEN]) +) -> CiscoWebexNotificationService | None: + """Get the Cisco Webex notification service.""" + client = WebexAPI(access_token=config[CONF_TOKEN]) try: # Validate the token & room_id client.rooms.get(config[CONF_ROOM_ID]) @@ -40,11 +40,11 @@ def get_service( _LOGGER.error(error) return None - return CiscoWebexTeamsNotificationService(client, config[CONF_ROOM_ID]) + return CiscoWebexNotificationService(client, config[CONF_ROOM_ID]) -class CiscoWebexTeamsNotificationService(BaseNotificationService): - """The Cisco Webex Teams Notification Service.""" +class CiscoWebexNotificationService(BaseNotificationService): + """The 
Cisco Webex Notification Service.""" def __init__(self, client, room): """Initialize the service.""" @@ -62,5 +62,5 @@ def send_message(self, message="", **kwargs): self.client.messages.create(roomId=self.room, html=f"{title}{message}") except ApiError as api_error: _LOGGER.error( - "Could not send CiscoWebexTeams notification. Error: %s", api_error + "Could not send Cisco Webex notification. Error: %s", api_error ) diff --git a/homeassistant/components/cloud/client.py b/homeassistant/components/cloud/client.py index 01c8de77156b30..ee46fa4212519b 100644 --- a/homeassistant/components/cloud/client.py +++ b/homeassistant/components/cloud/client.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from collections.abc import Callable from datetime import datetime from http import HTTPStatus import logging @@ -11,12 +12,14 @@ import aiohttp from hass_nabucasa.client import CloudClient as Interface, RemoteActivationNotAllowed +from webrtc_models import RTCIceServer from homeassistant.components import google_assistant, persistent_notification, webhook from homeassistant.components.alexa import ( errors as alexa_errors, smart_home as alexa_smart_home, ) +from homeassistant.components.camera.webrtc import async_register_ice_servers from homeassistant.components.google_assistant import smart_home as ga from homeassistant.const import __version__ as HA_VERSION from homeassistant.core import Context, HassJob, HomeAssistant, callback @@ -27,7 +30,7 @@ from homeassistant.util.aiohttp import MockRequest, serialize_response from . import alexa_config, google_config -from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN +from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN, PREF_ENABLE_CLOUD_ICE_SERVERS from .prefs import CloudPreferences _LOGGER = logging.getLogger(__name__) @@ -60,6 +63,7 @@ def __init__( self._alexa_config_init_lock = asyncio.Lock() self._google_config_init_lock = asyncio.Lock() self._relayer_region: str | None = None + self._cloud_ice_servers_listener: Callable[[], None] | None = None @property def base_path(self) -> Path: @@ -187,6 +191,49 @@ async def enable_google(_: datetime) -> None: if is_new_user: await gconf.async_sync_entities(gconf.agent_user_id) + async def setup_cloud_ice_servers(_: datetime) -> None: + async def register_cloud_ice_server( + ice_servers: list[RTCIceServer], + ) -> Callable[[], None]: + """Register cloud ice server.""" + + def get_ice_servers() -> list[RTCIceServer]: + return ice_servers + + return async_register_ice_servers(self._hass, get_ice_servers) + + async def async_register_cloud_ice_servers_listener( + prefs: CloudPreferences, + ) -> None: + is_cloud_ice_servers_enabled = ( + self.cloud.is_logged_in + and not self.cloud.subscription_expired + and prefs.cloud_ice_servers_enabled + ) + if is_cloud_ice_servers_enabled: + if self._cloud_ice_servers_listener is None: + self._cloud_ice_servers_listener = await self.cloud.ice_servers.async_register_ice_servers_listener( + register_cloud_ice_server + ) + elif self._cloud_ice_servers_listener: + self._cloud_ice_servers_listener() + self._cloud_ice_servers_listener = None + + async def async_prefs_updated(prefs: CloudPreferences) -> None: + updated_prefs = prefs.last_updated + + if ( + updated_prefs is None + or PREF_ENABLE_CLOUD_ICE_SERVERS not in updated_prefs + ): + return + + await async_register_cloud_ice_servers_listener(prefs) + + await async_register_cloud_ice_servers_listener(self._prefs) + + self._prefs.async_listen_updates(async_prefs_updated) + tasks = [] if 
self._prefs.alexa_enabled and self._prefs.alexa_report_state: @@ -195,6 +242,8 @@ async def enable_google(_: datetime) -> None: if self._prefs.google_enabled: tasks.append(enable_google) + tasks.append(setup_cloud_ice_servers) + if tasks: await asyncio.gather(*(task(None) for task in tasks)) @@ -222,6 +271,10 @@ async def logout_cleanups(self) -> None: self._google_config.async_deinitialize() self._google_config = None + if self._cloud_ice_servers_listener: + self._cloud_ice_servers_listener() + self._cloud_ice_servers_listener = None + @callback def user_message(self, identifier: str, title: str, message: str) -> None: """Create a message for user to UI.""" diff --git a/homeassistant/components/cloud/config_flow.py b/homeassistant/components/cloud/config_flow.py index 932291c2bfa580..92fbf78378bfb6 100644 --- a/homeassistant/components/cloud/config_flow.py +++ b/homeassistant/components/cloud/config_flow.py @@ -18,6 +18,4 @@ async def async_step_system( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the system step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") return self.async_create_entry(title="Home Assistant Cloud", data={}) diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 5e9fb2e9dc7b21..4392bf9482704a 100644 --- a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -43,6 +43,7 @@ PREF_TTS_DEFAULT_VOICE = "tts_default_voice" PREF_GOOGLE_CONNECTED = "google_connected" PREF_REMOTE_ALLOW_REMOTE_ENABLE = "remote_allow_remote_enable" +PREF_ENABLE_CLOUD_ICE_SERVERS = "cloud_ice_servers_enabled" DEFAULT_TTS_DEFAULT_VOICE = ("en-US", "JennyNeural") DEFAULT_DISABLE_2FA = False DEFAULT_ALEXA_REPORT_STATE = True diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index b19315157457bb..4f2ad0ddcf7bc0 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -42,6 +42,7 @@ PREF_ALEXA_REPORT_STATE, PREF_DISABLE_2FA, PREF_ENABLE_ALEXA, + PREF_ENABLE_CLOUD_ICE_SERVERS, PREF_ENABLE_GOOGLE, PREF_GOOGLE_REPORT_STATE, PREF_GOOGLE_SECURE_DEVICES_PIN, @@ -439,15 +440,16 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]: @websocket_api.websocket_command( { vol.Required("type"): "cloud/update_prefs", - vol.Optional(PREF_ENABLE_GOOGLE): bool, - vol.Optional(PREF_ENABLE_ALEXA): bool, vol.Optional(PREF_ALEXA_REPORT_STATE): bool, + vol.Optional(PREF_ENABLE_ALEXA): bool, + vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool, + vol.Optional(PREF_ENABLE_GOOGLE): bool, vol.Optional(PREF_GOOGLE_REPORT_STATE): bool, vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str), + vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All( vol.Coerce(tuple), validate_language_voice ), - vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, } ) @websocket_api.async_response diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 529f4fb9be96c9..4201cb1b2d44e2 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -8,5 +8,6 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.81.1"] + "requirements": ["hass-nabucasa==0.84.0"], + "single_config_entry": true } diff --git 
a/homeassistant/components/cloud/prefs.py b/homeassistant/components/cloud/prefs.py index 9f76c16a113951..ae4b2794e1b24e 100644 --- a/homeassistant/components/cloud/prefs.py +++ b/homeassistant/components/cloud/prefs.py @@ -32,6 +32,7 @@ PREF_CLOUD_USER, PREF_CLOUDHOOKS, PREF_ENABLE_ALEXA, + PREF_ENABLE_CLOUD_ICE_SERVERS, PREF_ENABLE_GOOGLE, PREF_ENABLE_REMOTE, PREF_GOOGLE_CONNECTED, @@ -162,20 +163,21 @@ def unsubscribe() -> None: async def async_update( self, *, - google_enabled: bool | UndefinedType = UNDEFINED, alexa_enabled: bool | UndefinedType = UNDEFINED, - remote_enabled: bool | UndefinedType = UNDEFINED, - google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, - cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, - cloud_user: str | UndefinedType = UNDEFINED, alexa_report_state: bool | UndefinedType = UNDEFINED, - google_report_state: bool | UndefinedType = UNDEFINED, - tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED, - remote_domain: str | None | UndefinedType = UNDEFINED, alexa_settings_version: int | UndefinedType = UNDEFINED, - google_settings_version: int | UndefinedType = UNDEFINED, + cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED, + cloud_user: str | UndefinedType = UNDEFINED, + cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, google_connected: bool | UndefinedType = UNDEFINED, + google_enabled: bool | UndefinedType = UNDEFINED, + google_report_state: bool | UndefinedType = UNDEFINED, + google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, + google_settings_version: int | UndefinedType = UNDEFINED, remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, + remote_domain: str | None | UndefinedType = UNDEFINED, + remote_enabled: bool | UndefinedType = UNDEFINED, + tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED, ) -> None: """Update user preferences.""" prefs = {**self._prefs} @@ -184,20 +186,21 @@ async def async_update( { key: value for key, value in ( - (PREF_ENABLE_GOOGLE, google_enabled), + (PREF_ALEXA_REPORT_STATE, alexa_report_state), + (PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version), + (PREF_CLOUD_USER, cloud_user), + (PREF_CLOUDHOOKS, cloudhooks), (PREF_ENABLE_ALEXA, alexa_enabled), + (PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled), + (PREF_ENABLE_GOOGLE, google_enabled), (PREF_ENABLE_REMOTE, remote_enabled), - (PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin), - (PREF_CLOUDHOOKS, cloudhooks), - (PREF_CLOUD_USER, cloud_user), - (PREF_ALEXA_REPORT_STATE, alexa_report_state), + (PREF_GOOGLE_CONNECTED, google_connected), (PREF_GOOGLE_REPORT_STATE, google_report_state), - (PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version), + (PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin), (PREF_GOOGLE_SETTINGS_VERSION, google_settings_version), - (PREF_TTS_DEFAULT_VOICE, tts_default_voice), - (PREF_REMOTE_DOMAIN, remote_domain), - (PREF_GOOGLE_CONNECTED, google_connected), (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), + (PREF_REMOTE_DOMAIN, remote_domain), + (PREF_TTS_DEFAULT_VOICE, tts_default_voice), ) if value is not UNDEFINED } @@ -239,6 +242,7 @@ def as_dict(self) -> dict[str, Any]: PREF_ALEXA_REPORT_STATE: self.alexa_report_state, PREF_CLOUDHOOKS: self.cloudhooks, PREF_ENABLE_ALEXA: self.alexa_enabled, + PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled, PREF_ENABLE_GOOGLE: self.google_enabled, PREF_ENABLE_REMOTE: self.remote_enabled, PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose, 
@@ -362,6 +366,14 @@ def tts_default_voice(self) -> tuple[str, str]: """ return self._prefs.get(PREF_TTS_DEFAULT_VOICE, DEFAULT_TTS_DEFAULT_VOICE) # type: ignore[no-any-return] + @property + def cloud_ice_servers_enabled(self) -> bool: + """Return if cloud ICE servers are enabled.""" + cloud_ice_servers_enabled: bool = self._prefs.get( + PREF_ENABLE_CLOUD_ICE_SERVERS, True + ) + return cloud_ice_servers_enabled + async def get_cloud_user(self) -> str: """Return ID of Home Assistant Cloud system user.""" user = await self._load_cloud_user() @@ -409,6 +421,7 @@ def _empty_config(username: str) -> dict[str, Any]: PREF_ENABLE_ALEXA: True, PREF_ENABLE_GOOGLE: True, PREF_ENABLE_REMOTE: False, + PREF_ENABLE_CLOUD_ICE_SERVERS: True, PREF_GOOGLE_CONNECTED: False, PREF_GOOGLE_DEFAULT_EXPOSE: DEFAULT_EXPOSED_DOMAINS, PREF_GOOGLE_ENTITY_CONFIGS: {}, diff --git a/homeassistant/components/cloud/strings.json b/homeassistant/components/cloud/strings.json index fe36159e5eb91a..9f7e0dbadcdf81 100644 --- a/homeassistant/components/cloud/strings.json +++ b/homeassistant/components/cloud/strings.json @@ -1,10 +1,4 @@ { - "config": { - "step": {}, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" - } - }, "system_health": { "info": { "can_reach_cert_server": "Reach certificate server", diff --git a/homeassistant/components/cloud/system_health.py b/homeassistant/components/cloud/system_health.py index 0e65aa93eaf952..ac50c2fb49b418 100644 --- a/homeassistant/components/cloud/system_health.py +++ b/homeassistant/components/cloud/system_health.py @@ -33,6 +33,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]: data["remote_connected"] = cloud.remote.is_connected data["alexa_enabled"] = client.prefs.alexa_enabled data["google_enabled"] = client.prefs.google_enabled + data["cloud_ice_servers_enabled"] = client.prefs.cloud_ice_servers_enabled data["remote_server"] = cloud.remote.snitun_server data["certificate_status"] = cloud.remote.certificate_status data["instance_id"] = client.prefs.instance_id diff --git a/homeassistant/components/cloudflare/config_flow.py b/homeassistant/components/cloudflare/config_flow.py index a4276cf9dd3113..c3845a447e496a 100644 --- a/homeassistant/components/cloudflare/config_flow.py +++ b/homeassistant/components/cloudflare/config_flow.py @@ -118,9 +118,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - persistent_notification.async_dismiss(self.hass, "cloudflare_setup") errors: dict[str, str] = {} diff --git a/homeassistant/components/cloudflare/manifest.json b/homeassistant/components/cloudflare/manifest.json index 0f689aa3e03a94..8529a0b9bad962 100644 --- a/homeassistant/components/cloudflare/manifest.json +++ b/homeassistant/components/cloudflare/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/cloudflare", "iot_class": "cloud_push", "loggers": ["pycfdns"], - "requirements": ["pycfdns==3.0.0"] + "requirements": ["pycfdns==3.0.0"], + "single_config_entry": true } diff --git a/homeassistant/components/cloudflare/strings.json b/homeassistant/components/cloudflare/strings.json index 75dc8f079c7e88..8c8ec57b0748bf 100644 --- a/homeassistant/components/cloudflare/strings.json +++ b/homeassistant/components/cloudflare/strings.json @@ -30,12 +30,11 @@ }, "error": { "cannot_connect": 
"[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "services": { diff --git a/homeassistant/components/co2signal/config_flow.py b/homeassistant/components/co2signal/config_flow.py index 3313d01be85eb8..0d357cce1993c1 100644 --- a/homeassistant/components/co2signal/config_flow.py +++ b/homeassistant/components/co2signal/config_flow.py @@ -13,7 +13,7 @@ ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY_CODE, @@ -42,7 +42,6 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 _data: dict | None - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -128,9 +127,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle the reauth step.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -165,16 +161,14 @@ async def _validate_and_create( except ElectricityMapsError: errors["base"] = "unknown" else: - if self._reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self._reauth_entry, - data={ - CONF_API_KEY: data[CONF_API_KEY], - }, + self._get_reauth_entry(), + data_updates={CONF_API_KEY: data[CONF_API_KEY]}, ) return self.async_create_entry( - title=get_extra_name(data) or "CO2 Signal", + title=get_extra_name(data) or "Electricity Maps", data=data, ) diff --git a/homeassistant/components/coinbase/config_flow.py b/homeassistant/components/coinbase/config_flow.py index 616fdaf8f7ab43..8b7b4b9e3135a5 100644 --- a/homeassistant/components/coinbase/config_flow.py +++ b/homeassistant/components/coinbase/config_flow.py @@ -158,16 +158,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Coinbase.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/comelit/alarm_control_panel.py b/homeassistant/components/comelit/alarm_control_panel.py index b325de25e978de..b3bd6664bf8c19 100644 --- a/homeassistant/components/comelit/alarm_control_panel.py +++ b/homeassistant/components/comelit/alarm_control_panel.py @@ -10,21 +10,12 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( 
- STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN @@ -112,7 +103,7 @@ def available(self) -> bool: return super().available @property - def state(self) -> StateType: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the alarm.""" _LOGGER.debug( @@ -123,16 +114,16 @@ def state(self) -> StateType: ) if self._area.human_status == AlarmAreaState.ARMED: if self._area.armed == ALARM_AREA_ARMED_STATUS[AWAY]: - return STATE_ALARM_ARMED_AWAY + return AlarmControlPanelState.ARMED_AWAY if self._area.armed == ALARM_AREA_ARMED_STATUS[NIGHT]: - return STATE_ALARM_ARMED_NIGHT - return STATE_ALARM_ARMED_HOME + return AlarmControlPanelState.ARMED_NIGHT + return AlarmControlPanelState.ARMED_HOME return { - AlarmAreaState.DISARMED: STATE_ALARM_DISARMED, - AlarmAreaState.ENTRY_DELAY: STATE_ALARM_DISARMING, - AlarmAreaState.EXIT_DELAY: STATE_ALARM_ARMING, - AlarmAreaState.TRIGGERED: STATE_ALARM_TRIGGERED, + AlarmAreaState.DISARMED: AlarmControlPanelState.DISARMED, + AlarmAreaState.ENTRY_DELAY: AlarmControlPanelState.DISARMING, + AlarmAreaState.EXIT_DELAY: AlarmControlPanelState.ARMING, + AlarmAreaState.TRIGGERED: AlarmControlPanelState.TRIGGERED, }.get(self._area.human_status) async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/comelit/cover.py b/homeassistant/components/comelit/cover.py index 011ed81b5cb2eb..5169217ebc529e 100644 --- a/homeassistant/components/comelit/cover.py +++ b/homeassistant/components/comelit/cover.py @@ -7,7 +7,7 @@ from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON -from homeassistant.components.cover import STATE_CLOSED, CoverDeviceClass, CoverEntity +from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -85,7 +85,7 @@ def is_closed(self) -> bool | None: if self._last_action: return self._last_action == STATE_COVER.index("closing") - return self._last_state == STATE_CLOSED + return self._last_state == CoverState.CLOSED @property def is_closing(self) -> bool: diff --git a/homeassistant/components/comelit/diagnostics.py b/homeassistant/components/comelit/diagnostics.py new file mode 100644 index 00000000000000..afa57831eaeb46 --- /dev/null +++ b/homeassistant/components/comelit/diagnostics.py @@ -0,0 +1,93 @@ +"""Diagnostics support for Comelit integration.""" + +from __future__ import annotations + +from typing import Any + +from aiocomelit import ( + ComelitSerialBridgeObject, + ComelitVedoAreaObject, + ComelitVedoZoneObject, +) +from aiocomelit.const import BRIDGE + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PIN, CONF_TYPE +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import ComelitBaseCoordinator + +TO_REDACT = {CONF_PIN} + + +async def async_get_config_entry_diagnostics( + 
hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + coordinator: ComelitBaseCoordinator = hass.data[DOMAIN][entry.entry_id] + + dev_list: list[dict[str, Any]] = [] + dev_type_list: list[dict[int, Any]] = [] + + for dev_type in coordinator.data: + dev_type_list = [] + for sensor_data in coordinator.data[dev_type].values(): + if isinstance(sensor_data, ComelitSerialBridgeObject): + dev_type_list.append( + { + sensor_data.index: { + "name": sensor_data.name, + "status": sensor_data.status, + "human_status": sensor_data.human_status, + "protected": sensor_data.protected, + "val": sensor_data.val, + "zone": sensor_data.zone, + "power": sensor_data.power, + "power_unit": sensor_data.power_unit, + } + } + ) + if isinstance(sensor_data, ComelitVedoAreaObject): + dev_type_list.append( + { + sensor_data.index: { + "name": sensor_data.name, + "human_status": sensor_data.human_status.value, + "p1": sensor_data.p1, + "p2": sensor_data.p2, + "ready": sensor_data.ready, + "armed": sensor_data.armed, + "alarm": sensor_data.alarm, + "alarm_memory": sensor_data.alarm_memory, + "sabotage": sensor_data.sabotage, + "anomaly": sensor_data.anomaly, + "in_time": sensor_data.in_time, + "out_time": sensor_data.out_time, + } + } + ) + if isinstance(sensor_data, ComelitVedoZoneObject): + dev_type_list.append( + { + sensor_data.index: { + "name": sensor_data.name, + "human_status": sensor_data.human_status.value, + "status": sensor_data.status, + "status_api": sensor_data.status_api, + } + } + ) + dev_list.append({dev_type: dev_type_list}) + + return { + "entry": async_redact_data(entry.as_dict(), TO_REDACT), + "type": entry.data.get(CONF_TYPE, BRIDGE), + "device_info": { + "last_update success": coordinator.last_update_success, + "last_exception": repr(coordinator.last_exception), + "devices": dev_list, + }, + } diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index b9264d16f69df9..d25d5c1d7d53ff 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_polling", "loggers": ["aiocomelit"], "quality_scale": "silver", - "requirements": ["aiocomelit==0.9.0"] + "requirements": ["aiocomelit==0.9.1"] } diff --git a/homeassistant/components/compensation/manifest.json b/homeassistant/components/compensation/manifest.json index caae9190bca557..775bde3c8591e4 100644 --- a/homeassistant/components/compensation/manifest.json +++ b/homeassistant/components/compensation/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@Petro31"], "documentation": "https://www.home-assistant.io/integrations/compensation", "iot_class": "calculated", - "requirements": ["numpy==1.26.4"] + "requirements": ["numpy==2.1.3"] } diff --git a/homeassistant/components/concord232/alarm_control_panel.py b/homeassistant/components/concord232/alarm_control_panel.py index 661a2beacc0002..02453b56376018 100644 --- a/homeassistant/components/concord232/alarm_control_panel.py +++ b/homeassistant/components/concord232/alarm_control_panel.py @@ -13,18 +13,10 @@ PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - CONF_CODE, - CONF_HOST, - CONF_MODE, - CONF_NAME, - CONF_PORT, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, -) +from homeassistant.const import CONF_CODE, CONF_HOST, CONF_MODE, CONF_NAME, 
CONF_PORT from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -75,7 +67,6 @@ class Concord232Alarm(AlarmControlPanelEntity): """Representation of the Concord232-based alarm panel.""" _attr_code_format = CodeFormat.NUMBER - _attr_state: str | None _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME | AlarmControlPanelEntityFeature.ARM_AWAY @@ -107,21 +98,21 @@ def update(self) -> None: return if part["arming_level"] == "Off": - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif "Home" in part["arming_level"]: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME else: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY def alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - if not self._validate_code(code, STATE_ALARM_DISARMED): + if not self._validate_code(code, AlarmControlPanelState.DISARMED): return self._alarm.disarm(code) def alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - if not self._validate_code(code, STATE_ALARM_ARMED_HOME): + if not self._validate_code(code, AlarmControlPanelState.ARMED_HOME): return if self._mode == "silent": self._alarm.arm("stay", "silent") @@ -130,7 +121,7 @@ def alarm_arm_home(self, code: str | None = None) -> None: def alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - if not self._validate_code(code, STATE_ALARM_ARMED_AWAY): + if not self._validate_code(code, AlarmControlPanelState.ARMED_AWAY): return self._alarm.arm("away") @@ -138,10 +129,7 @@ def _validate_code(self, code, state): """Validate given code.""" if self._code is None: return True - if isinstance(self._code, str): - alarm_code = self._code - else: - alarm_code = self._code.render(from_state=self._attr_state, to_state=state) + alarm_code = self._code check = not alarm_code or code == alarm_code if not check: _LOGGER.warning("Invalid code given for %s", state) diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py index 9149ffe98e1e99..da50f7e93a119f 100644 --- a/homeassistant/components/config/config_entries.py +++ b/homeassistant/components/config/config_entries.py @@ -463,7 +463,7 @@ async def ignore_config_flow( ) return - context = {"source": config_entries.SOURCE_IGNORE} + context = config_entries.ConfigFlowContext(source=config_entries.SOURCE_IGNORE) if "discovery_key" in flow["context"]: context["discovery_key"] = flow["context"]["discovery_key"] await hass.config_entries.flow.async_init( diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index 77ae2c98c7d9d2..19fae1ef7ca621 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -154,16 +154,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Control4.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = 
None ) -> ConfigFlowResult: diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 155909d5fe3591..a7110c35795550 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -16,6 +16,7 @@ from hassil.intents import Intents, SlotList, TextSlotList, WildcardSlotList from hassil.recognize import ( MISSING_ENTITY, + MatchEntity, RecognizeResult, UnmatchedTextEntity, recognize_all, @@ -293,7 +294,7 @@ async def async_process(self, user_input: ConversationInput) -> ConversationResu self.hass, language, DOMAIN, [DOMAIN] ) response_text = translations.get( - f"component.{DOMAIN}.agent.done", "Done" + f"component.{DOMAIN}.conversation.agent.done", "Done" ) response.async_set_speech(response_text) @@ -437,69 +438,67 @@ def _recognize( language: str, ) -> RecognizeResult | None: """Search intents for a match to user input.""" - custom_result: RecognizeResult | None = None - name_result: RecognizeResult | None = None - best_results: list[RecognizeResult] = [] - best_text_chunks_matched: int | None = None - for result in recognize_all( - user_input.text, - lang_intents.intents, - slot_lists=slot_lists, - intent_context=intent_context, - language=language, - ): - # User intents have highest priority - if (result.intent_metadata is not None) and result.intent_metadata.get( - METADATA_CUSTOM_SENTENCE - ): - if (custom_result is None) or ( - result.text_chunks_matched > custom_result.text_chunks_matched + strict_result = self._recognize_strict( + user_input, lang_intents, slot_lists, intent_context, language + ) + + if strict_result is not None: + # Successful strict match + return strict_result + + # Try again with all entities (including unexposed) + entity_registry = er.async_get(self.hass) + all_entity_names: list[tuple[str, str, dict[str, Any]]] = [] + + for state in self.hass.states.async_all(): + context = {"domain": state.domain} + if state.attributes: + # Include some attributes + for attr in DEFAULT_EXPOSED_ATTRIBUTES: + if attr not in state.attributes: + continue + context[attr] = state.attributes[attr] + + if entity := entity_registry.async_get(state.entity_id): + # Skip config/hidden entities + if (entity.entity_category is not None) or ( + entity.hidden_by is not None ): - custom_result = result + continue - # Clear builtin results - best_results = [] - name_result = None - continue + if entity.aliases: + # Also add aliases + for alias in entity.aliases: + if not alias.strip(): + continue - # Prioritize results with a "name" slot, but still prefer ones with - # more literal text matched. - if ( - ("name" in result.entities) - and (not result.entities["name"].is_wildcard) - and ( - (name_result is None) - or (result.text_chunks_matched > name_result.text_chunks_matched) - ) - ): - name_result = result + all_entity_names.append((alias, alias, context)) - if (best_text_chunks_matched is None) or ( - result.text_chunks_matched > best_text_chunks_matched - ): - # Only overwrite if more literal text was matched. - # This causes wildcards to match last. - best_results = [result] - best_text_chunks_matched = result.text_chunks_matched - elif result.text_chunks_matched == best_text_chunks_matched: - # Accumulate results with the same number of literal text matched. - # We will resolve the ambiguity below. 
- best_results.append(result) + # Default name + all_entity_names.append((state.name, state.name, context)) - if custom_result is not None: - # Prioritize user intents - return custom_result + slot_lists = { + **slot_lists, + "name": TextSlotList.from_tuples(all_entity_names, allow_template=False), + } - if name_result is not None: - # Prioritize matches with entity names above area names - return name_result + strict_result = self._recognize_strict( + user_input, + lang_intents, + slot_lists, + intent_context, + language, + ) - if best_results: - # Successful strict match - return best_results[0] + if strict_result is not None: + # Not a successful match, but useful for an error message. + # This should fail the intent handling phase (async_match_targets). + return strict_result # Try again with missing entities enabled maybe_result: RecognizeResult | None = None + best_num_matched_entities = 0 + best_num_unmatched_entities = 0 for result in recognize_all( user_input.text, lang_intents.intents, @@ -512,35 +511,126 @@ def _recognize( continue # Don't count missing entities that couldn't be filled from context + num_matched_entities = 0 + for matched_entity in result.entities_list: + if matched_entity.name not in result.unmatched_entities: + num_matched_entities += 1 + num_unmatched_entities = 0 - for entity in result.unmatched_entities_list: - if isinstance(entity, UnmatchedTextEntity): - if entity.text != MISSING_ENTITY: + for unmatched_entity in result.unmatched_entities_list: + if isinstance(unmatched_entity, UnmatchedTextEntity): + if unmatched_entity.text != MISSING_ENTITY: num_unmatched_entities += 1 else: num_unmatched_entities += 1 - if maybe_result is None: - # First result - maybe_result = result - best_num_unmatched_entities = num_unmatched_entities - elif num_unmatched_entities < best_num_unmatched_entities: - # Fewer unmatched entities + if ( + (maybe_result is None) # first result + or (num_matched_entities > best_num_matched_entities) + or ( + # Fewer unmatched entities + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities < best_num_unmatched_entities) + ) + or ( + # More literal text matched + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities == best_num_unmatched_entities) + and (result.text_chunks_matched > maybe_result.text_chunks_matched) + ) + or ( + # Prefer match failures with entities + (result.text_chunks_matched == maybe_result.text_chunks_matched) + and ( + ("name" in result.entities) + or ("name" in result.unmatched_entities) + ) + ) + ): maybe_result = result + best_num_matched_entities = num_matched_entities best_num_unmatched_entities = num_unmatched_entities - elif num_unmatched_entities == best_num_unmatched_entities: - if (result.text_chunks_matched > maybe_result.text_chunks_matched) or ( - (result.text_chunks_matched == maybe_result.text_chunks_matched) - and ("name" in result.unmatched_entities) # prefer entities - ): - # More literal text chunks matched, but prefer entities to areas, etc. 
- maybe_result = result - if (maybe_result is not None) and maybe_result.unmatched_entities: - # Failed to match, but we have more information about why in unmatched_entities - return maybe_result + return maybe_result + + def _recognize_strict( + self, + user_input: ConversationInput, + lang_intents: LanguageIntents, + slot_lists: dict[str, SlotList], + intent_context: dict[str, Any] | None, + language: str, + ) -> RecognizeResult | None: + """Search intents for a strict match to user input.""" + custom_found = False + name_found = False + best_results: list[RecognizeResult] = [] + best_name_quality: int | None = None + best_text_chunks_matched: int | None = None + for result in recognize_all( + user_input.text, + lang_intents.intents, + slot_lists=slot_lists, + intent_context=intent_context, + language=language, + ): + # Prioritize user intents + is_custom = ( + result.intent_metadata is not None + and result.intent_metadata.get(METADATA_CUSTOM_SENTENCE) + ) + + if custom_found and not is_custom: + continue + + if not custom_found and is_custom: + custom_found = True + # Clear builtin results + name_found = False + best_results = [] + best_name_quality = None + best_text_chunks_matched = None + + # Prioritize results with a "name" slot + name = result.entities.get("name") + is_name = name and not name.is_wildcard + + if name_found and not is_name: + continue + + if not name_found and is_name: + name_found = True + # Clear non-name results + best_results = [] + best_text_chunks_matched = None + + if is_name: + # Prioritize results with a better "name" slot + name_quality = len(cast(MatchEntity, name).value.split()) + if (best_name_quality is None) or (name_quality > best_name_quality): + best_name_quality = name_quality + # Clear worse name results + best_results = [] + best_text_chunks_matched = None + elif name_quality < best_name_quality: + continue + + # Prioritize results with more literal text + # This causes wildcards to match last. + if (best_text_chunks_matched is None) or ( + result.text_chunks_matched > best_text_chunks_matched + ): + best_results = [result] + best_text_chunks_matched = result.text_chunks_matched + elif result.text_chunks_matched == best_text_chunks_matched: + # Accumulate results with the same number of literal text matched. + # We will resolve the ambiguity below. + best_results.append(result) + + if best_results: + # Successful strict match + return best_results[0] - # Complete match failure return None async def _build_speech( @@ -824,20 +914,18 @@ def _make_slot_lists(self) -> dict[str, SlotList]: start = time.monotonic() entity_registry = er.async_get(self.hass) - states = [ - state - for state in self.hass.states.async_all() - if async_should_expose(self.hass, DOMAIN, state.entity_id) - ] - # Gather exposed entity names. + # Gather entity names, keeping track of exposed names. + # We try intent recognition with only exposed names first, then all names. # # NOTE: We do not pass entity ids in here because multiple entities may # have the same name. The intent matcher doesn't gather all matching # values for a list, just the first. So we will need to match by name no # matter what. 
- entity_names = [] - for state in states: + exposed_entity_names = [] + for state in self.hass.states.async_all(): + is_exposed = async_should_expose(self.hass, DOMAIN, state.entity_id) + # Checked against "requires_context" and "excludes_context" in hassil context = {"domain": state.domain} if state.attributes: @@ -847,24 +935,23 @@ def _make_slot_lists(self) -> dict[str, SlotList]: continue context[attr] = state.attributes[attr] - entity = entity_registry.async_get(state.entity_id) - - if not entity: - # Default name - entity_names.append((state.name, state.name, context)) - continue - - if entity.aliases: + if ( + entity := entity_registry.async_get(state.entity_id) + ) and entity.aliases: for alias in entity.aliases: if not alias.strip(): continue - entity_names.append((alias, alias, context)) + name_tuple = (alias, alias, context) + if is_exposed: + exposed_entity_names.append(name_tuple) # Default name - entity_names.append((state.name, state.name, context)) + name_tuple = (state.name, state.name, context) + if is_exposed: + exposed_entity_names.append(name_tuple) - _LOGGER.debug("Exposed entities: %s", entity_names) + _LOGGER.debug("Exposed entities: %s", exposed_entity_names) # Expose all areas. areas = ar.async_get(self.hass) @@ -898,7 +985,9 @@ def _make_slot_lists(self) -> dict[str, SlotList]: self._slot_lists = { "area": TextSlotList.from_tuples(area_names, allow_template=False), - "name": TextSlotList.from_tuples(entity_names, allow_template=False), + "name": TextSlotList.from_tuples( + exposed_entity_names, allow_template=False + ), "floor": TextSlotList.from_tuples(floor_names, allow_template=False), } @@ -1092,6 +1181,10 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str if matched_area_entity := result.entities.get("area"): matched_area = matched_area_entity.text.strip() + matched_floor: str | None = None + if matched_floor_entity := result.entities.get("floor"): + matched_floor = matched_floor_entity.text.strip() + if unmatched_name := unmatched_text.get("name"): if matched_area: # device in area @@ -1099,6 +1192,12 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str "entity": unmatched_name, "area": matched_area, } + if matched_floor: + # device on floor + return ErrorKey.NO_ENTITY_IN_FLOOR, { + "entity": unmatched_name, + "floor": matched_floor, + } # device only return ErrorKey.NO_ENTITY, {"entity": unmatched_name} @@ -1181,17 +1280,62 @@ def _get_match_error_response( if reason == intent.MatchFailedReason.STATE: # Entity is not in correct state - assert match_error.constraints.states - state = next(iter(match_error.constraints.states)) - if match_error.constraints.domains: + assert constraints.states + state = next(iter(constraints.states)) + if constraints.domains: # Translate if domain is available - domain = next(iter(match_error.constraints.domains)) + domain = next(iter(constraints.domains)) state = translation.async_translate_state( hass, state, domain, None, None, None ) return ErrorKey.ENTITY_WRONG_STATE, {"state": state} + if reason == intent.MatchFailedReason.ASSISTANT: + # Not exposed + if constraints.name: + if constraints.area_name: + return ErrorKey.NO_ENTITY_IN_AREA_EXPOSED, { + "entity": constraints.name, + "area": constraints.area_name, + } + if constraints.floor_name: + return ErrorKey.NO_ENTITY_IN_FLOOR_EXPOSED, { + "entity": constraints.name, + "floor": constraints.floor_name, + } + return ErrorKey.NO_ENTITY_EXPOSED, {"entity": constraints.name} + + if constraints.device_classes: + 
device_class = next(iter(constraints.device_classes)) + + if constraints.area_name: + return ErrorKey.NO_DEVICE_CLASS_IN_AREA_EXPOSED, { + "device_class": device_class, + "area": constraints.area_name, + } + if constraints.floor_name: + return ErrorKey.NO_DEVICE_CLASS_IN_FLOOR_EXPOSED, { + "device_class": device_class, + "floor": constraints.floor_name, + } + return ErrorKey.NO_DEVICE_CLASS_EXPOSED, {"device_class": device_class} + + if constraints.domains: + domain = next(iter(constraints.domains)) + + if constraints.area_name: + return ErrorKey.NO_DOMAIN_IN_AREA_EXPOSED, { + "domain": domain, + "area": constraints.area_name, + } + if constraints.floor_name: + return ErrorKey.NO_DOMAIN_IN_FLOOR_EXPOSED, { + "domain": domain, + "floor": constraints.floor_name, + } + return ErrorKey.NO_DOMAIN_EXPOSED, {"domain": domain} + # Default error return ErrorKey.NO_INTENT, {} diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index c2168ce715259d..8b5c6ef173ff3d 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.10.2"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.6"] } diff --git a/homeassistant/components/cover/__init__.py b/homeassistant/components/cover/__init__.py index 629d4c87ee35ed..ea11761a753d44 100644 --- a/homeassistant/components/cover/__init__.py +++ b/homeassistant/components/cover/__init__.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( +from homeassistant.const import ( # noqa: F401 SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -54,6 +54,24 @@ SCAN_INTERVAL = timedelta(seconds=15) +class CoverState(StrEnum): + """State of Cover entities.""" + + CLOSED = "closed" + CLOSING = "closing" + OPEN = "open" + OPENING = "opening" + + +# STATE_* below are deprecated as of 2024.11 +# when imported from homeassistant.components.cover +# use the CoverState enum instead. 
+_DEPRECATED_STATE_CLOSED = DeprecatedConstantEnum(CoverState.CLOSED, "2025.11") +_DEPRECATED_STATE_CLOSING = DeprecatedConstantEnum(CoverState.CLOSING, "2025.11") +_DEPRECATED_STATE_OPEN = DeprecatedConstantEnum(CoverState.OPEN, "2025.11") +_DEPRECATED_STATE_OPENING = DeprecatedConstantEnum(CoverState.OPENING, "2025.11") + + class CoverDeviceClass(StrEnum): """Device class for cover.""" @@ -148,7 +166,7 @@ class CoverEntityFeature(IntFlag): @bind_hass def is_closed(hass: HomeAssistant, entity_id: str) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(entity_id, STATE_CLOSED) + return hass.states.is_state(entity_id, CoverState.CLOSED) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -303,15 +321,15 @@ def state(self) -> str | None: """Return the state of the cover.""" if self.is_opening: self._cover_is_last_toggle_direction_open = True - return STATE_OPENING + return CoverState.OPENING if self.is_closing: self._cover_is_last_toggle_direction_open = False - return STATE_CLOSING + return CoverState.CLOSING if (closed := self.is_closed) is None: return None - return STATE_CLOSED if closed else STATE_OPEN + return CoverState.CLOSED if closed else CoverState.OPEN @final @property diff --git a/homeassistant/components/cover/device_condition.py b/homeassistant/components/cover/device_condition.py index 9c746284fe5be7..f1d89a0e1ebee2 100644 --- a/homeassistant/components/cover/device_condition.py +++ b/homeassistant/components/cover/device_condition.py @@ -12,10 +12,6 @@ CONF_DOMAIN, CONF_ENTITY_ID, CONF_TYPE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -27,7 +23,7 @@ from homeassistant.helpers.entity import get_supported_features from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN, CoverEntityFeature +from . import DOMAIN, CoverEntityFeature, CoverState # mypy: disallow-any-generics @@ -128,13 +124,13 @@ def async_condition_from_config( if config[CONF_TYPE] in STATE_CONDITION_TYPES: if config[CONF_TYPE] == "is_open": - state = STATE_OPEN + state = CoverState.OPEN elif config[CONF_TYPE] == "is_closed": - state = STATE_CLOSED + state = CoverState.CLOSED elif config[CONF_TYPE] == "is_opening": - state = STATE_OPENING + state = CoverState.OPENING elif config[CONF_TYPE] == "is_closing": - state = STATE_CLOSING + state = CoverState.CLOSING def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool: """Test if an entity is a certain state.""" diff --git a/homeassistant/components/cover/device_trigger.py b/homeassistant/components/cover/device_trigger.py index 302b1d4340a151..0f65ef80a7f45d 100644 --- a/homeassistant/components/cover/device_trigger.py +++ b/homeassistant/components/cover/device_trigger.py @@ -19,10 +19,6 @@ CONF_PLATFORM, CONF_TYPE, CONF_VALUE_TEMPLATE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry as er @@ -30,7 +26,7 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN, CoverEntityFeature +from . 
import DOMAIN, CoverEntityFeature, CoverState POSITION_TRIGGER_TYPES = {"position", "tilt_position"} STATE_TRIGGER_TYPES = {"opened", "closed", "opening", "closing"} @@ -147,13 +143,13 @@ async def async_attach_trigger( """Attach a trigger.""" if config[CONF_TYPE] in STATE_TRIGGER_TYPES: if config[CONF_TYPE] == "opened": - to_state = STATE_OPEN + to_state = CoverState.OPEN elif config[CONF_TYPE] == "closed": - to_state = STATE_CLOSED + to_state = CoverState.CLOSED elif config[CONF_TYPE] == "opening": - to_state = STATE_OPENING + to_state = CoverState.OPENING elif config[CONF_TYPE] == "closing": - to_state = STATE_CLOSING + to_state = CoverState.CLOSING state_config = { CONF_PLATFORM: "state", diff --git a/homeassistant/components/cover/reproduce_state.py b/homeassistant/components/cover/reproduce_state.py index 59f3df61795b04..307fe5f11bd223 100644 --- a/homeassistant/components/cover/reproduce_state.py +++ b/homeassistant/components/cover/reproduce_state.py @@ -15,10 +15,6 @@ SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import Context, HomeAssistant, State @@ -28,11 +24,17 @@ ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN, + CoverState, ) _LOGGER = logging.getLogger(__name__) -VALID_STATES = {STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING} +VALID_STATES = { + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, +} async def _async_reproduce_state( @@ -72,9 +74,9 @@ async def _async_reproduce_state( == state.attributes.get(ATTR_CURRENT_POSITION) ): # Open/Close - if state.state in [STATE_CLOSED, STATE_CLOSING]: + if state.state in [CoverState.CLOSED, CoverState.CLOSING]: service = SERVICE_CLOSE_COVER - elif state.state in [STATE_OPEN, STATE_OPENING]: + elif state.state in [CoverState.OPEN, CoverState.OPENING]: if ( ATTR_CURRENT_POSITION in cur_state.attributes and ATTR_CURRENT_POSITION in state.attributes diff --git a/homeassistant/components/crownstone/config_flow.py b/homeassistant/components/crownstone/config_flow.py index 0e707c0805a45a..bf6e92047147db 100644 --- a/homeassistant/components/crownstone/config_flow.py +++ b/homeassistant/components/crownstone/config_flow.py @@ -177,7 +177,7 @@ async def async_step_user( elif auth_error.type == "LOGIN_FAILED_EMAIL_NOT_VERIFIED": errors["base"] = "account_not_verified" except CrownstoneUnknownError: - errors["base"] = "unknown_error" + errors["base"] = "unknown" # show form again, with the errors if errors: @@ -213,18 +213,19 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Crownstone options.""" super().__init__(OPTIONS_FLOW, self.async_create_new_entry) - self.entry = config_entry - self.updated_options = config_entry.options.copy() + self.options = config_entry.options.copy() async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage Crownstone options.""" - self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][self.entry.entry_id].cloud + self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][ + self.config_entry.entry_id + ].cloud spheres = {sphere.name: sphere.cloud_id for sphere in self.cloud.cloud_data} - usb_path = self.entry.options.get(CONF_USB_PATH) - usb_sphere = self.entry.options.get(CONF_USB_SPHERE) + usb_path = self.config_entry.options.get(CONF_USB_PATH) + usb_sphere = self.config_entry.options.get(CONF_USB_SPHERE) 
options_schema = vol.Schema( {vol.Optional(CONF_USE_USB_OPTION, default=usb_path is not None): bool} @@ -243,14 +244,14 @@ async def async_step_init( if user_input[CONF_USE_USB_OPTION] and usb_path is None: return await self.async_step_usb_config() if not user_input[CONF_USE_USB_OPTION] and usb_path is not None: - self.updated_options[CONF_USB_PATH] = None - self.updated_options[CONF_USB_SPHERE] = None + self.options[CONF_USB_PATH] = None + self.options[CONF_USB_SPHERE] = None elif ( CONF_USB_SPHERE_OPTION in user_input and spheres[user_input[CONF_USB_SPHERE_OPTION]] != usb_sphere ): sphere_id = spheres[user_input[CONF_USB_SPHERE_OPTION]] - self.updated_options[CONF_USB_SPHERE] = sphere_id + self.options[CONF_USB_SPHERE] = sphere_id return self.async_create_new_entry() @@ -260,7 +261,7 @@ def async_create_new_entry(self) -> ConfigFlowResult: """Create a new entry.""" # these attributes will only change when a usb was configured if self.usb_path is not None and self.usb_sphere_id is not None: - self.updated_options[CONF_USB_PATH] = self.usb_path - self.updated_options[CONF_USB_SPHERE] = self.usb_sphere_id + self.options[CONF_USB_PATH] = self.usb_path + self.options[CONF_USB_SPHERE] = self.usb_sphere_id - return super().async_create_entry(title="", data=self.updated_options) + return super().async_create_entry(title="", data=self.options) diff --git a/homeassistant/components/daikin/climate.py b/homeassistant/components/daikin/climate.py index f1fc047311504d..39e92ab19216d6 100644 --- a/homeassistant/components/daikin/climate.py +++ b/homeassistant/components/daikin/climate.py @@ -159,6 +159,7 @@ async def _set(self, settings: dict[str, Any]) -> None: if values: await self.device.set(values) + await self.coordinator.async_refresh() @property def unique_id(self) -> str: @@ -261,6 +262,7 @@ async def async_set_preset_mode(self, preset_mode: str) -> None: await self.device.set_advanced_mode( HA_PRESET_TO_DAIKIN[PRESET_ECO], ATTR_STATE_OFF ) + await self.coordinator.async_refresh() @property def preset_modes(self) -> list[str]: @@ -275,9 +277,11 @@ def preset_modes(self) -> list[str]: async def async_turn_on(self) -> None: """Turn device on.""" await self.device.set({}) + await self.coordinator.async_refresh() async def async_turn_off(self) -> None: """Turn device off.""" await self.device.set( {HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE]: HA_STATE_TO_DAIKIN[HVACMode.OFF]} ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/daikin/switch.py b/homeassistant/components/daikin/switch.py index 23517d085d2f3d..669048ac45e1da 100644 --- a/homeassistant/components/daikin/switch.py +++ b/homeassistant/components/daikin/switch.py @@ -63,10 +63,12 @@ def is_on(self) -> bool: async def async_turn_on(self, **kwargs: Any) -> None: """Turn the zone on.""" await self.device.set_zone(self._zone_id, "zone_onoff", "1") + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the zone off.""" await self.device.set_zone(self._zone_id, "zone_onoff", "0") + await self.coordinator.async_refresh() class DaikinStreamerSwitch(DaikinEntity, SwitchEntity): @@ -88,10 +90,12 @@ def is_on(self) -> bool: async def async_turn_on(self, **kwargs: Any) -> None: """Turn the zone on.""" await self.device.set_streamer("on") + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the zone off.""" await self.device.set_streamer("off") + await self.coordinator.async_refresh() class DaikinToggleSwitch(DaikinEntity, 
SwitchEntity): @@ -112,7 +116,9 @@ def is_on(self) -> bool: async def async_turn_on(self, **kwargs: Any) -> None: """Turn the zone on.""" await self.device.set({}) + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the zone off.""" await self.device.set({DAIKIN_ATTR_MODE: "off"}) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/deako/manifest.json b/homeassistant/components/deako/manifest.json index e8f6f235107ec5..e3099439b9d18e 100644 --- a/homeassistant/components/deako/manifest.json +++ b/homeassistant/components/deako/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/deako", "iot_class": "local_polling", "loggers": ["pydeako"], - "requirements": ["pydeako==0.4.0"], + "requirements": ["pydeako==0.5.4"], "single_config_entry": true, "zeroconf": ["_deako._tcp.local."] } diff --git a/homeassistant/components/deconz/alarm_control_panel.py b/homeassistant/components/deconz/alarm_control_panel.py index 2f9bda6d5ed131..678e441a7a9a7e 100644 --- a/homeassistant/components/deconz/alarm_control_panel.py +++ b/homeassistant/components/deconz/alarm_control_panel.py @@ -13,18 +13,10 @@ DOMAIN as ALARM_CONTROl_PANEL_DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -32,16 +24,16 @@ from .hub import DeconzHub DECONZ_TO_ALARM_STATE = { - AncillaryControlPanel.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - AncillaryControlPanel.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - AncillaryControlPanel.ARMED_STAY: STATE_ALARM_ARMED_HOME, - AncillaryControlPanel.ARMING_AWAY: STATE_ALARM_ARMING, - AncillaryControlPanel.ARMING_NIGHT: STATE_ALARM_ARMING, - AncillaryControlPanel.ARMING_STAY: STATE_ALARM_ARMING, - AncillaryControlPanel.DISARMED: STATE_ALARM_DISARMED, - AncillaryControlPanel.ENTRY_DELAY: STATE_ALARM_PENDING, - AncillaryControlPanel.EXIT_DELAY: STATE_ALARM_PENDING, - AncillaryControlPanel.IN_ALARM: STATE_ALARM_TRIGGERED, + AncillaryControlPanel.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + AncillaryControlPanel.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + AncillaryControlPanel.ARMED_STAY: AlarmControlPanelState.ARMED_HOME, + AncillaryControlPanel.ARMING_AWAY: AlarmControlPanelState.ARMING, + AncillaryControlPanel.ARMING_NIGHT: AlarmControlPanelState.ARMING, + AncillaryControlPanel.ARMING_STAY: AlarmControlPanelState.ARMING, + AncillaryControlPanel.DISARMED: AlarmControlPanelState.DISARMED, + AncillaryControlPanel.ENTRY_DELAY: AlarmControlPanelState.PENDING, + AncillaryControlPanel.EXIT_DELAY: AlarmControlPanelState.PENDING, + AncillaryControlPanel.IN_ALARM: AlarmControlPanelState.TRIGGERED, } @@ -105,7 +97,7 @@ def async_update_callback(self) -> None: super().async_update_callback() @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the control panel.""" if self._device.panel in DECONZ_TO_ALARM_STATE: return DECONZ_TO_ALARM_STATE[self._device.panel] diff --git a/homeassistant/components/deconz/config_flow.py b/homeassistant/components/deconz/config_flow.py index 
d017e2c5c6535c..ed54701f656913 100644 --- a/homeassistant/components/deconz/config_flow.py +++ b/homeassistant/components/deconz/config_flow.py @@ -20,7 +20,6 @@ import voluptuous as vol from homeassistant.components import ssdp -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ( SOURCE_HASSIO, ConfigEntry, @@ -31,6 +30,7 @@ from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import ( CONF_ALLOW_CLIP_SENSOR, @@ -74,9 +74,11 @@ class DeconzFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> DeconzOptionsFlowHandler: """Get the options flow for this handler.""" - return DeconzOptionsFlowHandler(config_entry) + return DeconzOptionsFlowHandler() def __init__(self) -> None: """Initialize the deCONZ config flow.""" @@ -299,11 +301,6 @@ class DeconzOptionsFlowHandler(OptionsFlow): gateway: DeconzHub - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize deCONZ options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -315,8 +312,7 @@ async def async_step_deconz_devices( ) -> ConfigFlowResult: """Manage the deconz devices options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) schema_options = {} for option, default in ( diff --git a/homeassistant/components/deconz/device_trigger.py b/homeassistant/components/deconz/device_trigger.py index e31fdc66db2736..2aeeece3ac56ef 100644 --- a/homeassistant/components/deconz/device_trigger.py +++ b/homeassistant/components/deconz/device_trigger.py @@ -169,6 +169,30 @@ (CONF_LONG_RELEASE, CONF_BOTTOM_BUTTONS): {CONF_EVENT: 6003}, } +RODRET_REMOTE_MODEL = "RODRET Dimmer" +RODRET_REMOTE = { + (CONF_SHORT_RELEASE, CONF_TURN_ON): {CONF_EVENT: 1002}, + (CONF_LONG_PRESS, CONF_TURN_ON): {CONF_EVENT: 1001}, + (CONF_LONG_RELEASE, CONF_TURN_ON): {CONF_EVENT: 1003}, + (CONF_SHORT_RELEASE, CONF_TURN_OFF): {CONF_EVENT: 2002}, + (CONF_LONG_PRESS, CONF_TURN_OFF): {CONF_EVENT: 2001}, + (CONF_LONG_RELEASE, CONF_TURN_OFF): {CONF_EVENT: 2003}, +} + +SOMRIG_REMOTE_MODEL = "SOMRIG shortcut button" +SOMRIG_REMOTE = { + (CONF_SHORT_PRESS, CONF_BUTTON_1): {CONF_EVENT: 1000}, + (CONF_SHORT_RELEASE, CONF_BUTTON_1): {CONF_EVENT: 1002}, + (CONF_LONG_PRESS, CONF_BUTTON_1): {CONF_EVENT: 1001}, + (CONF_LONG_RELEASE, CONF_BUTTON_1): {CONF_EVENT: 1003}, + (CONF_DOUBLE_PRESS, CONF_BUTTON_1): {CONF_EVENT: 1004}, + (CONF_SHORT_PRESS, CONF_BUTTON_2): {CONF_EVENT: 2000}, + (CONF_SHORT_RELEASE, CONF_BUTTON_2): {CONF_EVENT: 2002}, + (CONF_LONG_PRESS, CONF_BUTTON_2): {CONF_EVENT: 2001}, + (CONF_LONG_RELEASE, CONF_BUTTON_2): {CONF_EVENT: 2003}, + (CONF_DOUBLE_PRESS, CONF_BUTTON_2): {CONF_EVENT: 2004}, +} + STYRBAR_REMOTE_MODEL = "Remote Control N2" STYRBAR_REMOTE = { (CONF_SHORT_RELEASE, CONF_DIM_UP): {CONF_EVENT: 1002}, @@ -600,6 +624,8 @@ HUE_TAP_REMOTE_MODEL: HUE_TAP_REMOTE, HUE_WALL_REMOTE_MODEL: HUE_WALL_REMOTE, FRIENDS_OF_HUE_SWITCH_MODEL: FRIENDS_OF_HUE_SWITCH, + RODRET_REMOTE_MODEL: 
RODRET_REMOTE, + SOMRIG_REMOTE_MODEL: SOMRIG_REMOTE, STYRBAR_REMOTE_MODEL: STYRBAR_REMOTE, SYMFONISK_SOUND_CONTROLLER_MODEL: SYMFONISK_SOUND_CONTROLLER, TRADFRI_ON_OFF_SWITCH_MODEL: TRADFRI_ON_OFF_SWITCH, diff --git a/homeassistant/components/deconz/light.py b/homeassistant/components/deconz/light.py index a15aeb5a05941f..95a97959d5b93a 100644 --- a/homeassistant/components/deconz/light.py +++ b/homeassistant/components/deconz/light.py @@ -39,7 +39,22 @@ DECONZ_GROUP = "is_deconz_group" EFFECT_TO_DECONZ = { EFFECT_COLORLOOP: LightEffect.COLOR_LOOP, - "None": LightEffect.NONE, + "none": LightEffect.NONE, + # Specific to Philips Hue + "candle": LightEffect.CANDLE, + "cosmos": LightEffect.COSMOS, + "enchant": LightEffect.ENCHANT, + "fire": LightEffect.FIRE, + "fireplace": LightEffect.FIREPLACE, + "glisten": LightEffect.GLISTEN, + "loop": LightEffect.LOOP, + "opal": LightEffect.OPAL, + "prism": LightEffect.PRISM, + "sparkle": LightEffect.SPARKLE, + "sunbeam": LightEffect.SUNBEAM, + "sunrise": LightEffect.SUNRISE, + "sunset": LightEffect.SUNSET, + "underwater": LightEffect.UNDERWATER, # Specific to Lidl christmas light "carnival": LightEffect.CARNIVAL, "collide": LightEffect.COLLIDE, @@ -208,8 +223,17 @@ def __init__(self, device: _LightDeviceT, hub: DeconzHub) -> None: if device.effect is not None: self._attr_supported_features |= LightEntityFeature.EFFECT self._attr_effect_list = [EFFECT_COLORLOOP] - if device.model_id in ("HG06467", "TS0601"): - self._attr_effect_list = XMAS_LIGHT_EFFECTS + + # For lights that report supported effects. + if isinstance(device, Light): + if device.supported_effects is not None: + self._attr_effect_list = [ + EFFECT_TO_DECONZ[el] + for el in device.supported_effects + if el in EFFECT_TO_DECONZ + ] + if device.model_id in ("HG06467", "TS0601"): + self._attr_effect_list = XMAS_LIGHT_EFFECTS @property def color_mode(self) -> str | None: diff --git a/homeassistant/components/deconz/manifest.json b/homeassistant/components/deconz/manifest.json index 2f58cacfa2ce2d..04aaa6bc3246c2 100644 --- a/homeassistant/components/deconz/manifest.json +++ b/homeassistant/components/deconz/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["pydeconz"], "quality_scale": "platinum", - "requirements": ["pydeconz==116"], + "requirements": ["pydeconz==118"], "ssdp": [ { "manufacturer": "Royal Philips Electronics", diff --git a/homeassistant/components/default_config/manifest.json b/homeassistant/components/default_config/manifest.json index cbadb704a427e2..8299fe43f0966e 100644 --- a/homeassistant/components/default_config/manifest.json +++ b/homeassistant/components/default_config/manifest.json @@ -9,10 +9,10 @@ "conversation", "dhcp", "energy", + "go2rtc", "history", "homeassistant_alerts", "logbook", - "map", "media_source", "mobile_app", "my", diff --git a/homeassistant/components/deluge/strings.json b/homeassistant/components/deluge/strings.json index b4654c4a48246a..6adde8ef7dfe20 100644 --- a/homeassistant/components/deluge/strings.json +++ b/homeassistant/components/deluge/strings.json @@ -17,10 +17,12 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": 
"[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/homeassistant/components/demo/alarm_control_panel.py b/homeassistant/components/demo/alarm_control_panel.py index f9b791668e89f6..d34830042d7e1a 100644 --- a/homeassistant/components/demo/alarm_control_panel.py +++ b/homeassistant/components/demo/alarm_control_panel.py @@ -4,20 +4,10 @@ import datetime +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.manual.alarm_control_panel import ManualAlarm from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_ARMING_TIME, - CONF_DELAY_TIME, - CONF_TRIGGER_TIME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import CONF_ARMING_TIME, CONF_DELAY_TIME, CONF_TRIGGER_TIME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -39,36 +29,36 @@ async def async_setup_entry( True, False, { - STATE_ALARM_ARMED_AWAY: { + AlarmControlPanelState.ARMED_AWAY: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_HOME: { + AlarmControlPanelState.ARMED_HOME: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_NIGHT: { + AlarmControlPanelState.ARMED_NIGHT: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_VACATION: { + AlarmControlPanelState.ARMED_VACATION: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_DISARMED: { + AlarmControlPanelState.DISARMED: { CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_ARMED_CUSTOM_BYPASS: { + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: { CONF_ARMING_TIME: datetime.timedelta(seconds=5), CONF_DELAY_TIME: datetime.timedelta(seconds=0), CONF_TRIGGER_TIME: datetime.timedelta(seconds=10), }, - STATE_ALARM_TRIGGERED: { + AlarmControlPanelState.TRIGGERED: { CONF_ARMING_TIME: datetime.timedelta(seconds=5) }, }, diff --git a/homeassistant/components/demo/config_flow.py b/homeassistant/components/demo/config_flow.py index c866873732c5de..53c1678aa818a7 100644 --- a/homeassistant/components/demo/config_flow.py +++ b/homeassistant/components/demo/config_flow.py @@ -39,9 +39,6 @@ def async_get_options_flow( async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Demo", data=import_data) @@ -50,7 +47,6 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) async def async_step_init( diff --git a/homeassistant/components/demo/manifest.json 
b/homeassistant/components/demo/manifest.json index 887a82a0078c9c..be3456b561963a 100644 --- a/homeassistant/components/demo/manifest.json +++ b/homeassistant/components/demo/manifest.json @@ -5,5 +5,6 @@ "dependencies": ["conversation", "group", "zone"], "documentation": "https://www.home-assistant.io/integrations/demo", "iot_class": "calculated", - "quality_scale": "internal" + "quality_scale": "internal", + "single_config_entry": true } diff --git a/homeassistant/components/demo/update.py b/homeassistant/components/demo/update.py index 7e53f5ce8ca63c..3fa037f6b02868 100644 --- a/homeassistant/components/demo/update.py +++ b/homeassistant/components/demo/update.py @@ -75,6 +75,21 @@ async def async_setup_entry( support_release_notes=True, release_url="https://www.example.com/release/1.93.3", device_class=UpdateDeviceClass.FIRMWARE, + update_steps=10, + ), + DemoUpdate( + unique_id="update_support_decimal_progress", + device_name="Demo Update with Decimal Progress", + title="Philips Lamps Firmware", + installed_version="1.93.3", + latest_version="1.94.2", + support_progress=True, + release_summary="Added support for effects", + support_release_notes=True, + release_url="https://www.example.com/release/1.93.3", + device_class=UpdateDeviceClass.FIRMWARE, + display_precision=2, + update_steps=1000, ), ] ) @@ -106,10 +121,13 @@ def __init__( support_install: bool = True, support_release_notes: bool = False, device_class: UpdateDeviceClass | None = None, + display_precision: int = 0, + update_steps: int = 100, ) -> None: """Initialize the Demo select entity.""" self._attr_installed_version = installed_version self._attr_device_class = device_class + self._attr_display_precision = display_precision self._attr_latest_version = latest_version self._attr_release_summary = release_summary self._attr_release_url = release_url @@ -119,6 +137,7 @@ def __init__( identifiers={(DOMAIN, unique_id)}, name=device_name, ) + self._update_steps = update_steps if support_install: self._attr_supported_features |= ( UpdateEntityFeature.INSTALL @@ -136,12 +155,14 @@ async def async_install( ) -> None: """Install an update.""" if self.supported_features & UpdateEntityFeature.PROGRESS: - for progress in range(0, 100, 10): - self._attr_in_progress = progress + self._attr_in_progress = True + for progress in range(0, self._update_steps, 1): + self._attr_update_percentage = progress / (self._update_steps / 100) self.async_write_ha_state() await _fake_install() self._attr_in_progress = False + self._attr_update_percentage = None self._attr_installed_version = ( version if version is not None else self.latest_version ) diff --git a/homeassistant/components/denonavr/config_flow.py b/homeassistant/components/denonavr/config_flow.py index 9a7d2a30438d29..9ff0541158833f 100644 --- a/homeassistant/components/denonavr/config_flow.py +++ b/homeassistant/components/denonavr/config_flow.py @@ -52,10 +52,6 @@ class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -119,7 +115,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/denonavr/receiver.py 
b/homeassistant/components/denonavr/receiver.py index ebe09f518fb1bc..cbafe35cfc58a2 100644 --- a/homeassistant/components/denonavr/receiver.py +++ b/homeassistant/components/denonavr/receiver.py @@ -3,9 +3,11 @@ from __future__ import annotations from collections.abc import Callable +import contextlib import logging from denonavr import DenonAVR +from denonavr.exceptions import AvrProcessingError import httpx _LOGGER = logging.getLogger(__name__) @@ -94,7 +96,8 @@ async def async_init_receiver_class(self) -> None: # Do an initial update if telnet is used. if self._use_telnet: for zone in receiver.zones.values(): - await zone.async_update() + with contextlib.suppress(AvrProcessingError): + await zone.async_update() if self._update_audyssey: await zone.async_update_audyssey() await receiver.async_telnet_connect() diff --git a/homeassistant/components/derivative/sensor.py b/homeassistant/components/derivative/sensor.py index be27201bda9024..77ce5169d8d857 100644 --- a/homeassistant/components/derivative/sensor.py +++ b/homeassistant/components/derivative/sensor.py @@ -5,7 +5,6 @@ from datetime import datetime, timedelta from decimal import Decimal, DecimalException import logging -from typing import TYPE_CHECKING import voluptuous as vol @@ -162,7 +161,7 @@ def __init__( self._attr_device_info = device_info self._sensor_source_id = source_entity self._round_digits = round_digits - self._state: float | int | Decimal = 0 + self._attr_native_value = round(Decimal(0), round_digits) # List of tuples with (timestamp_start, timestamp_end, derivative) self._state_list: list[tuple[datetime, datetime, Decimal]] = [] @@ -190,7 +189,10 @@ async def async_added_to_hass(self) -> None: restored_data.native_unit_of_measurement ) try: - self._state = Decimal(restored_data.native_value) # type: ignore[arg-type] + self._attr_native_value = round( + Decimal(restored_data.native_value), # type: ignore[arg-type] + self._round_digits, + ) except SyntaxError as err: _LOGGER.warning("Could not restore last state: %s", err) @@ -270,12 +272,11 @@ def calculate_weight( if elapsed_time > self._time_window: derivative = new_derivative else: - derivative = Decimal(0) + derivative = Decimal(0.00) for start, end, value in self._state_list: weight = calculate_weight(start, end, new_state.last_updated) derivative = derivative + (value * Decimal(weight)) - - self._state = derivative + self._attr_native_value = round(derivative, self._round_digits) self.async_write_ha_state() self.async_on_remove( @@ -283,11 +284,3 @@ def calculate_weight( self.hass, self._sensor_source_id, calc_derivative ) ) - - @property - def native_value(self) -> float | int | Decimal: - """Return the state of the sensor.""" - value = round(self._state, self._round_digits) - if TYPE_CHECKING: - assert isinstance(value, (float, int, Decimal)) - return value diff --git a/homeassistant/components/devolo_home_network/__init__.py b/homeassistant/components/devolo_home_network/__init__.py index f8a0f015543caf..70a9453143162b 100644 --- a/homeassistant/components/devolo_home_network/__init__.py +++ b/homeassistant/components/devolo_home_network/__init__.py @@ -2,6 +2,7 @@ from __future__ import annotations +from asyncio import Semaphore from dataclasses import dataclass import logging from typing import Any @@ -32,7 +33,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.httpx_client import get_async_client -from 
homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.helpers.update_coordinator import UpdateFailed from .const import ( CONNECTED_PLC_DEVICES, @@ -47,6 +48,7 @@ SWITCH_GUEST_WIFI, SWITCH_LEDS, ) +from .coordinator import DevoloDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -58,7 +60,7 @@ class DevoloHomeNetworkData: """The devolo Home Network data.""" device: Device - coordinators: dict[str, DataUpdateCoordinator[Any]] + coordinators: dict[str, DevoloDataUpdateCoordinator[Any]] async def async_setup_entry( @@ -68,6 +70,7 @@ async def async_setup_entry( zeroconf_instance = await zeroconf.async_get_async_instance(hass) async_client = get_async_client(hass) device_registry = dr.async_get(hass) + semaphore = Semaphore(1) try: device = Device( @@ -163,58 +166,72 @@ async def disconnect(event: Event) -> None: """Disconnect from device.""" await device.async_disconnect() - coordinators: dict[str, DataUpdateCoordinator[Any]] = {} + coordinators: dict[str, DevoloDataUpdateCoordinator[Any]] = {} if device.plcnet: - coordinators[CONNECTED_PLC_DEVICES] = DataUpdateCoordinator( + coordinators[CONNECTED_PLC_DEVICES] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=CONNECTED_PLC_DEVICES, + semaphore=semaphore, update_method=async_update_connected_plc_devices, update_interval=LONG_UPDATE_INTERVAL, ) if device.device and "led" in device.device.features: - coordinators[SWITCH_LEDS] = DataUpdateCoordinator( + coordinators[SWITCH_LEDS] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=SWITCH_LEDS, + semaphore=semaphore, update_method=async_update_led_status, update_interval=SHORT_UPDATE_INTERVAL, ) if device.device and "restart" in device.device.features: - coordinators[LAST_RESTART] = DataUpdateCoordinator( + coordinators[LAST_RESTART] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=LAST_RESTART, + semaphore=semaphore, update_method=async_update_last_restart, update_interval=SHORT_UPDATE_INTERVAL, ) if device.device and "update" in device.device.features: - coordinators[REGULAR_FIRMWARE] = DataUpdateCoordinator( + coordinators[REGULAR_FIRMWARE] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=REGULAR_FIRMWARE, + semaphore=semaphore, update_method=async_update_firmware_available, update_interval=FIRMWARE_UPDATE_INTERVAL, ) if device.device and "wifi1" in device.device.features: - coordinators[CONNECTED_WIFI_CLIENTS] = DataUpdateCoordinator( + coordinators[CONNECTED_WIFI_CLIENTS] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=CONNECTED_WIFI_CLIENTS, + semaphore=semaphore, update_method=async_update_wifi_connected_station, update_interval=SHORT_UPDATE_INTERVAL, ) - coordinators[NEIGHBORING_WIFI_NETWORKS] = DataUpdateCoordinator( + coordinators[NEIGHBORING_WIFI_NETWORKS] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=NEIGHBORING_WIFI_NETWORKS, + semaphore=semaphore, update_method=async_update_wifi_neighbor_access_points, update_interval=LONG_UPDATE_INTERVAL, ) - coordinators[SWITCH_GUEST_WIFI] = DataUpdateCoordinator( + coordinators[SWITCH_GUEST_WIFI] = DevoloDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=SWITCH_GUEST_WIFI, + semaphore=semaphore, update_method=async_update_guest_wifi_status, update_interval=SHORT_UPDATE_INTERVAL, ) diff --git a/homeassistant/components/devolo_home_network/binary_sensor.py b/homeassistant/components/devolo_home_network/binary_sensor.py index 
c96d0273a50e12..5752956ffb565c 100644 --- a/homeassistant/components/devolo_home_network/binary_sensor.py +++ b/homeassistant/components/devolo_home_network/binary_sensor.py @@ -15,13 +15,13 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import DevoloHomeNetworkConfigEntry from .const import CONNECTED_PLC_DEVICES, CONNECTED_TO_ROUTER +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 def _is_connected_to_router(entity: DevoloBinarySensorEntity) -> bool: @@ -78,7 +78,7 @@ class DevoloBinarySensorEntity( def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[LogicalNetwork], + coordinator: DevoloDataUpdateCoordinator[LogicalNetwork], description: DevoloBinarySensorEntityDescription, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/button.py b/homeassistant/components/devolo_home_network/button.py index ca17b5725222cd..06822ff199e171 100644 --- a/homeassistant/components/devolo_home_network/button.py +++ b/homeassistant/components/devolo_home_network/button.py @@ -22,7 +22,7 @@ from .const import DOMAIN, IDENTIFY, PAIRING, RESTART, START_WPS from .entity import DevoloEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/devolo_home_network/coordinator.py b/homeassistant/components/devolo_home_network/coordinator.py new file mode 100644 index 00000000000000..c0af9668279d75 --- /dev/null +++ b/homeassistant/components/devolo_home_network/coordinator.py @@ -0,0 +1,41 @@ +"""Base coordinator.""" + +from asyncio import Semaphore +from collections.abc import Awaitable, Callable +from datetime import timedelta +from logging import Logger + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + + +class DevoloDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): + """Class to manage fetching data from devolo Home Network devices.""" + + def __init__( + self, + hass: HomeAssistant, + logger: Logger, + *, + config_entry: ConfigEntry, + name: str, + semaphore: Semaphore, + update_interval: timedelta, + update_method: Callable[[], Awaitable[_DataT]], + ) -> None: + """Initialize global data updater.""" + super().__init__( + hass, + logger, + config_entry=config_entry, + name=name, + update_interval=update_interval, + update_method=update_method, + ) + self._semaphore = semaphore + + async def _async_update_data(self) -> _DataT: + """Fetch the latest data from the source.""" + async with self._semaphore: + return await super()._async_update_data() diff --git a/homeassistant/components/devolo_home_network/device_tracker.py b/homeassistant/components/devolo_home_network/device_tracker.py index d372ba3d468894..583f022df8413e 100644 --- a/homeassistant/components/devolo_home_network/device_tracker.py +++ b/homeassistant/components/devolo_home_network/device_tracker.py @@ -13,15 +13,13 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, 
- DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import DevoloHomeNetworkConfigEntry from .const import CONNECTED_WIFI_CLIENTS, DOMAIN, WIFI_APTYPE, WIFI_BANDS +from .coordinator import DevoloDataUpdateCoordinator -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 async def async_setup_entry( @@ -31,7 +29,7 @@ async def async_setup_entry( ) -> None: """Get all devices and sensors and setup them via config entry.""" device = entry.runtime_data.device - coordinators: dict[str, DataUpdateCoordinator[list[ConnectedStationInfo]]] = ( + coordinators: dict[str, DevoloDataUpdateCoordinator[list[ConnectedStationInfo]]] = ( entry.runtime_data.coordinators ) registry = er.async_get(hass) @@ -51,7 +49,7 @@ def new_device_callback() -> None: ) ) tracked.add(station.mac_address) - async_add_entities(new_entities) + async_add_entities(new_entities) @callback def restore_entities() -> None: @@ -83,14 +81,16 @@ def restore_entities() -> None: ) -class DevoloScannerEntity( - CoordinatorEntity[DataUpdateCoordinator[list[ConnectedStationInfo]]], ScannerEntity +# The pylint disable is needed because of https://github.com/pylint-dev/pylint/issues/9138 +class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module + CoordinatorEntity[DevoloDataUpdateCoordinator[list[ConnectedStationInfo]]], + ScannerEntity, ): """Representation of a devolo device tracker.""" def __init__( self, - coordinator: DataUpdateCoordinator[list[ConnectedStationInfo]], + coordinator: DevoloDataUpdateCoordinator[list[ConnectedStationInfo]], device: Device, mac: str, ) -> None: diff --git a/homeassistant/components/devolo_home_network/entity.py b/homeassistant/components/devolo_home_network/entity.py index d381f48ca058a1..93ec1b9a3a247b 100644 --- a/homeassistant/components/devolo_home_network/entity.py +++ b/homeassistant/components/devolo_home_network/entity.py @@ -9,15 +9,14 @@ ) from devolo_plc_api.plcnet_api import DataRate, LogicalNetwork +from homeassistant.const import ATTR_CONNECTIONS from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import DevoloHomeNetworkConfigEntry from .const import DOMAIN +from .coordinator import DevoloDataUpdateCoordinator type _DataType = ( LogicalNetwork @@ -45,7 +44,6 @@ def __init__( self._attr_device_info = DeviceInfo( configuration_url=f"http://{self.device.ip}", - connections={(CONNECTION_NETWORK_MAC, self.device.mac)}, identifiers={(DOMAIN, str(self.device.serial_number))}, manufacturer="devolo", model=self.device.product, @@ -53,6 +51,10 @@ def __init__( serial_number=self.device.serial_number, sw_version=self.device.firmware_version, ) + if self.device.mac: + self._attr_device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, self.device.mac) + } self._attr_translation_key = self.entity_description.key self._attr_unique_id = ( f"{self.device.serial_number}_{self.entity_description.key}" @@ -60,14 +62,14 @@ def __init__( class DevoloCoordinatorEntity[_DataT: _DataType]( - CoordinatorEntity[DataUpdateCoordinator[_DataT]], DevoloEntity + CoordinatorEntity[DevoloDataUpdateCoordinator[_DataT]], DevoloEntity ): """Representation of a coordinated devolo home network device.""" def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[_DataT], + coordinator: DevoloDataUpdateCoordinator[_DataT], ) -> None: """Initialize a devolo home network device.""" super().__init__(coordinator) diff --git a/homeassistant/components/devolo_home_network/image.py b/homeassistant/components/devolo_home_network/image.py index 58052d3021e7c9..240686ed3bb87b 100644 --- a/homeassistant/components/devolo_home_network/image.py +++ b/homeassistant/components/devolo_home_network/image.py @@ -13,14 +13,14 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator import homeassistant.util.dt as dt_util from . import DevoloHomeNetworkConfigEntry from .const import IMAGE_GUEST_WIFI, SWITCH_GUEST_WIFI +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -66,7 +66,7 @@ class DevoloImageEntity(DevoloCoordinatorEntity[WifiGuestAccessGet], ImageEntity def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[WifiGuestAccessGet], + coordinator: DevoloDataUpdateCoordinator[WifiGuestAccessGet], description: DevoloImageEntityDescription, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/sensor.py b/homeassistant/components/devolo_home_network/sensor.py index 667bbc2c55705a..220ab66312a4d7 100644 --- a/homeassistant/components/devolo_home_network/sensor.py +++ b/homeassistant/components/devolo_home_network/sensor.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta from enum import StrEnum -from typing import Any, Generic, TypeVar +from typing import Any from devolo_plc_api.device_api import ConnectedStationInfo, NeighborAPInfo from devolo_plc_api.plcnet_api import REMOTE, DataRate, LogicalNetwork @@ -20,7 +20,6 @@ from homeassistant.const import EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.util.dt import utcnow from . 
import DevoloHomeNetworkConfigEntry @@ -32,9 +31,10 @@ PLC_RX_RATE, PLC_TX_RATE, ) +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 def _last_restart(runtime: int) -> datetime: @@ -47,26 +47,10 @@ def _last_restart(runtime: int) -> datetime: ) -_CoordinatorDataT = TypeVar( - "_CoordinatorDataT", - bound=LogicalNetwork - | DataRate - | list[ConnectedStationInfo] - | list[NeighborAPInfo] - | int, -) -_ValueDataT = TypeVar( - "_ValueDataT", - bound=LogicalNetwork - | DataRate - | list[ConnectedStationInfo] - | list[NeighborAPInfo] - | int, -) -_SensorDataT = TypeVar( - "_SensorDataT", - bound=int | float | datetime, +type _CoordinatorDataType = ( + LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo] | int ) +type _SensorDataType = int | float | datetime class DataRateDirection(StrEnum): @@ -77,9 +61,10 @@ class DataRateDirection(StrEnum): @dataclass(frozen=True, kw_only=True) -class DevoloSensorEntityDescription( - SensorEntityDescription, Generic[_CoordinatorDataT, _SensorDataT] -): +class DevoloSensorEntityDescription[ + _CoordinatorDataT: _CoordinatorDataType, + _SensorDataT: _SensorDataType, +](SensorEntityDescription): """Describes devolo sensor entity.""" value_func: Callable[[_CoordinatorDataT], _SensorDataT] @@ -200,8 +185,11 @@ async def async_setup_entry( async_add_entities(entities) -class BaseDevoloSensorEntity( - Generic[_CoordinatorDataT, _ValueDataT, _SensorDataT], +class BaseDevoloSensorEntity[ + _CoordinatorDataT: _CoordinatorDataType, + _ValueDataT: _CoordinatorDataType, + _SensorDataT: _SensorDataType, +]( DevoloCoordinatorEntity[_CoordinatorDataT], SensorEntity, ): @@ -210,7 +198,7 @@ class BaseDevoloSensorEntity( def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[_CoordinatorDataT], + coordinator: DevoloDataUpdateCoordinator[_CoordinatorDataT], description: DevoloSensorEntityDescription[_ValueDataT, _SensorDataT], ) -> None: """Initialize entity.""" @@ -218,9 +206,11 @@ def __init__( super().__init__(entry, coordinator) -class DevoloSensorEntity( - BaseDevoloSensorEntity[_CoordinatorDataT, _CoordinatorDataT, _SensorDataT] -): +class DevoloSensorEntity[ + _CoordinatorDataT: _CoordinatorDataType, + _ValueDataT: _CoordinatorDataType, + _SensorDataT: _SensorDataType, +](BaseDevoloSensorEntity[_CoordinatorDataT, _ValueDataT, _SensorDataT]): """Representation of a generic devolo sensor.""" entity_description: DevoloSensorEntityDescription[_CoordinatorDataT, _SensorDataT] @@ -241,7 +231,7 @@ class DevoloPlcDataRateSensorEntity( def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[LogicalNetwork], + coordinator: DevoloDataUpdateCoordinator[LogicalNetwork], description: DevoloSensorEntityDescription[DataRate, float], peer: str, ) -> None: diff --git a/homeassistant/components/devolo_home_network/switch.py b/homeassistant/components/devolo_home_network/switch.py index c3400916d78ea2..8ff35dcc4b6233 100644 --- a/homeassistant/components/devolo_home_network/switch.py +++ b/homeassistant/components/devolo_home_network/switch.py @@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass -from typing import Any, Generic, TypeVar +from typing import Any from devolo_plc_api.device import Device from devolo_plc_api.device_api import WifiGuestAccessGet @@ -15,19 +15,19 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import 
HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . import DevoloHomeNetworkConfigEntry from .const import DOMAIN, SWITCH_GUEST_WIFI, SWITCH_LEDS +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 -_DataT = TypeVar("_DataT", bound=WifiGuestAccessGet | bool) +type _DataType = WifiGuestAccessGet | bool @dataclass(frozen=True, kw_only=True) -class DevoloSwitchEntityDescription(SwitchEntityDescription, Generic[_DataT]): +class DevoloSwitchEntityDescription[_DataT: _DataType](SwitchEntityDescription): """Describes devolo switch entity.""" is_on_func: Callable[[_DataT], bool] @@ -81,7 +81,9 @@ async def async_setup_entry( async_add_entities(entities) -class DevoloSwitchEntity(DevoloCoordinatorEntity[_DataT], SwitchEntity): +class DevoloSwitchEntity[_DataT: _DataType]( + DevoloCoordinatorEntity[_DataT], SwitchEntity +): """Representation of a devolo switch.""" entity_description: DevoloSwitchEntityDescription[_DataT] @@ -89,7 +91,7 @@ class DevoloSwitchEntity(DevoloCoordinatorEntity[_DataT], SwitchEntity): def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator[_DataT], + coordinator: DevoloDataUpdateCoordinator[_DataT], description: DevoloSwitchEntityDescription[_DataT], ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/update.py b/homeassistant/components/devolo_home_network/update.py index 29c0c8762b9076..5091ce8e1e7a83 100644 --- a/homeassistant/components/devolo_home_network/update.py +++ b/homeassistant/components/devolo_home_network/update.py @@ -20,13 +20,13 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from . 
import DevoloHomeNetworkConfigEntry from .const import DOMAIN, REGULAR_FIRMWARE +from .coordinator import DevoloDataUpdateCoordinator from .entity import DevoloCoordinatorEntity -PARALLEL_UPDATES = 1 +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -79,7 +79,7 @@ class DevoloUpdateEntity(DevoloCoordinatorEntity, UpdateEntity): def __init__( self, entry: DevoloHomeNetworkConfigEntry, - coordinator: DataUpdateCoordinator, + coordinator: DevoloDataUpdateCoordinator, description: DevoloUpdateEntityDescription, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/dexcom/__init__.py b/homeassistant/components/dexcom/__init__.py index 5ff95fae47ebaf..b9a3bdba12d955 100644 --- a/homeassistant/components/dexcom/__init__.py +++ b/homeassistant/components/dexcom/__init__.py @@ -46,6 +46,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator[GlucoseReading]( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index c3ed43c8e9ad66..c5c830dedf69d5 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -69,16 +69,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> DexcomOptionsFlowHandler: """Get the options flow for this handler.""" - return DexcomOptionsFlowHandler(config_entry) + return DexcomOptionsFlowHandler() class DexcomOptionsFlowHandler(OptionsFlow): """Handle a option flow for Dexcom.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/dlna_dmr/config_flow.py b/homeassistant/components/dlna_dmr/config_flow.py index 06ac935e8d9dbb..75f50192500d44 100644 --- a/homeassistant/components/dlna_dmr/config_flow.py +++ b/homeassistant/components/dlna_dmr/config_flow.py @@ -74,7 +74,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Define the config flow to handle options.""" - return DlnaDmrOptionsFlowHandler(config_entry) + return DlnaDmrOptionsFlowHandler() async def async_step_user(self, user_input: FlowInput = None) -> ConfigFlowResult: """Handle a flow initialized by the user. @@ -327,10 +327,6 @@ class DlnaDmrOptionsFlowHandler(OptionsFlow): Configures the single instance and updates the existing config entry. 
""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/dnsip/config_flow.py b/homeassistant/components/dnsip/config_flow.py index 6dda0c03910216..8c2cfa5e556d0c 100644 --- a/homeassistant/components/dnsip/config_flow.py +++ b/homeassistant/components/dnsip/config_flow.py @@ -14,7 +14,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_NAME, CONF_PORT from homeassistant.core import callback @@ -101,7 +101,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> DnsIPOptionsFlowHandler: """Return Option handler.""" - return DnsIPOptionsFlowHandler(config_entry) + return DnsIPOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -165,7 +165,7 @@ async def async_step_user( ) -class DnsIPOptionsFlowHandler(OptionsFlowWithConfigEntry): +class DnsIPOptionsFlowHandler(OptionsFlow): """Handle a option config flow for dnsip integration.""" async def async_step_init( diff --git a/homeassistant/components/dnsip/strings.json b/homeassistant/components/dnsip/strings.json index bc502776cc64b4..39a0fbf7cd3613 100644 --- a/homeassistant/components/dnsip/strings.json +++ b/homeassistant/components/dnsip/strings.json @@ -11,6 +11,9 @@ } } }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, "error": { "invalid_hostname": "Invalid hostname" } diff --git a/homeassistant/components/doods/manifest.json b/homeassistant/components/doods/manifest.json index fabb2c301904e6..7c85ca634670a1 100644 --- a/homeassistant/components/doods/manifest.json +++ b/homeassistant/components/doods/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/doods", "iot_class": "local_polling", "loggers": ["pydoods"], - "requirements": ["pydoods==1.0.2", "Pillow==10.4.0"] + "requirements": ["pydoods==1.0.2", "Pillow==11.0.0"] } diff --git a/homeassistant/components/doorbird/config_flow.py b/homeassistant/components/doorbird/config_flow.py index 650ddb8811dc1d..ebb1d6fc126059 100644 --- a/homeassistant/components/doorbird/config_flow.py +++ b/homeassistant/components/doorbird/config_flow.py @@ -213,16 +213,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for doorbird.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/doorbird/manifest.json b/homeassistant/components/doorbird/manifest.json index 153f552b6984fd..8480a4967629f9 100644 --- a/homeassistant/components/doorbird/manifest.json +++ b/homeassistant/components/doorbird/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/doorbird", "iot_class": "local_push", "loggers": ["doorbirdpy"], - "requirements": ["DoorBirdPy==3.0.4"], + "requirements": ["DoorBirdPy==3.0.8"], "zeroconf": [ { "type": "_axis-video._tcp.local.", diff --git a/homeassistant/components/dormakaba_dkey/__init__.py 
b/homeassistant/components/dormakaba_dkey/__init__.py index a8868e8563cef3..b4304e75aabb9e 100644 --- a/homeassistant/components/dormakaba_dkey/__init__.py +++ b/homeassistant/components/dormakaba_dkey/__init__.py @@ -69,6 +69,7 @@ async def _async_update() -> None: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=lock.name, update_method=_async_update, update_interval=timedelta(seconds=UPDATE_SECONDS), diff --git a/homeassistant/components/dsmr/config_flow.py b/homeassistant/components/dsmr/config_flow.py index 49e1818edcc382..7d6a641b00690c 100644 --- a/homeassistant/components/dsmr/config_flow.py +++ b/homeassistant/components/dsmr/config_flow.py @@ -171,9 +171,11 @@ class DSMRFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> DSMROptionFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> DSMROptionFlowHandler: """Get the options flow for this handler.""" - return DSMROptionFlowHandler(config_entry) + return DSMROptionFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -311,10 +313,6 @@ async def async_validate_dsmr( class DSMROptionFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -328,7 +326,7 @@ async def async_step_init( { vol.Optional( CONF_TIME_BETWEEN_UPDATE, - default=self.entry.options.get( + default=self.config_entry.options.get( CONF_TIME_BETWEEN_UPDATE, DEFAULT_TIME_BETWEEN_UPDATE ), ): vol.All(vol.Coerce(int), vol.Range(min=0)), diff --git a/homeassistant/components/duotecno/config_flow.py b/homeassistant/components/duotecno/config_flow.py index ca95726542fb1b..51b92d4673a0d7 100644 --- a/homeassistant/components/duotecno/config_flow.py +++ b/homeassistant/components/duotecno/config_flow.py @@ -34,9 +34,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors: dict[str, str] = {} if user_input is not None: try: diff --git a/homeassistant/components/duotecno/manifest.json b/homeassistant/components/duotecno/manifest.json index 8f8740ddfdf4e9..2a427e36e8425b 100644 --- a/homeassistant/components/duotecno/manifest.json +++ b/homeassistant/components/duotecno/manifest.json @@ -7,5 +7,6 @@ "iot_class": "local_push", "loggers": ["pyduotecno", "pyduotecno-node", "pyduotecno-unit"], "quality_scale": "silver", - "requirements": ["pyDuotecno==2024.9.0"] + "requirements": ["pyDuotecno==2024.10.1"], + "single_config_entry": true } diff --git a/homeassistant/components/duotecno/strings.json b/homeassistant/components/duotecno/strings.json index a5585c3dd2c890..7f7c156768dd20 100644 --- a/homeassistant/components/duotecno/strings.json +++ b/homeassistant/components/duotecno/strings.json @@ -5,7 +5,8 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "port": "[%key:common::config_flow::data::port%]" }, "data_description": { "host": "The hostname or IP address of your Duotecno device." 
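
Illustrative sketch (not part of the patch): several handlers in this changeset (deconz, demo, denonavr, dexcom, dlna_dmr, dnsip, doorbird, dsmr) drop their explicit `__init__(config_entry)` and have `async_get_options_flow` return the handler with no arguments, relying on the `config_entry` the options-flow framework supplies (as the dsmr handler above does with `self.config_entry.options`). The sketch below shows that pattern in isolation; `ExampleConfigFlow`, `ExampleOptionsFlowHandler`, the `"example"` domain, and `CONF_INTERVAL` are hypothetical names used only for illustration.

from __future__ import annotations

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
)
from homeassistant.core import callback

CONF_INTERVAL = "interval"  # hypothetical option key


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Config flow for a hypothetical integration."""

    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: ConfigEntry,
    ) -> ExampleOptionsFlowHandler:
        """Return the options flow; the entry is no longer passed in."""
        return ExampleOptionsFlowHandler()


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow relying on the framework-provided config_entry."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the current options and merge the user's changes into them."""
        if user_input is not None:
            # Same merge style as the deconz handler above.
            return self.async_create_entry(
                data=self.config_entry.options | user_input
            )
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        CONF_INTERVAL,
                        default=self.config_entry.options.get(CONF_INTERVAL, 30),
                    ): int
                }
            ),
        )
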
@@ -15,8 +16,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "entity": { diff --git a/homeassistant/components/dwd_weather_warnings/coordinator.py b/homeassistant/components/dwd_weather_warnings/coordinator.py index 5570562568529d..8cf3813a85dbeb 100644 --- a/homeassistant/components/dwd_weather_warnings/coordinator.py +++ b/homeassistant/components/dwd_weather_warnings/coordinator.py @@ -37,8 +37,8 @@ def __init__(self, hass: HomeAssistant) -> None: self._device_tracker = None self._previous_position = None - async def async_config_entry_first_refresh(self) -> None: - """Perform first refresh.""" + async def _async_setup(self) -> None: + """Set up coordinator.""" if region_identifier := self.config_entry.data.get(CONF_REGION_IDENTIFIER): self.api = await self.hass.async_add_executor_job( DwdWeatherWarningsAPI, region_identifier @@ -48,8 +48,6 @@ async def async_config_entry_first_refresh(self) -> None: CONF_REGION_DEVICE_TRACKER ) - await super().async_config_entry_first_refresh() - async def _async_update_data(self) -> None: """Get the latest data from the DWD Weather Warnings API.""" if self._device_tracker: diff --git a/homeassistant/components/eafm/__init__.py b/homeassistant/components/eafm/__init__.py index 1f95437484fec5..dc618a983f3fed 100644 --- a/homeassistant/components/eafm/__init__.py +++ b/homeassistant/components/eafm/__init__.py @@ -48,6 +48,7 @@ async def _async_update_data() -> dict[str, dict[str, Any]]: coordinator = DataUpdateCoordinator[dict[str, dict[str, Any]]]( hass, _LOGGER, + config_entry=entry, name="sensor", update_method=_async_update_data, update_interval=timedelta(seconds=15 * 60), diff --git a/homeassistant/components/ecobee/__init__.py b/homeassistant/components/ecobee/__init__.py index 6f032fbaae9f3a..54af6c0f8012a8 100644 --- a/homeassistant/components/ecobee/__init__.py +++ b/homeassistant/components/ecobee/__init__.py @@ -6,15 +6,14 @@ import voluptuous as vol from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform +from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, discovery +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from homeassistant.util import Throttle from .const import ( _LOGGER, - ATTR_CONFIG_ENTRY_ID, CONF_REFRESH_TOKEN, DATA_ECOBEE_CONFIG, DATA_HASS_CONFIG, @@ -73,18 +72,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - # The legacy Ecobee notify.notify service is deprecated - # was with HA Core 2024.5.0 and will be removed with HA core 2024.11.0 - hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - {CONF_NAME: entry.title, ATTR_CONFIG_ENTRY_ID: entry.entry_id}, - hass.data[DATA_HASS_CONFIG], - ) - ) - return True diff --git a/homeassistant/components/ecobee/climate.py b/homeassistant/components/ecobee/climate.py index e6801998e0da1e..6a9ec0d5db9390 100644 --- a/homeassistant/components/ecobee/climate.py +++ 
b/homeassistant/components/ecobee/climate.py @@ -32,7 +32,8 @@ UnitOfTemperature, ) from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import entity_platform +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr, entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -41,6 +42,8 @@ from . import EcobeeData from .const import ( _LOGGER, + ATTR_ACTIVE_SENSORS, + ATTR_AVAILABLE_SENSORS, DOMAIN, ECOBEE_AUX_HEAT_ONLY, ECOBEE_MODEL_TO_NAME, @@ -62,6 +65,8 @@ ATTR_MIC_ENABLED = "mic_enabled" ATTR_AUTO_AWAY = "auto_away" ATTR_FOLLOW_ME = "follow_me" +ATTR_SENSOR_LIST = "device_ids" +ATTR_PRESET_MODE = "preset_mode" DEFAULT_RESUME_ALL = False PRESET_AWAY_INDEFINITELY = "away_indefinitely" @@ -129,6 +134,7 @@ SERVICE_SET_DST_MODE = "set_dst_mode" SERVICE_SET_MIC_MODE = "set_mic_mode" SERVICE_SET_OCCUPANCY_MODES = "set_occupancy_modes" +SERVICE_SET_SENSORS_USED_IN_CLIMATE = "set_sensors_used_in_climate" DTGROUP_START_INCLUSIVE_MSG = ( f"{ATTR_START_DATE} and {ATTR_START_TIME} must be specified together" @@ -217,7 +223,7 @@ async def async_setup_entry( thermostat["name"], thermostat["modelNumber"], ) - entities.append(Thermostat(data, index, thermostat)) + entities.append(Thermostat(data, index, thermostat, hass)) async_add_entities(entities, True) @@ -327,6 +333,15 @@ def resume_program_set_service(service: ServiceCall) -> None: "set_occupancy_modes", ) + platform.async_register_entity_service( + SERVICE_SET_SENSORS_USED_IN_CLIMATE, + { + vol.Optional(ATTR_PRESET_MODE): cv.string, + vol.Required(ATTR_SENSOR_LIST): cv.ensure_list, + }, + "set_sensors_used_in_climate", + ) + class Thermostat(ClimateEntity): """A thermostat class for Ecobee.""" @@ -342,7 +357,11 @@ class Thermostat(ClimateEntity): _attr_translation_key = "ecobee" def __init__( - self, data: EcobeeData, thermostat_index: int, thermostat: dict + self, + data: EcobeeData, + thermostat_index: int, + thermostat: dict, + hass: HomeAssistant, ) -> None: """Initialize the thermostat.""" self.data = data @@ -352,6 +371,7 @@ def __init__( self.vacation = None self._last_active_hvac_mode = HVACMode.HEAT_COOL self._last_hvac_mode_before_aux_heat = HVACMode.HEAT_COOL + self._hass = hass self._attr_hvac_modes = [] if self.settings["heatStages"] or self.settings["hasHeatPump"]: @@ -361,7 +381,11 @@ def __init__( if len(self._attr_hvac_modes) == 2: self._attr_hvac_modes.insert(0, HVACMode.HEAT_COOL) self._attr_hvac_modes.append(HVACMode.OFF) - + self._sensors = self.remote_sensors + self._preset_modes = { + comfort["climateRef"]: comfort["name"] + for comfort in self.thermostat["program"]["climates"] + } self.update_without_throttle = False async def async_update(self) -> None: @@ -552,6 +576,8 @@ def hvac_action(self): return HVACAction.IDLE + _unrecorded_attributes = frozenset({ATTR_AVAILABLE_SENSORS, ATTR_ACTIVE_SENSORS}) + @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return device specific state attributes.""" @@ -563,8 +589,62 @@ def extra_state_attributes(self) -> dict[str, Any] | None: ), "equipment_running": status, "fan_min_on_time": self.settings["fanMinOnTime"], + ATTR_AVAILABLE_SENSORS: self.remote_sensor_devices, + ATTR_ACTIVE_SENSORS: self.active_sensor_devices_in_preset_mode, } + @property + def remote_sensors(self) -> list: + """Return the remote sensor names of the 
thermostat.""" + sensors_info = self.thermostat.get("remoteSensors", []) + return [sensor["name"] for sensor in sensors_info if sensor.get("name")] + + @property + def remote_sensor_devices(self) -> list: + """Return the remote sensor device name_by_user or name for the thermostat.""" + return sorted( + [ + f'{item["name_by_user"]} ({item["id"]})' + for item in self.remote_sensor_ids_names + ] + ) + + @property + def remote_sensor_ids_names(self) -> list: + """Return the remote sensor device id and name_by_user for the thermostat.""" + sensors_info = self.thermostat.get("remoteSensors", []) + device_registry = dr.async_get(self._hass) + + return [ + { + "id": device.id, + "name_by_user": device.name_by_user + if device.name_by_user + else device.name, + } + for device in device_registry.devices.values() + for sensor_info in sensors_info + if device.name == sensor_info["name"] + ] + + @property + def active_sensors_in_preset_mode(self) -> list: + """Return the currently active/participating sensors.""" + # https://support.ecobee.com/s/articles/SmartSensors-Sensor-Participation + # During a manual hold, the ecobee will follow the Sensor Participation + # rules for the Home Comfort Settings + mode = self._preset_modes.get(self.preset_mode, "Home") + return self._sensors_in_preset_mode(mode) + + @property + def active_sensor_devices_in_preset_mode(self) -> list: + """Return the currently active/participating sensor devices.""" + # https://support.ecobee.com/s/articles/SmartSensors-Sensor-Participation + # During a manual hold, the ecobee will follow the Sensor Participation + # rules for the Home Comfort Settings + mode = self._preset_modes.get(self.preset_mode, "Home") + return self._sensor_devices_in_preset_mode(mode) + def set_preset_mode(self, preset_mode: str) -> None: """Activate a preset.""" preset_mode = HASS_TO_ECOBEE_PRESET.get(preset_mode, preset_mode) @@ -741,6 +821,115 @@ def resume_program(self, resume_all): ) self.update_without_throttle = True + def set_sensors_used_in_climate( + self, device_ids: list[str], preset_mode: str | None = None + ) -> None: + """Set the sensors used on a climate for a thermostat.""" + if preset_mode is None: + preset_mode = self.preset_mode + + # Check if climate is an available preset option. + elif preset_mode not in self._preset_modes.values(): + if self.preset_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_preset", + translation_placeholders={ + "options": ", ".join(self._preset_modes.values()) + }, + ) + + # Get device name from device id. + device_registry = dr.async_get(self.hass) + sensor_names: list[str] = [] + sensor_ids: list[str] = [] + for device_id in device_ids: + device = device_registry.async_get(device_id) + if device and device.name: + r_sensors = self.thermostat.get("remoteSensors", []) + ecobee_identifier = next( + ( + identifier + for identifier in device.identifiers + if identifier[0] == "ecobee" + ), + None, + ) + if ecobee_identifier: + code = ecobee_identifier[1] + for r_sensor in r_sensors: + if ( # occurs if remote sensor + len(code) == 4 and r_sensor.get("code") == code + ) or ( # occurs if thermostat + len(code) != 4 and r_sensor.get("type") == "thermostat" + ): + sensor_ids.append(r_sensor.get("id")) # noqa: PERF401 + sensor_names.append(device.name) + + # Ensure sensors provided are available for thermostat or not empty. 
+ if not set(sensor_names).issubset(set(self._sensors)) or not sensor_names: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_sensor", + translation_placeholders={ + "options": ", ".join( + [ + f'{item["name_by_user"]} ({item["id"]})' + for item in self.remote_sensor_ids_names + ] + ) + }, + ) + + # Check that an id was found for each sensor + if len(device_ids) != len(sensor_ids): + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="sensor_lookup_failed" + ) + + # Check if sensors are currently used on the climate for the thermostat. + current_sensors_in_climate = self._sensors_in_preset_mode(preset_mode) + if set(sensor_names) == set(current_sensors_in_climate): + _LOGGER.debug( + "This action would not be an update, current sensors on climate (%s) are: %s", + preset_mode, + ", ".join(current_sensors_in_climate), + ) + return + + _LOGGER.debug( + "Setting sensors %s to be used on thermostat %s for program %s", + sensor_names, + self.device_info.get("name"), + preset_mode, + ) + self.data.ecobee.update_climate_sensors( + self.thermostat_index, preset_mode, sensor_ids=sensor_ids + ) + self.update_without_throttle = True + + def _sensors_in_preset_mode(self, preset_mode: str | None) -> list[str]: + """Return current sensors used in climate.""" + climates = self.thermostat["program"]["climates"] + for climate in climates: + if climate.get("name") == preset_mode: + return [sensor["name"] for sensor in climate["sensors"]] + + return [] + + def _sensor_devices_in_preset_mode(self, preset_mode: str | None) -> list[str]: + """Return current sensor device name_by_user or name used in climate.""" + device_registry = dr.async_get(self._hass) + sensor_names = self._sensors_in_preset_mode(preset_mode) + return sorted( + [ + device.name_by_user if device.name_by_user else device.name + for device in device_registry.devices.values() + for sensor_name in sensor_names + if device.name == sensor_name + ] + ) + def hold_preference(self): """Return user preference setting for hold time.""" # Values returned from thermostat are: diff --git a/homeassistant/components/ecobee/config_flow.py b/homeassistant/components/ecobee/config_flow.py index f7709c68d915f8..687d9173a66818 100644 --- a/homeassistant/components/ecobee/config_flow.py +++ b/homeassistant/components/ecobee/config_flow.py @@ -29,10 +29,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self._async_current_entries(): - # Config entry already exists, only one allowed. 
- return self.async_abort(reason="single_instance_allowed") - errors = {} stored_api_key = ( self.hass.data[DATA_ECOBEE_CONFIG].get(CONF_API_KEY) diff --git a/homeassistant/components/ecobee/const.py b/homeassistant/components/ecobee/const.py index 85a332f3c87b16..d0e9ba8e8e906c 100644 --- a/homeassistant/components/ecobee/const.py +++ b/homeassistant/components/ecobee/const.py @@ -23,6 +23,8 @@ DATA_ECOBEE_CONFIG = "ecobee_config" DATA_HASS_CONFIG = "ecobee_hass_config" ATTR_CONFIG_ENTRY_ID = "entry_id" +ATTR_AVAILABLE_SENSORS = "available_sensors" +ATTR_ACTIVE_SENSORS = "active_sensors" CONF_REFRESH_TOKEN = "refresh_token" diff --git a/homeassistant/components/ecobee/icons.json b/homeassistant/components/ecobee/icons.json index f24f1f7cfe5dc8..647a14dc5d5730 100644 --- a/homeassistant/components/ecobee/icons.json +++ b/homeassistant/components/ecobee/icons.json @@ -20,6 +20,9 @@ }, "set_occupancy_modes": { "service": "mdi:eye-settings" + }, + "set_sensors_used_in_climate": { + "service": "mdi:home-thermometer" } } } diff --git a/homeassistant/components/ecobee/manifest.json b/homeassistant/components/ecobee/manifest.json index 83dd18fdaa2df8..20b346b776b725 100644 --- a/homeassistant/components/ecobee/manifest.json +++ b/homeassistant/components/ecobee/manifest.json @@ -10,6 +10,7 @@ "iot_class": "cloud_polling", "loggers": ["pyecobee"], "requirements": ["python-ecobee-api==0.2.20"], + "single_config_entry": true, "zeroconf": [ { "type": "_ecobee._tcp.local." diff --git a/homeassistant/components/ecobee/notify.py b/homeassistant/components/ecobee/notify.py index 167233e4071a5f..28cfbebe506894 100644 --- a/homeassistant/components/ecobee/notify.py +++ b/homeassistant/components/ecobee/notify.py @@ -2,66 +2,16 @@ from __future__ import annotations -from functools import partial -from typing import Any - -from homeassistant.components.notify import ( - ATTR_TARGET, - BaseNotificationService, - NotifyEntity, - migrate_notify_issue, -) +from homeassistant.components.notify import NotifyEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import Ecobee, EcobeeData +from . 
import EcobeeData from .const import DOMAIN from .entity import EcobeeBaseEntity -def get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> EcobeeNotificationService | None: - """Get the Ecobee notification service.""" - if discovery_info is None: - return None - - data: EcobeeData = hass.data[DOMAIN] - return EcobeeNotificationService(data.ecobee) - - -class EcobeeNotificationService(BaseNotificationService): - """Implement the notification service for the Ecobee thermostat.""" - - def __init__(self, ecobee: Ecobee) -> None: - """Initialize the service.""" - self.ecobee = ecobee - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message and raise issue.""" - migrate_notify_issue( - self.hass, DOMAIN, "Ecobee", "2024.11.0", service_name=self._service_name - ) - await self.hass.async_add_executor_job( - partial(self.send_message, message, **kwargs) - ) - - def send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message.""" - targets = kwargs.get(ATTR_TARGET) - - if not targets: - raise ValueError("Missing required argument: target") - - for target in targets: - thermostat_index = int(target) - self.ecobee.send_message(thermostat_index, message) - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, diff --git a/homeassistant/components/ecobee/number.py b/homeassistant/components/ecobee/number.py index ab09407903db9b..ed3744bf11edd7 100644 --- a/homeassistant/components/ecobee/number.py +++ b/homeassistant/components/ecobee/number.py @@ -6,9 +6,14 @@ from dataclasses import dataclass import logging -from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfTime +from homeassistant.const import UnitOfTemperature, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -54,21 +59,30 @@ async def async_setup_entry( ) -> None: """Set up the ecobee thermostat number entity.""" data: EcobeeData = hass.data[DOMAIN] - _LOGGER.debug("Adding min time ventilators numbers (if present)") - async_add_entities( + assert data is not None + + entities: list[NumberEntity] = [ + EcobeeVentilatorMinTime(data, index, numbers) + for index, thermostat in enumerate(data.ecobee.thermostats) + if thermostat["settings"]["ventilatorType"] != "none" + for numbers in VENTILATOR_NUMBERS + ] + + _LOGGER.debug("Adding compressor min temp number (if present)") + entities.extend( ( - EcobeeVentilatorMinTime(data, index, numbers) + EcobeeCompressorMinTemp(data, index) for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["ventilatorType"] != "none" - for numbers in VENTILATOR_NUMBERS - ), - True, + if thermostat["settings"]["hasHeatPump"] + ) ) + async_add_entities(entities, True) + class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity): - """A number class, representing min time for an ecobee thermostat with ventilator attached.""" + """A number class, representing min time for an ecobee thermostat with ventilator attached.""" entity_description: EcobeeNumberEntityDescription @@ -105,3 +119,53 @@ def set_native_value(self, value: float) -> None: """Set new ventilator Min On Time value.""" 
self.entity_description.set_fn(self.data, self.thermostat_index, int(value)) self.update_without_throttle = True + + +class EcobeeCompressorMinTemp(EcobeeBaseEntity, NumberEntity): + """Minimum outdoor temperature at which the compressor will operate. + + This applies more to air source heat pumps than geothermal. This serves as a safety + feature (compressors have a minimum operating temperature) as well as + providing the ability to choose fuel in a dual-fuel system (i.e. choose between + electrical heat pump and fossil auxiliary heat depending on Time of Use, Solar, + etc.). + Note that python-ecobee-api refers to this as Aux Cutover Threshold, but Ecobee + uses Compressor Protection Min Temp. + """ + + _attr_device_class = NumberDeviceClass.TEMPERATURE + _attr_has_entity_name = True + _attr_icon = "mdi:thermometer-off" + _attr_mode = NumberMode.BOX + _attr_native_min_value = -25 + _attr_native_max_value = 66 + _attr_native_step = 5 + _attr_native_unit_of_measurement = UnitOfTemperature.FAHRENHEIT + _attr_translation_key = "compressor_protection_min_temp" + + def __init__( + self, + data: EcobeeData, + thermostat_index: int, + ) -> None: + """Initialize ecobee compressor min temperature.""" + super().__init__(data, thermostat_index) + self._attr_unique_id = f"{self.base_unique_id}_compressor_protection_min_temp" + self.update_without_throttle = False + + async def async_update(self) -> None: + """Get the latest state from the thermostat.""" + if self.update_without_throttle: + await self.data.update(no_throttle=True) + self.update_without_throttle = False + else: + await self.data.update() + + self._attr_native_value = ( + (self.thermostat["settings"]["compressorProtectionMinTemp"]) / 10 + ) + + def set_native_value(self, value: float) -> None: + """Set new compressor minimum temperature.""" + self.data.ecobee.set_aux_cutover_threshold(self.thermostat_index, value) + self.update_without_throttle = True diff --git a/homeassistant/components/ecobee/services.yaml b/homeassistant/components/ecobee/services.yaml index a184f4227253cc..d58ae81d55261c 100644 --- a/homeassistant/components/ecobee/services.yaml +++ b/homeassistant/components/ecobee/services.yaml @@ -134,3 +134,23 @@ set_occupancy_modes: follow_me: selector: boolean: + +set_sensors_used_in_climate: + target: + entity: + integration: ecobee + domain: climate + fields: + preset_mode: + example: "Home" + selector: + text: + device_ids: + required: true + selector: + device: + multiple: true + integration: ecobee + entity: + - domain: climate + - domain: sensor diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 2af6e5a90f9912..8c636bd9b04fec 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -33,15 +33,18 @@ }, "number": { "ventilator_min_type_home": { - "name": "Ventilator min time home" + "name": "Ventilator minimum time home" }, "ventilator_min_type_away": { - "name": "Ventilator min time away" + "name": "Ventilator minimum time away" + }, + "compressor_protection_min_temp": { + "name": "Compressor minimum temperature" } }, "switch": { "aux_heat_only": { - "name": "Aux heat only" + "name": "Auxiliary heat only" } } }, @@ -167,6 +170,35 @@ "description": "Enable Follow Me mode." 
} } + }, + "set_sensors_used_in_climate": { + "name": "Set Sensors Used in Climate", + "description": "Sets the participating sensors for a climate.", + "fields": { + "entity_id": { + "name": "Entity", + "description": "Ecobee thermostat on which to set active sensors." + }, + "preset_mode": { + "name": "Climate Name", + "description": "Name of the climate program to set the sensors active on.\nDefaults to currently active program." + }, + "device_ids": { + "name": "Sensors", + "description": "Sensors to set as participating sensors." + } + } + } + }, + "exceptions": { + "invalid_preset": { + "message": "Invalid climate name, available options are: {options}" + }, + "invalid_sensor": { + "message": "Invalid sensor for thermostat, available options are: {options}" + }, + "sensor_lookup_failed": { + "message": "There was an error getting the sensor ids from sensor names. Try reloading the ecobee integration." } }, "issues": { diff --git a/homeassistant/components/ecobee/switch.py b/homeassistant/components/ecobee/switch.py index 67be78fb21de3b..89ee433c072254 100644 --- a/homeassistant/components/ecobee/switch.py +++ b/homeassistant/components/ecobee/switch.py @@ -31,25 +31,26 @@ async def async_setup_entry( """Set up the ecobee thermostat switch entity.""" data: EcobeeData = hass.data[DOMAIN] - async_add_entities( - [ - EcobeeVentilator20MinSwitch( - data, - index, - (await dt_util.async_get_time_zone(thermostat["location"]["timeZone"])) - or dt_util.get_default_time_zone(), - ) + entities: list[SwitchEntity] = [ + EcobeeVentilator20MinSwitch( + data, + index, + (await dt_util.async_get_time_zone(thermostat["location"]["timeZone"])) + or dt_util.get_default_time_zone(), + ) + for index, thermostat in enumerate(data.ecobee.thermostats) + if thermostat["settings"]["ventilatorType"] != "none" + ] + + entities.extend( + ( + EcobeeSwitchAuxHeatOnly(data, index) for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["ventilatorType"] != "none" - ], - update_before_add=True, + if thermostat["settings"]["hasHeatPump"] + ) ) - async_add_entities( - EcobeeSwitchAuxHeatOnly(data, index) - for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["hasHeatPump"] - ) + async_add_entities(entities, update_before_add=True) class EcobeeVentilator20MinSwitch(EcobeeBaseEntity, SwitchEntity): diff --git a/homeassistant/components/econet/manifest.json b/homeassistant/components/econet/manifest.json index c96867b489b048..6586af92d1fc8d 100644 --- a/homeassistant/components/econet/manifest.json +++ b/homeassistant/components/econet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/econet", "iot_class": "cloud_push", "loggers": ["paho_mqtt", "pyeconet"], - "requirements": ["pyeconet==0.1.22"] + "requirements": ["pyeconet==0.1.23"] } diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 33977b3b0ded5e..0ab9f9a461271c 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.4.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.4.1"] } diff --git a/homeassistant/components/egardia/alarm_control_panel.py b/homeassistant/components/egardia/alarm_control_panel.py index 
706ba0db71976a..5a18a23541a13a 100644 --- a/homeassistant/components/egardia/alarm_control_panel.py +++ b/homeassistant/components/egardia/alarm_control_panel.py @@ -9,13 +9,7 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -33,13 +27,13 @@ _LOGGER = logging.getLogger(__name__) STATES = { - "ARM": STATE_ALARM_ARMED_AWAY, - "DAY HOME": STATE_ALARM_ARMED_HOME, - "DISARM": STATE_ALARM_DISARMED, - "ARMHOME": STATE_ALARM_ARMED_HOME, - "HOME": STATE_ALARM_ARMED_HOME, - "NIGHT HOME": STATE_ALARM_ARMED_NIGHT, - "TRIGGERED": STATE_ALARM_TRIGGERED, + "ARM": AlarmControlPanelState.ARMED_AWAY, + "DAY HOME": AlarmControlPanelState.ARMED_HOME, + "DISARM": AlarmControlPanelState.DISARMED, + "ARMHOME": AlarmControlPanelState.ARMED_HOME, + "HOME": AlarmControlPanelState.ARMED_HOME, + "NIGHT HOME": AlarmControlPanelState.ARMED_NIGHT, + "TRIGGERED": AlarmControlPanelState.TRIGGERED, } @@ -66,7 +60,6 @@ def setup_platform( class EgardiaAlarm(AlarmControlPanelEntity): """Representation of a Egardia alarm.""" - _attr_state: str | None _attr_code_arm_required = False _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME @@ -123,7 +116,7 @@ def parsestatus(self, status): _LOGGER.debug("Not ignoring status %s", status) newstatus = STATES.get(status.upper()) _LOGGER.debug("newstatus %s", newstatus) - self._attr_state = newstatus + self._attr_alarm_state = newstatus else: _LOGGER.error("Ignoring status") diff --git a/homeassistant/components/electric_kiwi/api.py b/homeassistant/components/electric_kiwi/api.py index 89109f01948189..dead8a6a3c0869 100644 --- a/homeassistant/components/electric_kiwi/api.py +++ b/homeassistant/components/electric_kiwi/api.py @@ -27,7 +27,6 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index b596ec05b00c52..227150a0f4e639 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -14,7 +14,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant @@ -103,13 +102,12 @@ def async_get_options_flow( return ElevenLabsOptionsFlow(config_entry) -class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): +class ElevenLabsOptionsFlow(OptionsFlow): """ElevenLabs options flow.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - self.api_key: str = self.config_entry.data[CONF_API_KEY] + self.api_key: str = config_entry.data[CONF_API_KEY] # id -> name self.voices: dict[str, str] = {} self.models: dict[str, str] = {} @@ -170,7 +168,7 @@ def elevenlabs_config_option_schema(self) -> vol.Schema: vol.Required(CONF_CONFIGURE_VOICE, default=False): bool, } ), - self.options, + 
self.config_entry.options, ) async def async_step_voice_settings( diff --git a/homeassistant/components/elkm1/alarm_control_panel.py b/homeassistant/components/elkm1/alarm_control_panel.py index f5437b6ed94dd3..f1ecf62626382f 100644 --- a/homeassistant/components/elkm1/alarm_control_panel.py +++ b/homeassistant/components/elkm1/alarm_control_panel.py @@ -15,17 +15,9 @@ ATTR_CHANGED_BY, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv @@ -125,7 +117,7 @@ def __init__(self, element: Element, elk: Elk, elk_data: ELKM1Data) -> None: self._changed_by_time: str | None = None self._changed_by_id: int | None = None self._changed_by: str | None = None - self._state: str | None = None + self._state: AlarmControlPanelState | None = None async def async_added_to_hass(self) -> None: """Register callback for ElkM1 changes.""" @@ -177,7 +169,7 @@ def code_format(self) -> CodeFormat | None: return CodeFormat.NUMBER @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the element.""" return self._state @@ -207,23 +199,25 @@ def changed_by(self) -> str | None: def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None: elk_state_to_hass_state = { - ArmedStatus.DISARMED: STATE_ALARM_DISARMED, - ArmedStatus.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - ArmedStatus.ARMED_STAY: STATE_ALARM_ARMED_HOME, - ArmedStatus.ARMED_STAY_INSTANT: STATE_ALARM_ARMED_HOME, - ArmedStatus.ARMED_TO_NIGHT: STATE_ALARM_ARMED_NIGHT, - ArmedStatus.ARMED_TO_NIGHT_INSTANT: STATE_ALARM_ARMED_NIGHT, - ArmedStatus.ARMED_TO_VACATION: STATE_ALARM_ARMED_AWAY, + ArmedStatus.DISARMED: AlarmControlPanelState.DISARMED, + ArmedStatus.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + ArmedStatus.ARMED_STAY: AlarmControlPanelState.ARMED_HOME, + ArmedStatus.ARMED_STAY_INSTANT: AlarmControlPanelState.ARMED_HOME, + ArmedStatus.ARMED_TO_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + ArmedStatus.ARMED_TO_NIGHT_INSTANT: AlarmControlPanelState.ARMED_NIGHT, + ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_AWAY, } if self._element.alarm_state is None: self._state = None elif self._element.in_alarm_state(): # Area is in alarm state - self._state = STATE_ALARM_TRIGGERED + self._state = AlarmControlPanelState.TRIGGERED elif self._entry_exit_timer_is_running(): self._state = ( - STATE_ALARM_ARMING if self._element.is_exit else STATE_ALARM_PENDING + AlarmControlPanelState.ARMING + if self._element.is_exit + else AlarmControlPanelState.PENDING ) elif self._element.armed_status is not None: self._state = elk_state_to_hass_state[self._element.armed_status] diff --git a/homeassistant/components/elmax/alarm_control_panel.py b/homeassistant/components/elmax/alarm_control_panel.py index 4162b1779754bb..841b94a3d72a7e 100644 --- a/homeassistant/components/elmax/alarm_control_panel.py +++ b/homeassistant/components/elmax/alarm_control_panel.py @@ -10,20 +10,13 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from 
homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError, InvalidStateError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from .const import DOMAIN from .coordinator import ElmaxCoordinator @@ -74,16 +67,16 @@ class ElmaxArea(ElmaxEntity, AlarmControlPanelEntity): _attr_code_arm_required = False _attr_has_entity_name = True _attr_supported_features = AlarmControlPanelEntityFeature.ARM_AWAY - _pending_state: str | None = None + _pending_state: AlarmControlPanelState | None = None async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - if self._attr_state == AlarmStatus.NOT_ARMED_NOT_ARMABLE: + if self._attr_alarm_state == AlarmStatus.NOT_ARMED_NOT_ARMABLE: raise InvalidStateError( f"Cannot arm {self.name}: please check for open windows/doors first" ) - self._pending_state = STATE_ALARM_ARMING + self._pending_state = AlarmControlPanelState.ARMING self.async_write_ha_state() try: @@ -107,7 +100,7 @@ async def async_alarm_disarm(self, code: str | None = None) -> None: if code is None or code == "": raise ValueError("Please input the disarm code.") - self._pending_state = STATE_ALARM_DISARMING + self._pending_state = AlarmControlPanelState.DISARMING self.async_write_ha_state() try: @@ -130,7 +123,7 @@ async def async_alarm_disarm(self, code: str | None = None) -> None: await self.coordinator.async_refresh() @property - def state(self) -> StateType: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the entity.""" if self._pending_state is not None: return self._pending_state @@ -151,10 +144,10 @@ def _handle_coordinator_update(self) -> None: ALARM_STATE_TO_HA = { - AlarmArmStatus.ARMED_TOTALLY: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.ARMED_P1_P2: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.ARMED_P2: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.ARMED_P1: STATE_ALARM_ARMED_AWAY, - AlarmArmStatus.NOT_ARMED: STATE_ALARM_DISARMED, - AlarmStatus.TRIGGERED: STATE_ALARM_TRIGGERED, + AlarmArmStatus.ARMED_TOTALLY: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.ARMED_P1_P2: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.ARMED_P2: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.ARMED_P1: AlarmControlPanelState.ARMED_AWAY, + AlarmArmStatus.NOT_ARMED: AlarmControlPanelState.DISARMED, + AlarmStatus.TRIGGERED: AlarmControlPanelState.TRIGGERED, } diff --git a/homeassistant/components/emoncms/__init__.py b/homeassistant/components/emoncms/__init__.py index 98ed6328578012..0cd686b5b56710 100644 --- a/homeassistant/components/emoncms/__init__.py +++ b/homeassistant/components/emoncms/__init__.py @@ -5,8 +5,11 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_URL, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from .const import DOMAIN, EMONCMS_UUID_DOC_URL, LOGGER from .coordinator import EmoncmsCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] @@ -14,6 +17,49 @@ type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator] +def _migrate_unique_id( + hass: HomeAssistant, 
entry: EmonCMSConfigEntry, emoncms_unique_id: str +) -> None: + """Migrate to emoncms unique id if needed.""" + ent_reg = er.async_get(hass) + entry_entities = ent_reg.entities.get_entries_for_config_entry_id(entry.entry_id) + for entity in entry_entities: + if entity.unique_id.split("-")[0] == entry.entry_id: + feed_id = entity.unique_id.split("-")[-1] + LOGGER.debug(f"moving feed {feed_id} to hardware uuid") + ent_reg.async_update_entity( + entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}" + ) + hass.config_entries.async_update_entry( + entry, + unique_id=emoncms_unique_id, + ) + + +async def _check_unique_id_migration( + hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_client: EmoncmsClient +) -> None: + """Check if we can migrate to the emoncms uuid.""" + emoncms_unique_id = await emoncms_client.async_get_uuid() + if emoncms_unique_id: + if entry.unique_id != emoncms_unique_id: + _migrate_unique_id(hass, entry, emoncms_unique_id) + else: + async_create_issue( + hass, + DOMAIN, + "migrate database", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="migrate_database", + translation_placeholders={ + "url": entry.data[CONF_URL], + "doc_url": EMONCMS_UUID_DOC_URL, + }, + ) + + async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool: """Load a config entry.""" emoncms_client = EmoncmsClient( @@ -21,6 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> b entry.data[CONF_API_KEY], session=async_get_clientsession(hass), ) + await _check_unique_id_migration(hass, entry, emoncms_client) coordinator = EmoncmsCoordinator(hass, emoncms_client) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/emoncms/config_flow.py b/homeassistant/components/emoncms/config_flow.py index fdd5d29788e1a6..e0d4d0d03e9fc8 100644 --- a/homeassistant/components/emoncms/config_flow.py +++ b/homeassistant/components/emoncms/config_flow.py @@ -1,5 +1,7 @@ """Configflow for the emoncms integration.""" +from __future__ import annotations + from typing import Any from pyemoncms import EmoncmsClient @@ -9,10 +11,10 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY, CONF_URL -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import selector from homeassistant.helpers.typing import ConfigType @@ -46,13 +48,10 @@ def sensor_name(url: str) -> str: return f"emoncms@{sensorip}" -async def get_feed_list(hass: HomeAssistant, url: str, api_key: str) -> dict[str, Any]: +async def get_feed_list( + emoncms_client: EmoncmsClient, +) -> dict[str, Any]: """Check connection to emoncms and return feed list if successful.""" - emoncms_client = EmoncmsClient( - url, - api_key, - session=async_get_clientsession(hass), - ) return await emoncms_client.async_request("/feed/list.json") @@ -68,7 +67,7 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlowWithConfigEntry: + ) -> EmoncmsOptionsFlow: """Get the options flow for this handler.""" return EmoncmsOptionsFlow(config_entry) @@ -77,23 +76,28 @@ async def async_step_user( ) -> ConfigFlowResult: """Initiate a flow via the UI.""" errors: dict[str, str] = {} + 
description_placeholders = {} if user_input is not None: + self.url = user_input[CONF_URL] + self.api_key = user_input[CONF_API_KEY] self._async_abort_entries_match( { - CONF_API_KEY: user_input[CONF_API_KEY], - CONF_URL: user_input[CONF_URL], + CONF_API_KEY: self.api_key, + CONF_URL: self.url, } ) - result = await get_feed_list( - self.hass, user_input[CONF_URL], user_input[CONF_API_KEY] + emoncms_client = EmoncmsClient( + self.url, self.api_key, session=async_get_clientsession(self.hass) ) + result = await get_feed_list(emoncms_client) if not result[CONF_SUCCESS]: - errors["base"] = result[CONF_MESSAGE] + errors["base"] = "api_error" + description_placeholders = {"details": result[CONF_MESSAGE]} else: self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID) - self.url = user_input[CONF_URL] - self.api_key = user_input[CONF_API_KEY] + await self.async_set_unique_id(await emoncms_client.async_get_uuid()) + self._abort_if_unique_id_configured() options = get_options(result[CONF_MESSAGE]) self.dropdown = { "options": options, @@ -113,6 +117,7 @@ async def async_step_user( user_input, ), errors=errors, + description_placeholders=description_placeholders, ) async def async_step_choose_feeds( @@ -167,32 +172,41 @@ async def async_step_import(self, import_info: ConfigType) -> ConfigFlowResult: return result -class EmoncmsOptionsFlow(OptionsFlowWithConfigEntry): +class EmoncmsOptionsFlow(OptionsFlow): """Emoncms Options flow handler.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize emoncms options flow.""" + self._url = config_entry.data[CONF_URL] + self._api_key = config_entry.data[CONF_API_KEY] + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage the options.""" errors: dict[str, str] = {} - data = self.options if self.options else self._config_entry.data - url = data[CONF_URL] - api_key = data[CONF_API_KEY] - include_only_feeds = data.get(CONF_ONLY_INCLUDE_FEEDID, []) + description_placeholders = {} + include_only_feeds = self.config_entry.options.get( + CONF_ONLY_INCLUDE_FEEDID, + self.config_entry.data.get(CONF_ONLY_INCLUDE_FEEDID, []), + ) options: list = include_only_feeds - result = await get_feed_list(self.hass, url, api_key) + emoncms_client = EmoncmsClient( + self._url, + self._api_key, + session=async_get_clientsession(self.hass), + ) + result = await get_feed_list(emoncms_client) if not result[CONF_SUCCESS]: - errors["base"] = result[CONF_MESSAGE] + errors["base"] = "api_error" + description_placeholders = {"details": result[CONF_MESSAGE]} else: options = get_options(result[CONF_MESSAGE]) dropdown = {"options": options, "mode": "dropdown", "multiple": True} if user_input: include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID] return self.async_create_entry( - title=sensor_name(url), data={ - CONF_URL: url, - CONF_API_KEY: api_key, CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, }, ) @@ -207,4 +221,5 @@ async def async_step_init( } ), errors=errors, + description_placeholders=description_placeholders, ) diff --git a/homeassistant/components/emoncms/const.py b/homeassistant/components/emoncms/const.py index 256db5726bbce0..c53f7cc8a9f5ba 100644 --- a/homeassistant/components/emoncms/const.py +++ b/homeassistant/components/emoncms/const.py @@ -7,6 +7,10 @@ CONF_MESSAGE = "message" CONF_SUCCESS = "success" DOMAIN = "emoncms" +EMONCMS_UUID_DOC_URL = ( + "https://docs.openenergymonitor.org/emoncms/update.html" + "#upgrading-to-a-version-producing-a-unique-identifier" +) FEED_ID = "id" FEED_NAME = 
"name" FEED_TAG = "tag" diff --git a/homeassistant/components/emoncms/manifest.json b/homeassistant/components/emoncms/manifest.json index f8f0f2edb95370..c7f18cb205ee6f 100644 --- a/homeassistant/components/emoncms/manifest.json +++ b/homeassistant/components/emoncms/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/emoncms", "iot_class": "local_polling", - "requirements": ["pyemoncms==0.0.7"] + "requirements": ["pyemoncms==0.1.1"] } diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index 4add7c9625d921..c696a569135658 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -138,29 +138,30 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the emoncms sensors.""" - config = entry.options if entry.options else entry.data - name = sensor_name(config[CONF_URL]) - exclude_feeds = config.get(CONF_EXCLUDE_FEEDID) - include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID) + name = sensor_name(entry.data[CONF_URL]) + exclude_feeds = entry.data.get(CONF_EXCLUDE_FEEDID) + include_only_feeds = entry.options.get( + CONF_ONLY_INCLUDE_FEEDID, entry.data.get(CONF_ONLY_INCLUDE_FEEDID) + ) if exclude_feeds is None and include_only_feeds is None: return coordinator = entry.runtime_data + # uuid was added in emoncms database 11.5.7 + unique_id = entry.unique_id if entry.unique_id else entry.entry_id elems = coordinator.data if not elems: return - sensors: list[EmonCmsSensor] = [] for idx, elem in enumerate(elems): if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds: continue - sensors.append( EmonCmsSensor( coordinator, - entry.entry_id, + unique_id, elem["unit"], name, idx, @@ -175,7 +176,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): def __init__( self, coordinator: EmoncmsCoordinator, - entry_id: str, + unique_id: str, unit_of_measurement: str | None, name: str, idx: int, @@ -188,7 +189,7 @@ def __init__( elem = self.coordinator.data[self.idx] self._attr_name = f"{name} {elem[FEED_NAME]}" self._attr_native_unit_of_measurement = unit_of_measurement - self._attr_unique_id = f"{entry_id}-{elem[FEED_ID]}" + self._attr_unique_id = f"{unique_id}-{elem[FEED_ID]}" if unit_of_measurement in ("kWh", "Wh"): self._attr_device_class = SensorDeviceClass.ENERGY self._attr_state_class = SensorStateClass.TOTAL_INCREASING diff --git a/homeassistant/components/emoncms/strings.json b/homeassistant/components/emoncms/strings.json index 4a700cc8981e07..0d841f2efb464c 100644 --- a/homeassistant/components/emoncms/strings.json +++ b/homeassistant/components/emoncms/strings.json @@ -1,5 +1,8 @@ { "config": { + "error": { + "api_error": "An error occured in the pyemoncms API : {details}" + }, "step": { "user": { "data": { @@ -16,9 +19,15 @@ "include_only_feed_id": "Choose feeds to include" } } + }, + "abort": { + "already_configured": "This server is already configured" } }, "options": { + "error": { + "api_error": "[%key:component::emoncms::config::error::api_error%]" + }, "step": { "init": { "data": { @@ -35,6 +44,10 @@ "missing_include_only_feed_id": { "title": "No feed synchronized with the {domain} sensor", "description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration." 
+ }, + "migrate_database": { + "title": "Upgrade your emoncms version", + "description": "Your [emoncms]({url}) does not ship a unique identifier.\n\n Please upgrade to at least version 11.5.7 and migrate your emoncms database.\n\n More info on [emoncms documentation]({doc_url})" } } } diff --git a/homeassistant/components/emonitor/__init__.py b/homeassistant/components/emonitor/__init__.py index 7506edae1d3202..4316487352b231 100644 --- a/homeassistant/components/emonitor/__init__.py +++ b/homeassistant/components/emonitor/__init__.py @@ -31,6 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonitorConfigEntry) -> coordinator = DataUpdateCoordinator[EmonitorStatus]( hass, _LOGGER, + config_entry=entry, name=entry.title, update_method=emonitor.async_get_status, update_interval=timedelta(seconds=DEFAULT_UPDATE_RATE), diff --git a/homeassistant/components/emulated_kasa/manifest.json b/homeassistant/components/emulated_kasa/manifest.json index 640a2113d6fa43..d4889c0c5f5bdc 100644 --- a/homeassistant/components/emulated_kasa/manifest.json +++ b/homeassistant/components/emulated_kasa/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_push", "loggers": ["sense_energy"], "quality_scale": "internal", - "requirements": ["sense-energy==0.12.4"] + "requirements": ["sense-energy==0.13.3"] } diff --git a/homeassistant/components/energy/data.py b/homeassistant/components/energy/data.py index 9c5a9fbacd1647..ff86177cf4120e 100644 --- a/homeassistant/components/energy/data.py +++ b/homeassistant/components/energy/data.py @@ -331,7 +331,7 @@ async def async_update(self, update: EnergyPreferencesUpdate) -> None: "device_consumption", ): if key in update: - data[key] = update[key] # type: ignore[literal-required] + data[key] = update[key] self.data = data self._store.async_delay_save(lambda: data, 60) diff --git a/homeassistant/components/energy/strings.json b/homeassistant/components/energy/strings.json index 4a9c1b4aacf940..e9d72247319377 100644 --- a/homeassistant/components/energy/strings.json +++ b/homeassistant/components/energy/strings.json @@ -56,6 +56,10 @@ "entity_state_class_measurement_no_last_reset": { "title": "Last reset missing", "description": "The following entities have state class 'measurement' but 'last_reset' is missing:" + }, + "statistics_not_defined": { + "title": "Statistics not defined", + "description": "Some entities currently have no statistics metadata. 
If these are newly created, it may take up to 5 minutes for this to be generated for the following entities:" } } } diff --git a/homeassistant/components/enocean/config_flow.py b/homeassistant/components/enocean/config_flow.py index fef633d94c33a5..2452d27b168e03 100644 --- a/homeassistant/components/enocean/config_flow.py +++ b/homeassistant/components/enocean/config_flow.py @@ -38,9 +38,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle an EnOcean config flow start.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return await self.async_step_detect() async def async_step_detect( diff --git a/homeassistant/components/enocean/manifest.json b/homeassistant/components/enocean/manifest.json index 495ab6618e3352..2faba47e1264db 100644 --- a/homeassistant/components/enocean/manifest.json +++ b/homeassistant/components/enocean/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/enocean", "iot_class": "local_push", "loggers": ["enocean"], - "requirements": ["enocean==0.50"] + "requirements": ["enocean==0.50"], + "single_config_entry": true } diff --git a/homeassistant/components/enocean/strings.json b/homeassistant/components/enocean/strings.json index 97da526185f34e..9d9699481b1a97 100644 --- a/homeassistant/components/enocean/strings.json +++ b/homeassistant/components/enocean/strings.json @@ -18,8 +18,7 @@ "invalid_dongle_path": "No valid dongle found for this path" }, "abort": { - "invalid_dongle_path": "Invalid dongle path", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "invalid_dongle_path": "Invalid dongle path" } } } diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index 344431c6ee670e..23c769293c809b 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -4,7 +4,6 @@ from collections.abc import Mapping import logging -from types import MappingProxyType from typing import TYPE_CHECKING, Any from awesomeversion import AwesomeVersion @@ -17,7 +16,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -58,7 +57,6 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 _reauth_entry: ConfigEntry - _reconnect_entry: ConfigEntry def __init__(self) -> None: """Initialize an envoy flow.""" @@ -68,9 +66,11 @@ def __init__(self) -> None: @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> EnvoyOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> EnvoyOptionsFlowHandler: """Options flow handler for Enphase_Envoy.""" - return EnvoyOptionsFlowHandler(config_entry) + return EnvoyOptionsFlowHandler() @callback def _async_generate_schema(self) -> vol.Schema: @@ -238,24 +238,14 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Add reconfigure step to allow to manually reconfigure a config entry.""" - self._reconnect_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Add reconfigure step to 
allow to manually reconfigure a config entry.""" + reconfigure_entry = self._get_reconfigure_entry() errors: dict[str, str] = {} description_placeholders: dict[str, str] = {} - suggested_values: dict[str, Any] | MappingProxyType[str, Any] = ( - user_input or self._reconnect_entry.data - ) - - host: Any = suggested_values.get(CONF_HOST) - username: Any = suggested_values.get(CONF_USERNAME) - password: Any = suggested_values.get(CONF_PASSWORD) if user_input is not None: + host: str = user_input[CONF_HOST] + username: str = user_input[CONF_USERNAME] + password: str = user_input[CONF_PASSWORD] try: envoy = await validate_input( self.hass, @@ -273,31 +263,25 @@ async def async_step_reconfigure_confirm( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self.unique_id != envoy.serial_number: - errors["base"] = "unexpected_envoy" - description_placeholders = { - "reason": f"target: {self.unique_id}, actual: {envoy.serial_number}" - } - else: - # If envoy exists in configuration update fields and exit - self._abort_if_unique_id_configured( - { - CONF_HOST: host, - CONF_USERNAME: username, - CONF_PASSWORD: password, - }, - error="reconfigure_successful", - ) - if not self.unique_id: - await self.async_set_unique_id(self._reconnect_entry.unique_id) + await self.async_set_unique_id(envoy.serial_number) + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={ + CONF_HOST: host, + CONF_USERNAME: username, + CONF_PASSWORD: password, + }, + ) self.context["title_placeholders"] = { - CONF_SERIAL: self.unique_id or "-", - CONF_HOST: host, + CONF_SERIAL: reconfigure_entry.unique_id or "-", + CONF_HOST: reconfigure_entry.data[CONF_HOST], } + suggested_values: Mapping[str, Any] = user_input or reconfigure_entry.data return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( self._async_generate_schema(), suggested_values ), @@ -306,7 +290,7 @@ async def async_step_reconfigure_confirm( ) -class EnvoyOptionsFlowHandler(OptionsFlowWithConfigEntry): +class EnvoyOptionsFlowHandler(OptionsFlow): """Envoy config flow options handler.""" async def async_step_init( diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index e848b68e39d590..2d91b3b0960c3c 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -13,7 +13,7 @@ "host": "The hostname or IP address of your Enphase Envoy gateway." 
} }, - "reconfigure_confirm": { + "reconfigure": { "description": "[%key:component::enphase_envoy::config::step::user::description%]", "data": { "host": "[%key:common::config_flow::data::host%]", @@ -28,12 +28,13 @@ "error": { "cannot_connect": "Cannot connect: {reason}", "invalid_auth": "Invalid authentication: {reason}", - "unexpected_envoy": "Unexpected Envoy: {reason}", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The serial number of the device does not match the previous serial number" } }, "options": { diff --git a/homeassistant/components/envisalink/alarm_control_panel.py b/homeassistant/components/envisalink/alarm_control_panel.py index 4ad9a927d9c375..ce65178b8d8fa7 100644 --- a/homeassistant/components/envisalink/alarm_control_panel.py +++ b/homeassistant/components/envisalink/alarm_control_panel.py @@ -9,20 +9,10 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_CODE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, - STATE_UNKNOWN, -) +from homeassistant.const import ATTR_ENTITY_ID, CONF_CODE from homeassistant.core import HomeAssistant, ServiceCall, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -144,24 +134,24 @@ def async_update_callback(self, partition): self.async_write_ha_state() @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" - state = STATE_UNKNOWN + state = None if self._info["status"]["alarm"]: - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED elif self._info["status"]["armed_zero_entry_delay"]: - state = STATE_ALARM_ARMED_NIGHT + state = AlarmControlPanelState.ARMED_NIGHT elif self._info["status"]["armed_away"]: - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif self._info["status"]["armed_stay"]: - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif self._info["status"]["exit_delay"]: - state = STATE_ALARM_ARMING + state = AlarmControlPanelState.ARMING elif self._info["status"]["entry_delay"]: - state = STATE_ALARM_PENDING + state = AlarmControlPanelState.PENDING elif self._info["status"]["alpha"]: - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED return state async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/eq3btsmart/__init__.py b/homeassistant/components/eq3btsmart/__init__.py index f63e627ea7dfb2..78296c70cef7d0 100644 --- a/homeassistant/components/eq3btsmart/__init__.py +++ b/homeassistant/components/eq3btsmart/__init__.py @@ -15,17 +15,21 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN, SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED +from 
.const import SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED from .models import Eq3Config, Eq3ConfigEntryData PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.CLIMATE, ] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type Eq3ConfigEntry = ConfigEntry[Eq3ConfigEntryData] + + +async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: """Handle config entry setup.""" mac_address: str | None = entry.unique_id @@ -53,12 +57,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ble_device=device, ) - eq3_config_entry = Eq3ConfigEntryData(eq3_config=eq3_config, thermostat=thermostat) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = eq3_config_entry - + entry.runtime_data = Eq3ConfigEntryData( + eq3_config=eq3_config, thermostat=thermostat + ) entry.async_on_unload(entry.add_update_listener(update_listener)) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_create_background_task( hass, _async_run_thermostat(hass, entry), entry.entry_id ) @@ -66,29 +69,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: """Handle config entry unload.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN].pop(entry.entry_id) - await eq3_config_entry.thermostat.async_disconnect() + await entry.runtime_data.thermostat.async_disconnect() return unload_ok -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: """Handle config entry update.""" await hass.config_entries.async_reload(entry.entry_id) -async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: """Run the thermostat.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] - thermostat = eq3_config_entry.thermostat - mac_address = eq3_config_entry.eq3_config.mac_address - scan_interval = eq3_config_entry.eq3_config.scan_interval + thermostat = entry.runtime_data.thermostat + mac_address = entry.runtime_data.eq3_config.mac_address + scan_interval = entry.runtime_data.eq3_config.scan_interval await _async_reconnect_thermostat(hass, entry) @@ -117,13 +118,14 @@ async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None await asyncio.sleep(scan_interval) -async def _async_reconnect_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_reconnect_thermostat( + hass: HomeAssistant, entry: Eq3ConfigEntry +) -> None: """Reconnect the thermostat.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] - thermostat = eq3_config_entry.thermostat - mac_address = eq3_config_entry.eq3_config.mac_address - scan_interval = eq3_config_entry.eq3_config.scan_interval + thermostat = entry.runtime_data.thermostat + mac_address = entry.runtime_data.eq3_config.mac_address + scan_interval = entry.runtime_data.eq3_config.scan_interval while True: try: diff --git a/homeassistant/components/eq3btsmart/binary_sensor.py b/homeassistant/components/eq3btsmart/binary_sensor.py new file mode 100644 index 
00000000000000..27525d47972da1 --- /dev/null +++ b/homeassistant/components/eq3btsmart/binary_sensor.py @@ -0,0 +1,86 @@ +"""Platform for eq3 binary sensor entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from eq3btsmart.models import Status + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import Eq3ConfigEntry +from .const import ENTITY_KEY_BATTERY, ENTITY_KEY_DST, ENTITY_KEY_WINDOW +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3BinarySensorEntityDescription(BinarySensorEntityDescription): + """Entity description for eq3 binary sensors.""" + + value_func: Callable[[Status], bool] + + +BINARY_SENSOR_ENTITY_DESCRIPTIONS = [ + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_low_battery, + key=ENTITY_KEY_BATTERY, + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_window_open, + key=ENTITY_KEY_WINDOW, + device_class=BinarySensorDeviceClass.WINDOW, + ), + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_dst, + key=ENTITY_KEY_DST, + translation_key=ENTITY_KEY_DST, + entity_category=EntityCategory.DIAGNOSTIC, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3BinarySensorEntity(entry, entity_description) + for entity_description in BINARY_SENSOR_ENTITY_DESCRIPTIONS + ) + + +class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity): + """Base class for eQ-3 binary sensor entities.""" + + entity_description: Eq3BinarySensorEntityDescription + + def __init__( + self, + entry: Eq3ConfigEntry, + entity_description: Eq3BinarySensorEntityDescription, + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + @property + def is_on(self) -> bool: + """Return the state of the binary sensor.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + + return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/eq3btsmart/climate.py b/homeassistant/components/eq3btsmart/climate.py index 7b8ccb6c990116..ae01d0fc9a7fc3 100644 --- a/homeassistant/components/eq3btsmart/climate.py +++ b/homeassistant/components/eq3btsmart/climate.py @@ -3,7 +3,6 @@ import logging from typing import Any -from eq3btsmart import Thermostat from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode from eq3btsmart.exceptions import Eq3Exception @@ -15,45 +14,35 @@ HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from 
homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import slugify +from . import Eq3ConfigEntry from .const import ( - DEVICE_MODEL, - DOMAIN, EQ_TO_HA_HVAC, HA_TO_EQ_HVAC, - MANUFACTURER, - SIGNAL_THERMOSTAT_CONNECTED, - SIGNAL_THERMOSTAT_DISCONNECTED, CurrentTemperatureSelector, Preset, TargetTemperatureSelector, ) from .entity import Eq3Entity -from .models import Eq3Config, Eq3ConfigEntryData _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: Eq3ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Handle config entry setup.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - [Eq3Climate(eq3_config_entry.eq3_config, eq3_config_entry.thermostat)], + [Eq3Climate(entry)], ) @@ -80,53 +69,6 @@ class Eq3Climate(Eq3Entity, ClimateEntity): _attr_preset_mode: str | None = None _target_temperature: float | None = None - def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: - """Initialize the climate entity.""" - - super().__init__(eq3_config, thermostat) - self._attr_unique_id = dr.format_mac(eq3_config.mac_address) - self._attr_device_info = DeviceInfo( - name=slugify(self._eq3_config.mac_address), - manufacturer=MANUFACTURER, - model=DEVICE_MODEL, - connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, - ) - - async def async_added_to_hass(self) -> None: - """Run when entity about to be added to hass.""" - - self._thermostat.register_update_callback(self._async_on_updated) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", - self._async_on_disconnected, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", - self._async_on_connected, - ) - ) - - async def async_will_remove_from_hass(self) -> None: - """Run when entity will be removed from hass.""" - - self._thermostat.unregister_update_callback(self._async_on_updated) - - @callback - def _async_on_disconnected(self) -> None: - self._attr_available = False - self.async_write_ha_state() - - @callback - def _async_on_connected(self) -> None: - self._attr_available = True - self.async_write_ha_state() - @callback def _async_on_updated(self) -> None: """Handle updated data from the thermostat.""" @@ -137,12 +79,15 @@ def _async_on_updated(self) -> None: if self._thermostat.device_data is not None: self._async_on_device_updated() - self.async_write_ha_state() + super()._async_on_updated() @callback def _async_on_status_updated(self) -> None: """Handle updated status from the thermostat.""" + if self._thermostat.status is None: + return + self._target_temperature = self._thermostat.status.target_temperature.value self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode] self._attr_current_temperature = self._get_current_temperature() @@ -154,13 +99,16 @@ def _async_on_status_updated(self) -> None: def _async_on_device_updated(self) -> None: """Handle updated device data from the thermostat.""" + if self._thermostat.device_data is None: + return + device_registry = dr.async_get(self.hass) if device := device_registry.async_get_device( connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, ): device_registry.async_update_device( device.id, - 
sw_version=self._thermostat.device_data.firmware_version, + sw_version=str(self._thermostat.device_data.firmware_version), serial_number=self._thermostat.device_data.device_serial.value, ) @@ -265,7 +213,7 @@ async def async_set_temperature(self, **kwargs: Any) -> None: self.async_write_ha_state() try: - await self._thermostat.async_set_temperature(self._target_temperature) + await self._thermostat.async_set_temperature(temperature) except Eq3Exception: _LOGGER.error( "[%s] Failed setting temperature", self._eq3_config.mac_address diff --git a/homeassistant/components/eq3btsmart/const.py b/homeassistant/components/eq3btsmart/const.py index 111c4d0eba47bc..33d8e6b3ceedcd 100644 --- a/homeassistant/components/eq3btsmart/const.py +++ b/homeassistant/components/eq3btsmart/const.py @@ -18,8 +18,11 @@ MANUFACTURER = "eQ-3 AG" DEVICE_MODEL = "CC-RT-BLE-EQ" -GET_DEVICE_TIMEOUT = 5 # seconds +ENTITY_KEY_DST = "dst" +ENTITY_KEY_BATTERY = "battery" +ENTITY_KEY_WINDOW = "window" +GET_DEVICE_TIMEOUT = 5 # seconds EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = { OperationMode.OFF: HVACMode.OFF, diff --git a/homeassistant/components/eq3btsmart/entity.py b/homeassistant/components/eq3btsmart/entity.py index e8c00d4e3cf36d..e68545c08c7e59 100644 --- a/homeassistant/components/eq3btsmart/entity.py +++ b/homeassistant/components/eq3btsmart/entity.py @@ -1,10 +1,22 @@ """Base class for all eQ-3 entities.""" -from eq3btsmart.thermostat import Thermostat - +from homeassistant.core import callback +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity +from homeassistant.util import slugify -from .models import Eq3Config +from . 
import Eq3ConfigEntry +from .const import ( + DEVICE_MODEL, + MANUFACTURER, + SIGNAL_THERMOSTAT_CONNECTED, + SIGNAL_THERMOSTAT_DISCONNECTED, +) class Eq3Entity(Entity): @@ -12,8 +24,70 @@ class Eq3Entity(Entity): _attr_has_entity_name = True - def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: + def __init__( + self, + entry: Eq3ConfigEntry, + unique_id_key: str | None = None, + ) -> None: """Initialize the eq3 entity.""" - self._eq3_config = eq3_config - self._thermostat = thermostat + self._eq3_config = entry.runtime_data.eq3_config + self._thermostat = entry.runtime_data.thermostat + self._attr_device_info = DeviceInfo( + name=slugify(self._eq3_config.mac_address), + manufacturer=MANUFACTURER, + model=DEVICE_MODEL, + connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, + ) + suffix = f"_{unique_id_key}" if unique_id_key else "" + self._attr_unique_id = f"{format_mac(self._eq3_config.mac_address)}{suffix}" + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + self._thermostat.register_update_callback(self._async_on_updated) + + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", + self._async_on_disconnected, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", + self._async_on_connected, + ) + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + + self._thermostat.unregister_update_callback(self._async_on_updated) + + def _async_on_updated(self) -> None: + """Handle updated data from the thermostat.""" + + self.async_write_ha_state() + + @callback + def _async_on_disconnected(self) -> None: + """Handle disconnection from the thermostat.""" + + self._attr_available = False + self.async_write_ha_state() + + @callback + def _async_on_connected(self) -> None: + """Handle connection to the thermostat.""" + + self._attr_available = True + self.async_write_ha_state() + + @property + def available(self) -> bool: + """Whether the entity is available.""" + + return self._thermostat.status is not None and self._attr_available diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json index 8c56e5ec598342..bd3f14939ca911 100644 --- a/homeassistant/components/eq3btsmart/manifest.json +++ b/homeassistant/components/eq3btsmart/manifest.json @@ -23,5 +23,5 @@ "iot_class": "local_polling", "loggers": ["eq3btsmart"], "quality_scale": "silver", - "requirements": ["eq3btsmart==1.1.9", "bleak-esphome==1.1.0"] + "requirements": ["eq3btsmart==1.2.1", "bleak-esphome==1.1.0"] } diff --git a/homeassistant/components/eq3btsmart/strings.json b/homeassistant/components/eq3btsmart/strings.json index 7477aab4cfb789..c911be099d5e5f 100644 --- a/homeassistant/components/eq3btsmart/strings.json +++ b/homeassistant/components/eq3btsmart/strings.json @@ -14,6 +14,16 @@ "init": { "title": "Configure new eQ-3 device" } + }, + "error": { + "invalid_mac_address": "Invalid MAC address" + } + }, + "entity": { + "binary_sensor": { + "dst": { + "name": "Daylight saving time" + } } } } diff --git a/homeassistant/components/esphome/alarm_control_panel.py b/homeassistant/components/esphome/alarm_control_panel.py index 64a0210f0f78d8..8f1b5ae8b1a374 100644 --- a/homeassistant/components/esphome/alarm_control_panel.py +++ 
b/homeassistant/components/esphome/alarm_control_panel.py @@ -6,9 +6,9 @@ from aioesphomeapi import ( AlarmControlPanelCommand, - AlarmControlPanelEntityState, + AlarmControlPanelEntityState as ESPHomeAlarmControlPanelEntityState, AlarmControlPanelInfo, - AlarmControlPanelState, + AlarmControlPanelState as ESPHomeAlarmControlPanelState, APIIntEnum, EntityInfo, ) @@ -16,20 +16,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import callback from .entity import ( @@ -40,21 +29,21 @@ ) from .enum_mapper import EsphomeEnumMapper -_ESPHOME_ACP_STATE_TO_HASS_STATE: EsphomeEnumMapper[AlarmControlPanelState, str] = ( - EsphomeEnumMapper( - { - AlarmControlPanelState.DISARMED: STATE_ALARM_DISARMED, - AlarmControlPanelState.ARMED_HOME: STATE_ALARM_ARMED_HOME, - AlarmControlPanelState.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - AlarmControlPanelState.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - AlarmControlPanelState.ARMED_VACATION: STATE_ALARM_ARMED_VACATION, - AlarmControlPanelState.ARMED_CUSTOM_BYPASS: STATE_ALARM_ARMED_CUSTOM_BYPASS, - AlarmControlPanelState.PENDING: STATE_ALARM_PENDING, - AlarmControlPanelState.ARMING: STATE_ALARM_ARMING, - AlarmControlPanelState.DISARMING: STATE_ALARM_DISARMING, - AlarmControlPanelState.TRIGGERED: STATE_ALARM_TRIGGERED, - } - ) +_ESPHOME_ACP_STATE_TO_HASS_STATE: EsphomeEnumMapper[ + ESPHomeAlarmControlPanelState, AlarmControlPanelState +] = EsphomeEnumMapper( + { + ESPHomeAlarmControlPanelState.DISARMED: AlarmControlPanelState.DISARMED, + ESPHomeAlarmControlPanelState.ARMED_HOME: AlarmControlPanelState.ARMED_HOME, + ESPHomeAlarmControlPanelState.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + ESPHomeAlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + ESPHomeAlarmControlPanelState.ARMED_VACATION: AlarmControlPanelState.ARMED_VACATION, + ESPHomeAlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ESPHomeAlarmControlPanelState.PENDING: AlarmControlPanelState.PENDING, + ESPHomeAlarmControlPanelState.ARMING: AlarmControlPanelState.ARMING, + ESPHomeAlarmControlPanelState.DISARMING: AlarmControlPanelState.DISARMING, + ESPHomeAlarmControlPanelState.TRIGGERED: AlarmControlPanelState.TRIGGERED, + } ) @@ -70,7 +59,7 @@ class EspHomeACPFeatures(APIIntEnum): class EsphomeAlarmControlPanel( - EsphomeEntity[AlarmControlPanelInfo, AlarmControlPanelEntityState], + EsphomeEntity[AlarmControlPanelInfo, ESPHomeAlarmControlPanelEntityState], AlarmControlPanelEntity, ): """An Alarm Control Panel implementation for ESPHome.""" @@ -101,7 +90,7 @@ def _on_static_info_update(self, static_info: EntityInfo) -> None: @property @esphome_state_property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return _ESPHOME_ACP_STATE_TO_HASS_STATE.from_esphome(self._state.state) @@ -159,5 +148,5 @@ async def async_alarm_trigger(self, code: str | None = None) -> None: platform_async_setup_entry, info_type=AlarmControlPanelInfo, entity_type=EsphomeAlarmControlPanel, - state_type=AlarmControlPanelEntityState, + 
state_type=ESPHomeAlarmControlPanelEntityState, ) diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py index b2794fe043fe39..dc513a03e02333 100644 --- a/homeassistant/components/esphome/assist_satellite.py +++ b/homeassistant/components/esphome/assist_satellite.py @@ -36,7 +36,7 @@ ) from homeassistant.components.media_player import async_process_play_media_url from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -108,9 +108,7 @@ class EsphomeAssistSatellite( """Satellite running ESPHome.""" entity_description = assist_satellite.AssistSatelliteEntityDescription( - key="assist_satellite", - translation_key="assist_satellite", - entity_category=EntityCategory.CONFIG, + key="assist_satellite", translation_key="assist_satellite" ) def __init__( @@ -247,15 +245,15 @@ async def async_added_to_hass(self) -> None: assist_satellite.AssistSatelliteEntityFeature.ANNOUNCE ) + # Block until config is retrieved. + # If the device supports announcements, it will return a config. + _LOGGER.debug("Waiting for satellite configuration") + await self._update_satellite_config() + if not (feature_flags & VoiceAssistantFeature.SPEAKER): # Will use media player for TTS/announcements self._update_tts_format() - # Fetch latest config in the background - self.config_entry.async_create_background_task( - self.hass, self._update_satellite_config(), "esphome_voice_assistant_config" - ) - async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() diff --git a/homeassistant/components/esphome/config_flow.py b/homeassistant/components/esphome/config_flow.py index 937cad040eada7..cb892b314cd06c 100644 --- a/homeassistant/components/esphome/config_flow.py +++ b/homeassistant/components/esphome/config_flow.py @@ -21,7 +21,6 @@ import voluptuous as vol from homeassistant.components import dhcp, zeroconf -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ( SOURCE_REAUTH, ConfigEntry, @@ -32,6 +31,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import callback from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from homeassistant.util.json import json_loads_object @@ -257,6 +257,9 @@ async def async_step_mqtt( self, discovery_info: MqttServiceInfo ) -> ConfigFlowResult: """Handle MQTT discovery.""" + if not discovery_info.payload: + return self.async_abort(reason="mqtt_missing_payload") + device_info = json_loads_object(discovery_info.payload) if "mac" not in device_info: return self.async_abort(reason="mqtt_missing_mac") @@ -482,16 +485,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for esphome.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def 
async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/esphome/coordinator.py b/homeassistant/components/esphome/coordinator.py index 284e17fd1831a9..b31a74dcf3f013 100644 --- a/homeassistant/components/esphome/coordinator.py +++ b/homeassistant/components/esphome/coordinator.py @@ -31,6 +31,7 @@ def __init__( super().__init__( hass, _LOGGER, + config_entry=None, name="ESPHome Dashboard", update_interval=timedelta(minutes=5), always_update=False, diff --git a/homeassistant/components/esphome/ffmpeg_proxy.py b/homeassistant/components/esphome/ffmpeg_proxy.py index c2bf72c40e5cb0..cefe87f49ba552 100644 --- a/homeassistant/components/esphome/ffmpeg_proxy.py +++ b/homeassistant/components/esphome/ffmpeg_proxy.py @@ -1,10 +1,12 @@ """HTTP view that converts audio from a URL to a preferred format.""" import asyncio +from collections import defaultdict from dataclasses import dataclass, field from http import HTTPStatus import logging import secrets +from typing import Final from aiohttp import web from aiohttp.abc import AbstractStreamWriter, BaseRequest @@ -17,6 +19,8 @@ _LOGGER = logging.getLogger(__name__) +_MAX_CONVERSIONS_PER_DEVICE: Final[int] = 2 + def async_create_proxy_url( hass: HomeAssistant, @@ -59,13 +63,18 @@ class FFmpegConversionInfo: proc: asyncio.subprocess.Process | None = None """Subprocess doing ffmpeg conversion.""" + is_finished: bool = False + """True if conversion has finished.""" + @dataclass class FFmpegProxyData: """Data for ffmpeg proxy conversion.""" - # device_id -> info - conversions: dict[str, FFmpegConversionInfo] = field(default_factory=dict) + # device_id -> [info] + conversions: dict[str, list[FFmpegConversionInfo]] = field( + default_factory=lambda: defaultdict(list) + ) def async_create_proxy_url( self, @@ -77,8 +86,15 @@ def async_create_proxy_url( width: int | None, ) -> str: """Create a one-time use proxy URL that automatically converts the media.""" - if (convert_info := self.conversions.pop(device_id, None)) is not None: - # Stop existing conversion before overwriting info + + # Remove completed conversions + device_conversions = [ + info for info in self.conversions[device_id] if not info.is_finished + ] + + while len(device_conversions) >= _MAX_CONVERSIONS_PER_DEVICE: + # Stop oldest conversion before adding a new one + convert_info = device_conversions[0] if (convert_info.proc is not None) and ( convert_info.proc.returncode is None ): @@ -87,12 +103,18 @@ def async_create_proxy_url( ) convert_info.proc.kill() + device_conversions = device_conversions[1:] + convert_id = secrets.token_urlsafe(16) - self.conversions[device_id] = FFmpegConversionInfo( - convert_id, media_url, media_format, rate, channels, width + device_conversions.append( + FFmpegConversionInfo( + convert_id, media_url, media_format, rate, channels, width + ) ) _LOGGER.debug("Media URL allowed by proxy: %s", media_url) + self.conversions[device_id] = device_conversions + return f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.{media_format}" @@ -131,11 +153,10 @@ def __init__( self.proxy_data = proxy_data self.chunk_size = chunk_size - async def prepare(self, request: BaseRequest) -> AbstractStreamWriter | None: + async def transcode( + self, request: BaseRequest, writer: AbstractStreamWriter + ) -> None: """Stream url through ffmpeg conversion and out to HTTP client.""" - writer = await super().prepare(request) - assert writer is not None - command_args = [ "-i", self.convert_info.media_url, @@ -155,6 +176,9 @@ 
async def prepare(self, request: BaseRequest) -> AbstractStreamWriter | None: # 16-bit samples command_args.extend(["-sample_fmt", "s16"]) + # Remove metadata and cover art + command_args.extend(["-map_metadata", "-1", "-vn"]) + # Output to stdout command_args.append("pipe:") @@ -164,11 +188,24 @@ async def prepare(self, request: BaseRequest) -> AbstractStreamWriter | None: *command_args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, + close_fds=False, # use posix_spawn in CPython < 3.13 ) # Only one conversion process per device is allowed self.convert_info.proc = proc + # Create background task which will be cancelled when home assistant shuts down + write_task = self.hass.async_create_background_task( + self._write_ffmpeg_data(request, writer, proc), "ESPHome media proxy" + ) + await write_task + + async def _write_ffmpeg_data( + self, + request: BaseRequest, + writer: AbstractStreamWriter, + proc: asyncio.subprocess.Process, + ) -> None: assert proc.stdout is not None assert proc.stderr is not None @@ -178,12 +215,15 @@ async def prepare(self, request: BaseRequest) -> AbstractStreamWriter | None: self.hass.is_running and (request.transport is not None) and (not request.transport.is_closing()) - and (proc.returncode is None) and (chunk := await proc.stdout.read(self.chunk_size)) ): - await writer.write(chunk) - await writer.drain() + await self.write(chunk) except asyncio.CancelledError: + _LOGGER.debug("ffmpeg transcoding cancelled") + # Abort the transport, we don't wait for ESPHome to drain the write buffer; + # it may need a very long time or never finish if the player is paused. + if request.transport: + request.transport.abort() raise # don't log error except: _LOGGER.exception("Unexpected error during ffmpeg conversion") @@ -196,14 +236,16 @@ async def prepare(self, request: BaseRequest) -> AbstractStreamWriter | None: raise finally: + # Allow conversion info to be removed + self.convert_info.is_finished = True + # Terminate hangs, so kill is used if proc.returncode is None: proc.kill() - # Close connection - await writer.write_eof() - - return writer + # Close connection by writing EOF unless already closing + if request.transport and not request.transport.is_closing(): + await writer.write_eof() class FFmpegProxyView(HomeAssistantView): @@ -222,7 +264,8 @@ async def get( self, request: web.Request, device_id: str, filename: str ) -> web.StreamResponse: """Start a get request.""" - if (convert_info := self.proxy_data.conversions.get(device_id)) is None: + device_conversions = self.proxy_data.conversions[device_id] + if not device_conversions: return web.Response( body="No proxy URL for device", status=HTTPStatus.NOT_FOUND ) @@ -230,9 +273,16 @@ async def get( # {id}.mp3 -> id, mp3 convert_id, media_format = filename.rsplit(".") - if (convert_info.convert_id != convert_id) or ( - convert_info.media_format != media_format - ): + # Look up conversion info + convert_info: FFmpegConversionInfo | None = None + for maybe_convert_info in device_conversions: + if (maybe_convert_info.convert_id == convert_id) and ( + maybe_convert_info.media_format == media_format + ): + convert_info = maybe_convert_info + break + + if convert_info is None: return web.Response(body="Invalid proxy URL", status=HTTPStatus.BAD_REQUEST) # Stop previous process if the URL is being reused. 
@@ -243,6 +293,10 @@ async def get( convert_info.proc = None # Stream converted audio back to client - return FFmpegConvertResponse( + resp = FFmpegConvertResponse( self.manager, convert_info, device_id, self.proxy_data ) + writer = await resp.prepare(request) + assert writer is not None + await resp.transcode(request, writer) + return resp diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index c36a55d1f55002..007b4e791e17ec 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -570,7 +570,11 @@ def _async_setup_device_registry( configuration_url = None if device_info.webserver_port > 0: configuration_url = f"http://{entry.data['host']}:{device_info.webserver_port}" - elif dashboard := async_get_dashboard(hass): + elif ( + (dashboard := async_get_dashboard(hass)) + and dashboard.data + and dashboard.data.get(device_info.name) + ): configuration_url = f"homeassistant://hassio/ingress/{dashboard.addon_slug}" manufacturer = "espressif" diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 410c826c5a0c6c..b9b6a98dcd13d5 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==27.0.0", + "aioesphomeapi==27.0.1", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.1.0" ], diff --git a/homeassistant/components/esphome/strings.json b/homeassistant/components/esphome/strings.json index ec7e6f674b3ea8..18a54772e30380 100644 --- a/homeassistant/components/esphome/strings.json +++ b/homeassistant/components/esphome/strings.json @@ -8,7 +8,8 @@ "service_received": "Action received", "mqtt_missing_mac": "Missing MAC address in MQTT properties.", "mqtt_missing_api": "Missing API port in MQTT properties.", - "mqtt_missing_ip": "Missing IP address in MQTT properties." + "mqtt_missing_ip": "Missing IP address in MQTT properties.", + "mqtt_missing_payload": "Missing MQTT Payload." }, "error": { "resolve_error": "Can't resolve address of the ESP. 
If this error persists, please set a static IP address", diff --git a/homeassistant/components/esphome/update.py b/homeassistant/components/esphome/update.py index b7905fb4fdbfe7..5e571399ecb037 100644 --- a/homeassistant/components/esphome/update.py +++ b/homeassistant/components/esphome/update.py @@ -230,10 +230,8 @@ def installed_version(self) -> str | None: @property @esphome_state_property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool: """Return if the update is in progress.""" - if self._state.has_progress: - return int(self._state.progress) return self._state.in_progress @property @@ -260,6 +258,14 @@ def title(self) -> str | None: """Return the title of the update.""" return self._state.title + @property + @esphome_state_property + def update_percentage(self) -> int | None: + """Return the update progress percentage.""" + if self._state.has_progress: + return int(self._state.progress) + return None + @convert_api_error_ha_error async def async_update(self) -> None: """Command device to check for update.""" diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index 58e0e16e059d39..612131919d4af3 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -176,7 +176,7 @@ async def _load_auth_tokens(self, username: str) -> None: ): app_storage[ACCESS_TOKEN_EXPIRES] = dt_aware_to_naive(expires) - user_data: dict[str, str] = app_storage.pop(USER_DATA, {}) + user_data: dict[str, str] = app_storage.pop(USER_DATA, {}) or {} self.session_id = user_data.get(SZ_SESSION_ID) self._tokens = app_storage @@ -223,7 +223,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: config[DOMAIN][CONF_PASSWORD], ) - except evo.AuthenticationFailed as err: + except (evo.AuthenticationFailed, evo.RequestFailed) as err: handle_evo_exception(err) return False @@ -240,6 +240,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=f"{DOMAIN}_coordinator", update_interval=config[DOMAIN][CONF_SCAN_INTERVAL], update_method=broker.async_update, diff --git a/homeassistant/components/evohome/climate.py b/homeassistant/components/evohome/climate.py index 5aa99bca60e35b..1388585bc17332 100644 --- a/homeassistant/components/evohome/climate.py +++ b/homeassistant/components/evohome/climate.py @@ -66,8 +66,6 @@ PRESET_RESET = "Reset" # reset all child zones to EVO_FOLLOW PRESET_CUSTOM = "Custom" -HA_HVAC_TO_TCS = {HVACMode.OFF: EVO_HEATOFF, HVACMode.HEAT: EVO_AUTO} - TCS_PRESET_TO_HA = { EVO_AWAY: PRESET_AWAY, EVO_CUSTOM: PRESET_CUSTOM, @@ -150,14 +148,10 @@ async def async_setup_platform( class EvoClimateEntity(EvoDevice, ClimateEntity): """Base for any evohome-compatible climate entity (controller, zone).""" + _attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT] _attr_temperature_unit = UnitOfTemperature.CELSIUS _enable_turn_on_off_backwards_compatibility = False - @property - def hvac_modes(self) -> list[HVACMode]: - """Return a list of available hvac operation modes.""" - return list(HA_HVAC_TO_TCS) - class EvoZone(EvoChild, EvoClimateEntity): """Base for any evohome-compatible heating zone.""" @@ -365,9 +359,9 @@ def __init__(self, evo_broker: EvoBroker, evo_device: evo.ControlSystem) -> None self._attr_unique_id = evo_device.systemId self._attr_name = evo_device.location.name - modes = [m[SZ_SYSTEM_MODE] for m in evo_broker.tcs.allowedSystemModes] + self._evo_modes = 
[m[SZ_SYSTEM_MODE] for m in evo_device.allowedSystemModes] self._attr_preset_modes = [ - TCS_PRESET_TO_HA[m] for m in modes if m in list(TCS_PRESET_TO_HA) + TCS_PRESET_TO_HA[m] for m in self._evo_modes if m in list(TCS_PRESET_TO_HA) ] if self._attr_preset_modes: self._attr_supported_features = ClimateEntityFeature.PRESET_MODE @@ -401,14 +395,14 @@ async def _set_tcs_mode(self, mode: str, until: datetime | None = None) -> None: """Set a Controller to any of its native EVO_* operating modes.""" until = dt_util.as_utc(until) if until else None await self._evo_broker.call_client_api( - self._evo_tcs.set_mode(mode, until=until) # type: ignore[arg-type] + self._evo_device.set_mode(mode, until=until) # type: ignore[arg-type] ) @property def hvac_mode(self) -> HVACMode: """Return the current operating mode of a Controller.""" - tcs_mode = self._evo_tcs.system_mode - return HVACMode.OFF if tcs_mode == EVO_HEATOFF else HVACMode.HEAT + evo_mode = self._evo_device.system_mode + return HVACMode.OFF if evo_mode in (EVO_HEATOFF, "Off") else HVACMode.HEAT @property def current_temperature(self) -> float | None: @@ -418,7 +412,7 @@ def current_temperature(self) -> float | None: """ temps = [ z.temperature - for z in self._evo_tcs.zones.values() + for z in self._evo_device.zones.values() if z.temperature is not None ] return round(sum(temps) / len(temps), 1) if temps else None @@ -426,9 +420,9 @@ def current_temperature(self) -> float | None: @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., home, away, temp.""" - if not self._evo_tcs.system_mode: + if not self._evo_device.system_mode: return None - return TCS_PRESET_TO_HA.get(self._evo_tcs.system_mode) + return TCS_PRESET_TO_HA.get(self._evo_device.system_mode) async def async_set_temperature(self, **kwargs: Any) -> None: """Raise exception as Controllers don't have a target temperature.""" @@ -436,9 +430,13 @@ async def async_set_temperature(self, **kwargs: Any) -> None: async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set an operating mode for a Controller.""" - if not (tcs_mode := HA_HVAC_TO_TCS.get(hvac_mode)): + if hvac_mode == HVACMode.HEAT: + evo_mode = EVO_AUTO if EVO_AUTO in self._evo_modes else "Heat" + elif hvac_mode == HVACMode.OFF: + evo_mode = EVO_HEATOFF if EVO_HEATOFF in self._evo_modes else "Off" + else: raise HomeAssistantError(f"Invalid hvac_mode: {hvac_mode}") - await self._set_tcs_mode(tcs_mode) + await self._set_tcs_mode(evo_mode) async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode; if None, then revert to 'Auto' mode.""" @@ -451,6 +449,6 @@ async def async_update(self) -> None: attrs = self._device_state_attrs for attr in STATE_ATTRS_TCS: if attr == SZ_ACTIVE_FAULTS: - attrs["activeSystemFaults"] = getattr(self._evo_tcs, attr) + attrs["activeSystemFaults"] = getattr(self._evo_device, attr) else: - attrs[attr] = getattr(self._evo_tcs, attr) + attrs[attr] = getattr(self._evo_device, attr) diff --git a/homeassistant/components/evohome/entity.py b/homeassistant/components/evohome/entity.py index 5da9df247cdfdf..b5842c1073a277 100644 --- a/homeassistant/components/evohome/entity.py +++ b/homeassistant/components/evohome/entity.py @@ -42,7 +42,6 @@ def __init__( """Initialize an evohome-compatible entity (TCS, DHW, zone).""" self._evo_device = evo_device self._evo_broker = evo_broker - self._evo_tcs = evo_broker.tcs self._device_state_attrs: dict[str, Any] = {} @@ -101,6 +100,8 @@ def __init__( """Initialize an evohome-compatible child entity 
(DHW, zone).""" super().__init__(evo_broker, evo_device) + self._evo_tcs = evo_device.tcs + self._schedule: dict[str, Any] = {} self._setpoints: dict[str, Any] = {} diff --git a/homeassistant/components/ezviz/alarm_control_panel.py b/homeassistant/components/ezviz/alarm_control_panel.py index 21e9f2d0422f25..f30a7852b4e79e 100644 --- a/homeassistant/components/ezviz/alarm_control_panel.py +++ b/homeassistant/components/ezviz/alarm_control_panel.py @@ -13,13 +13,9 @@ AlarmControlPanelEntity, AlarmControlPanelEntityDescription, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo @@ -45,9 +41,9 @@ class EzvizAlarmControlPanelEntityDescription(AlarmControlPanelEntityDescription key="ezviz_alarm", ezviz_alarm_states=[ None, - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_HOME, ], ) @@ -96,7 +92,7 @@ def __init__( self._attr_device_info = device_info self.entity_description = entity_description self.coordinator = coordinator - self._attr_state = None + self._attr_alarm_state = None async def async_added_to_hass(self) -> None: """Entity added to hass.""" @@ -108,7 +104,7 @@ def alarm_disarm(self, code: str | None = None) -> None: if self.coordinator.ezviz_client.api_set_defence_mode( DefenseModeType.HOME_MODE.value ): - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED except PyEzvizError as err: raise HomeAssistantError("Cannot disarm EZVIZ alarm") from err @@ -119,7 +115,7 @@ def alarm_arm_away(self, code: str | None = None) -> None: if self.coordinator.ezviz_client.api_set_defence_mode( DefenseModeType.AWAY_MODE.value ): - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY except PyEzvizError as err: raise HomeAssistantError("Cannot arm EZVIZ alarm") from err @@ -130,7 +126,7 @@ def alarm_arm_home(self, code: str | None = None) -> None: if self.coordinator.ezviz_client.api_set_defence_mode( DefenseModeType.SLEEP_MODE.value ): - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME except PyEzvizError as err: raise HomeAssistantError("Cannot arm EZVIZ alarm") from err @@ -145,7 +141,7 @@ def update(self) -> None: _LOGGER.debug( "Updating EZVIZ alarm with response %s", ezviz_alarm_state_number ) - self._attr_state = self.entity_description.ezviz_alarm_states[ + self._attr_alarm_state = self.entity_description.ezviz_alarm_states[ int(ezviz_alarm_state_number) ] diff --git a/homeassistant/components/ezviz/config_flow.py b/homeassistant/components/ezviz/config_flow.py index aa998cc6f60bff..a7551737c10d93 100644 --- a/homeassistant/components/ezviz/config_flow.py +++ b/homeassistant/components/ezviz/config_flow.py @@ -150,7 +150,7 @@ async def _validate_and_create_camera_rtsp(self, data: dict) -> ConfigFlowResult @callback def async_get_options_flow(config_entry: ConfigEntry) -> EzvizOptionsFlowHandler: """Get the options flow for this handler.""" - return EzvizOptionsFlowHandler(config_entry) + return EzvizOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None 
= None @@ -391,10 +391,6 @@ async def async_step_reauth_confirm( class EzvizOptionsFlowHandler(OptionsFlow): """Handle EZVIZ client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/ezviz/update.py b/homeassistant/components/ezviz/update.py index 05735d152cf30c..25a506a005228d 100644 --- a/homeassistant/components/ezviz/update.py +++ b/homeassistant/components/ezviz/update.py @@ -73,11 +73,9 @@ def installed_version(self) -> str | None: return self.data["version"] @property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool: """Update installation progress.""" - if self.data["upgrade_in_progress"]: - return self.data["upgrade_percent"] - return False + return bool(self.data["upgrade_in_progress"]) @property def latest_version(self) -> str | None: @@ -93,6 +91,13 @@ def release_notes(self) -> str | None: return self.data["latest_firmware_info"].get("desc") return None + @property + def update_percentage(self) -> int | None: + """Update installation progress.""" + if self.data["upgrade_in_progress"]: + return self.data["upgrade_percent"] + return None + async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: diff --git a/homeassistant/components/fastdotcom/__init__.py b/homeassistant/components/fastdotcom/__init__.py index b9593ec907f549..967e7ef8e35f19 100644 --- a/homeassistant/components/fastdotcom/__init__.py +++ b/homeassistant/components/fastdotcom/__init__.py @@ -4,7 +4,7 @@ import logging -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers.start import async_at_started @@ -26,7 +26,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _async_finish_startup(hass: HomeAssistant) -> None: """Run this only when HA has finished its startup.""" - await coordinator.async_config_entry_first_refresh() + if entry.state == ConfigEntryState.LOADED: + await coordinator.async_refresh() + else: + await coordinator.async_config_entry_first_refresh() # Don't start a speedtest during startup, this will slow down the overall startup dramatically async_at_started(hass, _async_finish_startup) diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index 4555b08e4f4e3d..b902d48a1c8ebe 100644 --- a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -15,7 +15,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant, callback @@ -42,14 +41,15 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - _config_entry: ConfigEntry _max_entries: int | None = None @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: """Get the options flow for this handler.""" - return FeedReaderOptionsFlowHandler(config_entry) + return FeedReaderOptionsFlowHandler() def show_user_form( self, @@ -124,18 +124,12 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | 
None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - self._config_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" + reconfigure_entry = self._get_reconfigure_entry() if not user_input: return self.show_user_form( - user_input={**self._config_entry.data}, - description_placeholders={"name": self._config_entry.title}, - step_id="reconfigure_confirm", + user_input={**reconfigure_entry.data}, + description_placeholders={"name": reconfigure_entry.title}, + step_id="reconfigure", ) feed = await async_fetch_feed(self.hass, user_input[CONF_URL]) @@ -145,16 +139,16 @@ async def async_step_reconfigure_confirm( if isinstance(feed.bozo_exception, urllib.error.URLError): return self.show_user_form( user_input=user_input, - description_placeholders={"name": self._config_entry.title}, - step_id="reconfigure_confirm", + description_placeholders={"name": reconfigure_entry.title}, + step_id="reconfigure", errors={"base": "url_error"}, ) - self.hass.config_entries.async_update_entry(self._config_entry, data=user_input) + self.hass.config_entries.async_update_entry(reconfigure_entry, data=user_input) return self.async_abort(reason="reconfigure_successful") -class FeedReaderOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FeedReaderOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -169,7 +163,9 @@ async def async_step_init( { vol.Optional( CONF_MAX_ENTRIES, - default=self.options.get(CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES), + default=self.config_entry.options.get( + CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES + ), ): cv.positive_int, } ) diff --git a/homeassistant/components/feedreader/strings.json b/homeassistant/components/feedreader/strings.json index da66333fa5bcc6..0f0492eb6c9f4e 100644 --- a/homeassistant/components/feedreader/strings.json +++ b/homeassistant/components/feedreader/strings.json @@ -6,7 +6,7 @@ "url": "[%key:common::config_flow::data::url%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update your configuration information for {name}.", "data": { "url": "[%key:common::config_flow::data::url%]" diff --git a/homeassistant/components/ffmpeg/manifest.json b/homeassistant/components/ffmpeg/manifest.json index ab9f3ed65c1d61..085db6791b33e8 100644 --- a/homeassistant/components/ffmpeg/manifest.json +++ b/homeassistant/components/ffmpeg/manifest.json @@ -4,5 +4,5 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ffmpeg", "integration_type": "system", - "requirements": ["ha-ffmpeg==3.2.0"] + "requirements": ["ha-ffmpeg==3.2.2"] } diff --git a/homeassistant/components/fibaro/config_flow.py b/homeassistant/components/fibaro/config_flow.py index 9003704348d406..0ffd9aaa48f0b9 100644 --- a/homeassistant/components/fibaro/config_flow.py +++ b/homeassistant/components/fibaro/config_flow.py @@ -9,8 +9,8 @@ from slugify import slugify import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import HomeAssistant from . 
import FibaroAuthFailed, FibaroConnectFailed, init_controller @@ -63,10 +63,6 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -94,9 +90,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauthentication.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -105,9 +98,10 @@ async def async_step_reauth_confirm( """Handle a flow initiated by reauthentication.""" errors = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() + if user_input is not None: - new_data = self._reauth_entry.data | user_input + new_data = reauth_entry.data | user_input try: await _validate_input(self.hass, new_data) except FibaroConnectFailed: @@ -115,19 +109,16 @@ async def async_step_reauth_confirm( except FibaroAuthFailed: errors["base"] = "invalid_auth" else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=new_data - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), errors=errors, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.title, }, ) diff --git a/homeassistant/components/fibaro/cover.py b/homeassistant/components/fibaro/cover.py index fc28e57af7053c..c787ca702726c5 100644 --- a/homeassistant/components/fibaro/cover.py +++ b/homeassistant/components/fibaro/cover.py @@ -79,6 +79,28 @@ def current_cover_tilt_position(self) -> int | None: """Return the current tilt position for venetian blinds.""" return self.bound(self.level2) + @property + def is_opening(self) -> bool | None: + """Return if the cover is opening or not. + + Be aware that this property is only available for some modern devices. + For example the Fibaro Roller Shutter 4 reports this correctly. + """ + if self.fibaro_device.state.has_value: + return self.fibaro_device.state.str_value().lower() == "opening" + return None + + @property + def is_closing(self) -> bool | None: + """Return if the cover is closing or not. + + Be aware that this property is only available for some modern devices. + For example the Fibaro Roller Shutter 4 reports this correctly. 
+ """ + if self.fibaro_device.state.has_value: + return self.fibaro_device.state.str_value().lower() == "closing" + return None + def set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" self.set_level(cast(int, kwargs.get(ATTR_POSITION))) diff --git a/homeassistant/components/fibaro/manifest.json b/homeassistant/components/fibaro/manifest.json index 39850672d060ae..d2a1186b05b5e0 100644 --- a/homeassistant/components/fibaro/manifest.json +++ b/homeassistant/components/fibaro/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pyfibaro"], - "requirements": ["pyfibaro==0.7.8"] + "requirements": ["pyfibaro==0.8.0"] } diff --git a/homeassistant/components/file/__init__.py b/homeassistant/components/file/__init__.py index 0c9cfee5f4d487..7bc206057c8586 100644 --- a/homeassistant/components/file/__init__.py +++ b/homeassistant/components/file/__init__.py @@ -3,88 +3,16 @@ from copy import deepcopy from typing import Any -from homeassistant.components.notify import migrate_notify_issue -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_FILE_PATH, - CONF_NAME, - CONF_PLATFORM, - CONF_SCAN_INTERVAL, - Platform, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - discovery, - issue_registry as ir, -) -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .notify import PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA -from .sensor import PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA - -IMPORT_SCHEMA = { - Platform.SENSOR: SENSOR_PLATFORM_SCHEMA, - Platform.NOTIFY: NOTIFY_PLATFORM_SCHEMA, -} - -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.NOTIFY, Platform.SENSOR] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the file integration.""" - - hass.data[DOMAIN] = config - if hass.config_entries.async_entries(DOMAIN): - # We skip import in case we already have config entries - return True - # The use of the legacy notify service was deprecated with HA Core 2024.6.0 - # and will be removed with HA Core 2024.12 - migrate_notify_issue(hass, DOMAIN, "File", "2024.12.0") - # The YAML config was imported with HA Core 2024.6.0 and will be removed with - # HA Core 2024.12 - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - learn_more_url="https://www.home-assistant.io/integrations/file/", - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "File", - }, - ) - - # Import the YAML config into separate config entries - platforms_config: dict[Platform, list[ConfigType]] = { - domain: config[domain] for domain in PLATFORMS if domain in config - } - for domain, items in platforms_config.items(): - for item in items: - if item[CONF_PLATFORM] == DOMAIN: - file_config_item = IMPORT_SCHEMA[domain](item) - file_config_item[CONF_PLATFORM] = domain - if CONF_SCAN_INTERVAL in file_config_item: - del file_config_item[CONF_SCAN_INTERVAL] - 
hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=file_config_item, - ) - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a file component entry.""" config = {**entry.data, **entry.options} @@ -102,20 +30,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, [Platform(entry.data[CONF_PLATFORM])] ) entry.async_on_unload(entry.add_update_listener(update_listener)) - if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data: - # New notify entities are being setup through the config entry, - # but during the deprecation period we want to keep the legacy notify platform, - # so we forward the setup config through discovery. - # Only the entities from yaml will still be available as legacy service. - hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - config, - hass.data[DOMAIN], - ) - ) return True diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index d74e36ce935e49..992635d05fd937 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -1,7 +1,8 @@ """Config flow for file integration.""" +from __future__ import annotations + from copy import deepcopy -import os from typing import Any import voluptuous as vol @@ -11,11 +12,9 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_FILE_PATH, - CONF_FILENAME, CONF_NAME, CONF_PLATFORM, CONF_UNIT_OF_MEASUREMENT, @@ -74,9 +73,11 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FileOptionsFlowHandler: """Get the options flow for this handler.""" - return FileOptionsFlowHandler(config_entry) + return FileOptionsFlowHandler() async def validate_file_path(self, file_path: str) -> bool: """Ensure the file path is valid.""" @@ -129,29 +130,8 @@ async def async_step_sensor( """Handle file sensor config flow.""" return await self._async_handle_step(Platform.SENSOR.value, user_input) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import `file`` config from configuration.yaml.""" - self._async_abort_entries_match(import_data) - platform = import_data[CONF_PLATFORM] - name: str = import_data.get(CONF_NAME, DEFAULT_NAME) - file_name: str - if platform == Platform.NOTIFY: - file_name = import_data.pop(CONF_FILENAME) - file_path: str = os.path.join(self.hass.config.config_dir, file_name) - import_data[CONF_FILE_PATH] = file_path - else: - file_path = import_data[CONF_FILE_PATH] - title = f"{name} [{file_path}]" - data = deepcopy(import_data) - options = {} - for key, value in import_data.items(): - if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): - data.pop(key) - options[key] = value - return self.async_create_entry(title=title, data=data, options=options) - - -class FileOptionsFlowHandler(OptionsFlowWithConfigEntry): + +class FileOptionsFlowHandler(OptionsFlow): """Handle File options.""" async def async_step_init( diff --git a/homeassistant/components/file/notify.py b/homeassistant/components/file/notify.py index 9411b7cf1a8521..10e3d4a4ac66a0 100644 --- a/homeassistant/components/file/notify.py +++ b/homeassistant/components/file/notify.py @@ 
-2,104 +2,23 @@ from __future__ import annotations -from functools import partial -import logging import os from typing import Any, TextIO -import voluptuous as vol - from homeassistant.components.notify import ( - ATTR_TITLE, ATTR_TITLE_DEFAULT, - PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, - BaseNotificationService, NotifyEntity, NotifyEntityFeature, - migrate_notify_issue, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME, CONF_NAME +from homeassistant.const import CONF_FILE_PATH, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN, FILE_ICON -_LOGGER = logging.getLogger(__name__) - -# The legacy platform schema uses a filename, after import -# The full file path is stored in the config entry -PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_FILENAME): cv.string, - vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean, - } -) - - -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> FileNotificationService | None: - """Get the file notification service.""" - if discovery_info is None: - # We only set up through discovery - return None - file_path: str = discovery_info[CONF_FILE_PATH] - timestamp: bool = discovery_info[CONF_TIMESTAMP] - - return FileNotificationService(file_path, timestamp) - - -class FileNotificationService(BaseNotificationService): - """Implement the notification service for the File service.""" - - def __init__(self, file_path: str, add_timestamp: bool) -> None: - """Initialize the service.""" - self._file_path = file_path - self.add_timestamp = add_timestamp - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to a file.""" - # The use of the legacy notify service was deprecated with HA Core 2024.6.0 - # and will be removed with HA Core 2024.12 - migrate_notify_issue( - self.hass, DOMAIN, "File", "2024.12.0", service_name=self._service_name - ) - await self.hass.async_add_executor_job( - partial(self.send_message, message, **kwargs) - ) - - def send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to a file.""" - file: TextIO - filepath = self._file_path - try: - with open(filepath, "a", encoding="utf8") as file: - if os.stat(filepath).st_size == 0: - title = ( - f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log" - f" started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" - ) - file.write(title) - - if self.add_timestamp: - text = f"{dt_util.utcnow().isoformat()} {message}\n" - else: - text = f"{message}\n" - file.write(text) - except OSError as exc: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="write_access_failed", - translation_placeholders={"filename": filepath, "exc": f"{exc!r}"}, - ) from exc - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/file/sensor.py b/homeassistant/components/file/sensor.py index e37a3df86a687b..879c06e29f3b50 100644 --- a/homeassistant/components/file/sensor.py +++ b/homeassistant/components/file/sensor.py @@ -6,12 +6,8 @@ import os from 
file_read_backwards import FileReadBackwards -import voluptuous as vol -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorEntity, -) +from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_FILE_PATH, @@ -20,38 +16,13 @@ CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.template import Template -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DEFAULT_NAME, FILE_ICON _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_FILE_PATH): cv.isfile, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_VALUE_TEMPLATE): cv.string, - vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the file sensor from YAML. - - The YAML platform config is automatically - imported to a config entry, this method can be removed - when YAML support is removed. - """ - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/fireservicerota/__init__.py b/homeassistant/components/fireservicerota/__init__.py index 9173a2b3392769..aa303a08795368 100644 --- a/homeassistant/components/fireservicerota/__init__.py +++ b/homeassistant/components/fireservicerota/__init__.py @@ -46,6 +46,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="duty binary sensor", update_method=async_update_data, update_interval=MIN_TIME_BETWEEN_UPDATES, diff --git a/homeassistant/components/fitbit/api.py b/homeassistant/components/fitbit/api.py index 1eed5acbcca0be..e5ae88c542051a 100644 --- a/homeassistant/components/fitbit/api.py +++ b/homeassistant/components/fitbit/api.py @@ -156,8 +156,7 @@ def __init__( async def async_get_access_token(self) -> dict[str, Any]: """Return a valid access token for the Fitbit API.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token diff --git a/homeassistant/components/fitbit/config_flow.py b/homeassistant/components/fitbit/config_flow.py index eff4ba37773afc..cb4e3fb4ea35b1 100644 --- a/homeassistant/components/fitbit/config_flow.py +++ b/homeassistant/components/fitbit/config_flow.py @@ -4,7 +4,7 @@ import logging from typing import Any -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -22,8 +22,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -34,16 +32,13 @@ def extra_authorize_data(self) -> dict[str, str]: """Extra data that needs to be appended to the authorize url.""" return { "scope": " ".join(OAUTH_SCOPES), - "prompt": "consent" if not self.reauth_entry else "none", + "prompt": "consent" if self.source != SOURCE_REAUTH else "none", } 
async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -82,14 +77,13 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu _LOGGER.error("Failed to fetch user profile for Fitbit API: %s", err) return self.async_abort(reason="cannot_connect") - if self.reauth_entry: - if self.reauth_entry.unique_id != profile.encoded_id: - return self.async_abort(reason="wrong_account") - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - await self.async_set_unique_id(profile.encoded_id) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) + self._abort_if_unique_id_configured() return self.async_create_entry(title=profile.display_name, data=data) diff --git a/homeassistant/components/fivem/strings.json b/homeassistant/components/fivem/strings.json index abdef61fb28e0c..fd58922a48128b 100644 --- a/homeassistant/components/fivem/strings.json +++ b/homeassistant/components/fivem/strings.json @@ -15,7 +15,7 @@ "error": { "cannot_connect": "Failed to connect. Please check the host and port and try again. Also ensure that you are running the latest FiveM server.", "invalid_game_name": "The api of the game you are trying to connect to is not a FiveM game.", - "unknown_error": "[%key:common::config_flow::error::unknown%]" + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" diff --git a/homeassistant/components/flexit_bacnet/sensor.py b/homeassistant/components/flexit_bacnet/sensor.py index 2453acb90be951..be5f12e480eb23 100644 --- a/homeassistant/components/flexit_bacnet/sensor.py +++ b/homeassistant/components/flexit_bacnet/sensor.py @@ -10,7 +10,6 @@ SensorEntity, SensorEntityDescription, SensorStateClass, - StateType, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -22,6 +21,7 @@ ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from . 
import FlexitCoordinator from .const import DOMAIN diff --git a/homeassistant/components/flux_led/config_flow.py b/homeassistant/components/flux_led/config_flow.py index d78fc699579e95..9a02120f33adac 100644 --- a/homeassistant/components/flux_led/config_flow.py +++ b/homeassistant/components/flux_led/config_flow.py @@ -71,9 +71,11 @@ def __init__(self) -> None: @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FluxLedOptionsFlow: """Get the options flow for the Flux LED component.""" - return FluxLedOptionsFlow(config_entry) + return FluxLedOptionsFlow() async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -320,10 +322,6 @@ async def _async_try_connect( class FluxLedOptionsFlow(OptionsFlow): """Handle flux_led options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the flux_led options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -332,7 +330,7 @@ async def async_step_init( if user_input is not None: return self.async_create_entry(title="", data=user_input) - options = self._config_entry.options + options = self.config_entry.options options_schema = vol.Schema( { vol.Optional( diff --git a/homeassistant/components/folder_watcher/__init__.py b/homeassistant/components/folder_watcher/__init__.py index 800a95509c2f9b..3aeaa6f7ef21cf 100644 --- a/homeassistant/components/folder_watcher/__init__.py +++ b/homeassistant/components/folder_watcher/__init__.py @@ -4,9 +4,8 @@ import logging import os -from typing import Any, cast +from typing import cast -import voluptuous as vol from watchdog.events import ( FileClosedEvent, FileCreatedEvent, @@ -19,69 +18,17 @@ ) from watchdog.observers import Observer -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType -from .const import CONF_FOLDER, CONF_PATTERNS, DEFAULT_PATTERN, DOMAIN, PLATFORMS +from .const import CONF_FOLDER, CONF_PATTERNS, DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.All( - cv.ensure_list, - [ - vol.Schema( - { - vol.Required(CONF_FOLDER): cv.isdir, - vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ) - ], - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the folder watcher.""" - if DOMAIN in config: - conf: list[dict[str, Any]] = config[DOMAIN] - for watcher in conf: - path: str = watcher[CONF_FOLDER] - if not hass.config.is_allowed_path(path): - async_create_issue( - hass, - DOMAIN, - f"import_failed_not_allowed_path_{path}", - is_fixable=False, - is_persistent=False, - severity=IssueSeverity.ERROR, - translation_key="import_failed_not_allowed_path", - translation_placeholders={ - "path": path, - "config_variable": "allowlist_external_dirs", - }, - ) - continue - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, 
context={"source": SOURCE_IMPORT}, data=watcher - ) - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Folder watcher from a config entry.""" diff --git a/homeassistant/components/folder_watcher/config_flow.py b/homeassistant/components/folder_watcher/config_flow.py index fe43cd1c725377..eb176cfaf24680 100644 --- a/homeassistant/components/folder_watcher/config_flow.py +++ b/homeassistant/components/folder_watcher/config_flow.py @@ -8,10 +8,8 @@ import voluptuous as vol -from homeassistant.components.homeassistant import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.config_entries import ConfigFlowResult from homeassistant.core import callback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.schema_config_entry_flow import ( SchemaCommonFlowHandler, SchemaConfigFlowHandler, @@ -46,28 +44,6 @@ async def validate_setup( return user_input -async def validate_import_setup( - handler: SchemaCommonFlowHandler, user_input: dict[str, Any] -) -> dict[str, Any]: - """Create issue on successful import.""" - async_create_issue( - handler.parent_handler.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Folder Watcher", - }, - ) - return user_input - - OPTIONS_SCHEMA = vol.Schema( { vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): SelectSelector( @@ -88,9 +64,6 @@ async def validate_import_setup( CONFIG_FLOW = { "user": SchemaFlowFormStep(schema=DATA_SCHEMA, validate_user_input=validate_setup), - "import": SchemaFlowFormStep( - schema=DATA_SCHEMA, validate_user_input=validate_import_setup - ), } OPTIONS_FLOW = { "init": SchemaFlowFormStep(schema=OPTIONS_SCHEMA), diff --git a/homeassistant/components/forecast_solar/config_flow.py b/homeassistant/components/forecast_solar/config_flow.py index 982f32eb07bc69..9a64ce6e1fb353 100644 --- a/homeassistant/components/forecast_solar/config_flow.py +++ b/homeassistant/components/forecast_solar/config_flow.py @@ -41,7 +41,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> ForecastSolarOptionFlowHandler: """Get the options flow for this handler.""" - return ForecastSolarOptionFlowHandler(config_entry) + return ForecastSolarOptionFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -91,10 +91,6 @@ async def async_step_user( class ForecastSolarOptionFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index 5f061aa4be187e..5fb9f08f1c0098 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -52,10 +52,6 @@ class ForkedDaapdOptionsFlowHandler(OptionsFlow): """Handle a forked-daapd options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -122,7 +118,7 @@ def 
async_get_options_flow( config_entry: ConfigEntry, ) -> ForkedDaapdOptionsFlowHandler: """Return options flow handler.""" - return ForkedDaapdOptionsFlowHandler(config_entry) + return ForkedDaapdOptionsFlowHandler() async def validate_input(self, user_input): """Validate the user input.""" diff --git a/homeassistant/components/freebox/alarm_control_panel.py b/homeassistant/components/freebox/alarm_control_panel.py index 891180785b065b..9d8e85a14caec4 100644 --- a/homeassistant/components/freebox/alarm_control_panel.py +++ b/homeassistant/components/freebox/alarm_control_panel.py @@ -5,15 +5,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -22,14 +16,14 @@ from .router import FreeboxRouter FREEBOX_TO_STATUS = { - "alarm1_arming": STATE_ALARM_ARMING, - "alarm2_arming": STATE_ALARM_ARMING, - "alarm1_armed": STATE_ALARM_ARMED_AWAY, - "alarm2_armed": STATE_ALARM_ARMED_HOME, - "alarm1_alert_timer": STATE_ALARM_TRIGGERED, - "alarm2_alert_timer": STATE_ALARM_TRIGGERED, - "alert": STATE_ALARM_TRIGGERED, - "idle": STATE_ALARM_DISARMED, + "alarm1_arming": AlarmControlPanelState.ARMING, + "alarm2_arming": AlarmControlPanelState.ARMING, + "alarm1_armed": AlarmControlPanelState.ARMED_AWAY, + "alarm2_armed": AlarmControlPanelState.ARMED_HOME, + "alarm1_alert_timer": AlarmControlPanelState.TRIGGERED, + "alarm2_alert_timer": AlarmControlPanelState.TRIGGERED, + "alert": AlarmControlPanelState.TRIGGERED, + "idle": AlarmControlPanelState.DISARMED, } @@ -103,6 +97,6 @@ async def async_update(self) -> None: """Update state.""" state: str | None = await self.get_home_endpoint_value(self._command_state) if state: - self._attr_state = FREEBOX_TO_STATUS.get(state) + self._attr_alarm_state = FREEBOX_TO_STATUS.get(state) else: - self._attr_state = None + self._attr_alarm_state = None diff --git a/homeassistant/components/fritz/config_flow.py b/homeassistant/components/fritz/config_flow.py index 8dfff1337f9a15..ec9ffdd755481c 100644 --- a/homeassistant/components/fritz/config_flow.py +++ b/homeassistant/components/fritz/config_flow.py @@ -23,7 +23,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_HOST, @@ -58,13 +57,13 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - _entry: ConfigEntry - @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FritzBoxToolsOptionsFlowHandler: """Get the options flow for this handler.""" - return FritzBoxToolsOptionsFlowHandler(config_entry) + return FritzBoxToolsOptionsFlowHandler() def __init__(self) -> None: """Initialize FRITZ!Box Tools flow.""" @@ -76,6 +75,10 @@ def __init__(self) -> None: self._username: str = "" self._model: str = "" + async def async_fritz_tools_init(self) -> str | None: + """Initialize FRITZ!Box Tools class.""" + return await self.hass.async_add_executor_job(self.fritz_tools_init) + def fritz_tools_init(self) -> str | None: """Initialize FRITZ!Box Tools class.""" @@ -201,7 +204,7 @@ async def async_step_confirm( self._use_tls = 
user_input[CONF_SSL] self._port = self._determine_port(user_input) - error = await self.hass.async_add_executor_job(self.fritz_tools_init) + error = await self.async_fritz_tools_init() if error: errors["base"] = error @@ -264,7 +267,7 @@ async def async_step_user( self._port = self._determine_port(user_input) - if not (error := await self.hass.async_add_executor_job(self.fritz_tools_init)): + if not (error := await self.async_fritz_tools_init()): self._name = self._model if await self.async_check_configured_entry(): @@ -279,7 +282,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle flow upon an API authentication error.""" - self._entry = self._get_reauth_entry() self._host = entry_data[CONF_HOST] self._port = entry_data[CONF_PORT] self._username = entry_data[CONF_USERNAME] @@ -317,13 +319,13 @@ async def async_step_reauth_confirm( self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] - if error := await self.hass.async_add_executor_job(self.fritz_tools_init): + if error := await self.async_fritz_tools_init(): return self._show_setup_form_reauth_confirm( user_input=user_input, errors={"base": error} ) - self.hass.config_entries.async_update_entry( - self._entry, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={ CONF_HOST: self._host, CONF_PASSWORD: self._password, @@ -332,23 +334,8 @@ async def async_step_reauth_confirm( CONF_SSL: self._use_tls, }, ) - await self.hass.config_entries.async_reload(self._entry.entry_id) - return self.async_abort(reason="reauth_successful") - - async def async_step_reconfigure( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfigure flow .""" - self._entry = self._get_reconfigure_entry() - self._host = self._entry.data[CONF_HOST] - self._port = self._entry.data[CONF_PORT] - self._username = self._entry.data[CONF_USERNAME] - self._password = self._entry.data[CONF_PASSWORD] - self._use_tls = self._entry.data.get(CONF_SSL, DEFAULT_SSL) - return await self.async_step_reconfigure_confirm() - - def _show_setup_form_reconfigure_confirm( + def _show_setup_form_reconfigure( self, user_input: dict[str, Any], errors: dict[str, str] | None = None ) -> ConfigFlowResult: """Show the reconfigure form to the user.""" @@ -359,7 +346,7 @@ def _show_setup_form_reconfigure_confirm( } return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str, @@ -367,20 +354,21 @@ def _show_setup_form_reconfigure_confirm( vol.Required(CONF_SSL, default=user_input[CONF_SSL]): bool, } ), - description_placeholders={"host": self._host}, + description_placeholders={"host": user_input[CONF_HOST]}, errors=errors or {}, ) - async def async_step_reconfigure_confirm( + async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfigure flow.""" if user_input is None: - return self._show_setup_form_reconfigure_confirm( + reconfigure_entry_data = self._get_reconfigure_entry().data + return self._show_setup_form_reconfigure( { - CONF_HOST: self._host, - CONF_PORT: self._port, - CONF_SSL: self._use_tls, + CONF_HOST: reconfigure_entry_data[CONF_HOST], + CONF_PORT: reconfigure_entry_data[CONF_PORT], + CONF_SSL: reconfigure_entry_data.get(CONF_SSL, DEFAULT_SSL), } ) @@ -388,26 +376,25 @@ async def async_step_reconfigure_confirm( self._use_tls = user_input[CONF_SSL] self._port = 
self._determine_port(user_input) - if error := await self.hass.async_add_executor_job(self.fritz_tools_init): - return self._show_setup_form_reconfigure_confirm( + reconfigure_entry = self._get_reconfigure_entry() + self._username = reconfigure_entry.data[CONF_USERNAME] + self._password = reconfigure_entry.data[CONF_PASSWORD] + if error := await self.async_fritz_tools_init(): + return self._show_setup_form_reconfigure( user_input={**user_input, CONF_PORT: self._port}, errors={"base": error} ) - self.hass.config_entries.async_update_entry( - self._entry, - data={ + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={ CONF_HOST: self._host, - CONF_PASSWORD: self._password, CONF_PORT: self._port, - CONF_USERNAME: self._username, CONF_SSL: self._use_tls, }, ) - await self.hass.config_entries.async_reload(self._entry.entry_id) - return self.async_abort(reason="reconfigure_successful") -class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FritzBoxToolsOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -418,19 +405,18 @@ async def async_step_init( if user_input is not None: return self.async_create_entry(title="", data=user_input) + options = self.config_entry.options data_schema = vol.Schema( { vol.Optional( CONF_CONSIDER_HOME, - default=self.options.get( + default=options.get( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() ), ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)), vol.Optional( CONF_OLD_DISCOVERY, - default=self.options.get( - CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY - ), + default=options.get(CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY), ): bool, } ) diff --git a/homeassistant/components/fritz/coordinator.py b/homeassistant/components/fritz/coordinator.py index 4134f0af02623e..31d8ff814915fc 100644 --- a/homeassistant/components/fritz/coordinator.py +++ b/homeassistant/components/fritz/coordinator.py @@ -606,6 +606,9 @@ async def async_scan_devices(self, now: datetime | None = None) -> None: dev_info: Device = hosts[dev_mac] for link in interf["node_links"]: + if link.get("state") != "CONNECTED": + continue # ignore orphan node links + intf = mesh_intf.get(link["node_interface_1_uid"]) if intf is not None: if intf["op_mode"] == "AP_GUEST": diff --git a/homeassistant/components/fritz/manifest.json b/homeassistant/components/fritz/manifest.json index d8d8f6b94bfce3..27aa42d9b2c1c2 100644 --- a/homeassistant/components/fritz/manifest.json +++ b/homeassistant/components/fritz/manifest.json @@ -1,13 +1,13 @@ { "domain": "fritz", "name": "AVM FRITZ!Box Tools", - "codeowners": ["@mammuth", "@AaronDavidSchneider", "@chemelli74", "@mib1185"], + "codeowners": ["@AaronDavidSchneider", "@chemelli74", "@mib1185"], "config_flow": true, "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/fritz", "iot_class": "local_polling", "loggers": ["fritzconnection"], - "requirements": ["fritzconnection[qr]==1.13.2", "xmltodict==0.13.0"], + "requirements": ["fritzconnection[qr]==1.14.0", "xmltodict==0.13.0"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:fritzbox:1" diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index 6be393cc63601d..96eb6243529ddd 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -19,7 +19,7 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Updating 
FRITZ!Box Tools - configuration", "description": "Update FRITZ!Box Tools configuration for: {host}.", "data": { @@ -56,6 +56,7 @@ "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { + "unknown_error": "[%key:common::config_flow::error::unknown%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "upnp_not_configured": "Missing UPnP settings on device.", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index fb4ab23a2b2aa0..76754fc5082ec2 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -12,7 +12,7 @@ import voluptuous as vol from homeassistant.components import ssdp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from .const import DEFAULT_HOST, DEFAULT_USERNAME, DOMAIN @@ -43,8 +43,6 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _entry: ConfigEntry - def __init__(self) -> None: """Initialize flow.""" self._host: str | None = None @@ -62,16 +60,9 @@ def _get_entry(self, name: str) -> ConfigFlowResult: }, ) - async def _update_entry(self) -> None: - self.hass.config_entries.async_update_entry( - self._entry, - data={ - CONF_HOST: self._host, - CONF_PASSWORD: self._password, - CONF_USERNAME: self._username, - }, - ) - await self.hass.config_entries.async_reload(self._entry.entry_id) + async def async_try_connect(self) -> str: + """Try to connect and check auth.""" + return await self.hass.async_add_executor_job(self._try_connect) def _try_connect(self) -> str: """Try to connect and check auth.""" @@ -104,7 +95,7 @@ async def async_step_user( self._password = user_input[CONF_PASSWORD] self._username = user_input[CONF_USERNAME] - result = await self.hass.async_add_executor_job(self._try_connect) + result = await self.async_try_connect() if result == RESULT_SUCCESS: return self._get_entry(self._name) @@ -164,7 +155,7 @@ async def async_step_confirm( if user_input is not None: self._password = user_input[CONF_PASSWORD] self._username = user_input[CONF_USERNAME] - result = await self.hass.async_add_executor_job(self._try_connect) + result = await self.async_try_connect() if result == RESULT_SUCCESS: assert self._name is not None @@ -184,7 +175,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Trigger a reauthentication flow.""" - self._entry = self._get_reauth_entry() self._host = entry_data[CONF_HOST] self._name = str(entry_data[CONF_HOST]) self._username = entry_data[CONF_USERNAME] @@ -201,11 +191,17 @@ async def async_step_reauth_confirm( self._password = user_input[CONF_PASSWORD] self._username = user_input[CONF_USERNAME] - result = await self.hass.async_add_executor_job(self._try_connect) + result = await self.async_try_connect() if result == RESULT_SUCCESS: - await self._update_entry() - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data={ + CONF_HOST: self._host, + CONF_PASSWORD: self._password, + CONF_USERNAME: self._username, + }, + ) if result != RESULT_INVALID_AUTH: return self.async_abort(reason=result) errors["base"] = result @@ -224,18 +220,6 @@ async def 
async_step_reauth_confirm( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self._entry = self._get_reconfigure_entry() - self._name = self._entry.data[CONF_HOST] - self._host = self._entry.data[CONF_HOST] - self._username = self._entry.data[CONF_USERNAME] - self._password = self._entry.data[CONF_PASSWORD] - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors = {} @@ -243,20 +227,27 @@ async def async_step_reconfigure_confirm( if user_input is not None: self._host = user_input[CONF_HOST] - result = await self.hass.async_add_executor_job(self._try_connect) + reconfigure_entry = self._get_reconfigure_entry() + self._username = reconfigure_entry.data[CONF_USERNAME] + self._password = reconfigure_entry.data[CONF_PASSWORD] + + result = await self.async_try_connect() if result == RESULT_SUCCESS: - await self._update_entry() - return self.async_abort(reason="reconfigure_successful") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={CONF_HOST: self._host}, + ) errors["base"] = result + host = self._get_reconfigure_entry().data[CONF_HOST] return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { - vol.Required(CONF_HOST, default=self._host): str, + vol.Required(CONF_HOST, default=host): str, } ), - description_placeholders={"name": self._name}, + description_placeholders={"name": host}, errors=errors, ) diff --git a/homeassistant/components/fritzbox/strings.json b/homeassistant/components/fritzbox/strings.json index d4f59fd1c08015..c7c2439b566878 100644 --- a/homeassistant/components/fritzbox/strings.json +++ b/homeassistant/components/fritzbox/strings.json @@ -27,7 +27,7 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update your configuration information for {name}.", "data": { "host": "[%key:common::config_flow::data::host%]" @@ -47,6 +47,7 @@ "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, diff --git a/homeassistant/components/fritzbox_callmonitor/config_flow.py b/homeassistant/components/fritzbox_callmonitor/config_flow.py index 69efceae281bb1..7bd0eacb66aeb6 100644 --- a/homeassistant/components/fritzbox_callmonitor/config_flow.py +++ b/homeassistant/components/fritzbox_callmonitor/config_flow.py @@ -141,7 +141,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> FritzBoxCallMonitorOptionsFlowHandler: """Get the options flow for this handler.""" - return FritzBoxCallMonitorOptionsFlowHandler(config_entry) + return FritzBoxCallMonitorOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -278,10 +278,6 @@ async def async_step_reauth_confirm( class FritzBoxCallMonitorOptionsFlowHandler(OptionsFlow): """Handle a fritzbox_callmonitor options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - @classmethod def _are_prefixes_valid(cls, prefixes: str | None) -> bool: """Check if prefixes are valid.""" diff 
--git a/homeassistant/components/fritzbox_callmonitor/manifest.json b/homeassistant/components/fritzbox_callmonitor/manifest.json index 4e5c60091c9025..06492647c307d0 100644 --- a/homeassistant/components/fritzbox_callmonitor/manifest.json +++ b/homeassistant/components/fritzbox_callmonitor/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["fritzconnection"], - "requirements": ["fritzconnection[qr]==1.13.2"] + "requirements": ["fritzconnection[qr]==1.14.0"] } diff --git a/homeassistant/components/fronius/__init__.py b/homeassistant/components/fronius/__init__.py index 07271b91f28b9f..e30f8e85fa0370 100644 --- a/homeassistant/components/fronius/__init__.py +++ b/homeassistant/components/fronius/__init__.py @@ -199,7 +199,10 @@ async def _init_devices_inverter(self, _now: datetime | None = None) -> None: name=_inverter_name, inverter_info=_inverter_info, ) - await _coordinator.async_config_entry_first_refresh() + if self.config_entry.state == ConfigEntryState.LOADED: + await _coordinator.async_refresh() + else: + await _coordinator.async_config_entry_first_refresh() self.inverter_coordinators.append(_coordinator) # Only for re-scans. Initial setup adds entities through sensor.async_setup_entry diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index b16f43d58e8845..2adbf2ae2f3e79 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -72,7 +72,6 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize flow.""" self.info: FroniusConfigEntryData - self._entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -145,6 +144,7 @@ async def async_step_reconfigure( ) -> ConfigFlowResult: """Add reconfigure step to allow to reconfigure a config entry.""" errors = {} + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: try: @@ -155,33 +155,16 @@ async def async_step_reconfigure( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - # Config didn't change or is already configured in another entry - self._async_abort_entries_match(dict(info)) - - existing_entry = await self.async_set_unique_id( - unique_id, raise_on_progress=False - ) - assert self._entry is not None - if existing_entry and existing_entry.entry_id != self._entry.entry_id: - # Uid of device is already configured in another entry (but with different host) - self._abort_if_unique_id_configured() - - return self.async_update_reload_and_abort( - self._entry, - data=info, - reason="reconfigure_successful", - ) - - if self._entry is None: - self._entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert self._entry is not None - host = self._entry.data[CONF_HOST] + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_mismatch() + + return self.async_update_reload_and_abort(reconfigure_entry, data=info) + + host = reconfigure_entry.data[CONF_HOST] return self.async_show_form( 
step_id="reconfigure", data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}), - description_placeholders={"device": self._entry.title}, + description_placeholders={"device": reconfigure_entry.title}, errors=errors, ) diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 1eaa612a6e7700..dfdcfc0ddb28c3 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -26,7 +26,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "invalid_host": "[%key:common::config_flow::error::invalid_host%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The identifier does not match the previous identifier" } }, "entity": { diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 9f79dcf34f6111..4dc5a2b0ae47c6 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241002.2"] + "requirements": ["home-assistant-frontend==20241106.2"] } diff --git a/homeassistant/components/frontier_silicon/config_flow.py b/homeassistant/components/frontier_silicon/config_flow.py index 06af041d8f214c..0612419fc335b2 100644 --- a/homeassistant/components/frontier_silicon/config_flow.py +++ b/homeassistant/components/frontier_silicon/config_flow.py @@ -16,7 +16,7 @@ import voluptuous as vol from homeassistant.components import ssdp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT from .const import ( @@ -58,7 +58,6 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): _name: str _webfsapi_url: str - _reauth_entry: ConfigEntry | None = None # Only used in reauth flows async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -178,11 +177,6 @@ async def async_step_reauth( ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self._webfsapi_url = entry_data[CONF_WEBFSAPI_URL] - - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_device_config() async def async_step_device_config( @@ -213,13 +207,11 @@ async def async_step_device_config( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self._reauth_entry: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={CONF_PIN: user_input[CONF_PIN]}, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_PIN: user_input[CONF_PIN]}, ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") try: unique_id = await afsapi.get_radio_id() diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py index aef856631f6468..c4b097ff0de605 100644 --- 
a/homeassistant/components/fujitsu_fglair/config_flow.py +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -8,7 +8,7 @@ from ayla_iot_unofficial.fujitsu_consts import FGLAIR_APP_CREDENTIALS import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import aiohttp_client from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig @@ -41,7 +41,6 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Fujitsu HVAC (based on Ayla IOT).""" MINOR_VERSION = 2 - _reauth_entry: ConfigEntry | None = None async def _async_validate_credentials( self, user_input: dict[str, Any] @@ -93,9 +92,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -103,25 +99,23 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if user_input: - reauth_data = { - **self._reauth_entry.data, - CONF_PASSWORD: user_input[CONF_PASSWORD], - } - errors = await self._async_validate_credentials(reauth_data) + errors = await self._async_validate_credentials( + reauth_entry.data | user_input + ) - if len(errors) == 0: + if not errors: return self.async_update_reload_and_abort( - self._reauth_entry, data=reauth_data + reauth_entry, data_updates=user_input ) return self.async_show_form( step_id="reauth_confirm", data_schema=STEP_REAUTH_DATA_SCHEMA, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], **self.context["title_placeholders"], }, errors=errors, diff --git a/homeassistant/components/fujitsu_fglair/const.py b/homeassistant/components/fujitsu_fglair/const.py index 8aa911a8b30174..73c811a1ed517c 100644 --- a/homeassistant/components/fujitsu_fglair/const.py +++ b/homeassistant/components/fujitsu_fglair/const.py @@ -9,5 +9,5 @@ CONF_REGION = "region" CONF_EUROPE = "is_europe" -REGION_EU = "EU" +REGION_EU = "eu" REGION_DEFAULT = "default" diff --git a/homeassistant/components/fujitsu_fglair/manifest.json b/homeassistant/components/fujitsu_fglair/manifest.json index 76cf3966fbe24a..f7f3af8d037500 100644 --- a/homeassistant/components/fujitsu_fglair/manifest.json +++ b/homeassistant/components/fujitsu_fglair/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/fujitsu_fglair", "iot_class": "cloud_polling", - "requirements": ["ayla-iot-unofficial==1.4.1"] + "requirements": ["ayla-iot-unofficial==1.4.3"] } diff --git a/homeassistant/components/fyta/config_flow.py b/homeassistant/components/fyta/config_flow.py index f2b5163c9dbbc0..78cb76477850ec 100644 --- a/homeassistant/components/fyta/config_flow.py +++ b/homeassistant/components/fyta/config_flow.py @@ -23,7 +23,6 @@ TextSelectorType, ) -from . 
import FytaConfigEntry from .const import CONF_EXPIRATION, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -51,7 +50,6 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Fyta.""" credentials: Credentials - _entry: FytaConfigEntry | None = None VERSION = 1 MINOR_VERSION = 2 @@ -100,7 +98,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle flow upon an API authentication error.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -108,20 +105,21 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Handle reauthorization flow.""" errors = {} - assert self._entry is not None + reauth_entry = self._get_reauth_entry() if user_input and not (errors := await self.async_auth(user_input)): user_input |= { CONF_ACCESS_TOKEN: self.credentials.access_token, CONF_EXPIRATION: self.credentials.expiration.isoformat(), } return self.async_update_reload_and_abort( - self._entry, data={**self._entry.data, **user_input} + reauth_entry, + data_updates=user_input, ) data_schema = self.add_suggested_values_to_schema( DATA_SCHEMA, - {CONF_USERNAME: self._entry.data[CONF_USERNAME], **(user_input or {})}, + {CONF_USERNAME: reauth_entry.data[CONF_USERNAME], **(user_input or {})}, ) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/fyta/coordinator.py b/homeassistant/components/fyta/coordinator.py index df607de76b0874..c4aa9bfe589158 100644 --- a/homeassistant/components/fyta/coordinator.py +++ b/homeassistant/components/fyta/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from datetime import datetime, timedelta import logging from typing import TYPE_CHECKING @@ -18,9 +19,10 @@ from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +import homeassistant.helpers.device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_EXPIRATION +from .const import CONF_EXPIRATION, DOMAIN if TYPE_CHECKING: from . 
import FytaConfigEntry @@ -42,6 +44,8 @@ def __init__(self, hass: HomeAssistant, fyta: FytaConnector) -> None: update_interval=timedelta(minutes=4), ) self.fyta = fyta + self._plants_last_update: set[int] = set() + self.new_device_callbacks: list[Callable[[int], None]] = [] async def _async_update_data( self, @@ -55,9 +59,62 @@ async def _async_update_data( await self.renew_authentication() try: - return await self.fyta.update_all_plants() + data = await self.fyta.update_all_plants() except (FytaConnectionError, FytaPlantError) as err: raise UpdateFailed(err) from err + _LOGGER.debug("Data successfully updated") + + # data must be assigned before _async_add_remove_devices, as it is uses to set-up possible new devices + self.data = data + self._async_add_remove_devices() + + return data + + def _async_add_remove_devices(self) -> None: + """Add new devices, remove non-existing devices.""" + if not self._plants_last_update: + self._plants_last_update = set(self.fyta.plant_list.keys()) + + if ( + current_plants := set(self.fyta.plant_list.keys()) + ) == self._plants_last_update: + return + + _LOGGER.debug( + "Check for new and removed plant(s): old plants: %s; new plants: %s", + ", ".join(map(str, self._plants_last_update)), + ", ".join(map(str, current_plants)), + ) + + # remove old plants + if removed_plants := self._plants_last_update - current_plants: + _LOGGER.debug("Removed plant(s): %s", ", ".join(map(str, removed_plants))) + + device_registry = dr.async_get(self.hass) + for plant_id in removed_plants: + if device := device_registry.async_get_device( + identifiers={ + ( + DOMAIN, + f"{self.config_entry.entry_id}-{plant_id}", + ) + } + ): + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=self.config_entry.entry_id, + ) + _LOGGER.debug("Device removed from device registry: %s", device.id) + + # add new devices + if new_plants := current_plants - self._plants_last_update: + _LOGGER.debug("New plant(s) found: %s", ", ".join(map(str, new_plants))) + for plant_id in new_plants: + for callback in self.new_device_callbacks: + callback(plant_id) + _LOGGER.debug("Device added: %s", plant_id) + + self._plants_last_update = current_plants async def renew_authentication(self) -> bool: """Renew access token for FYTA API.""" diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index 73f6b42f53b316..17fe5199eee19c 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -6,6 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/fyta", "integration_type": "hub", "iot_class": "cloud_polling", + "loggers": ["fyta_cli"], "quality_scale": "platinum", - "requirements": ["fyta_cli==0.6.7"] + "requirements": ["fyta_cli==0.6.10"] } diff --git a/homeassistant/components/fyta/sensor.py b/homeassistant/components/fyta/sensor.py index a351d79dd8bbae..89ee22265cf9b2 100644 --- a/homeassistant/components/fyta/sensor.py +++ b/homeassistant/components/fyta/sensor.py @@ -113,7 +113,7 @@ class FytaSensorEntityDescription(SensorEntityDescription): FytaSensorEntityDescription( key="salinity", translation_key="salinity", - native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS, + native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS_PER_CM, device_class=SensorDeviceClass.CONDUCTIVITY, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda plant: plant.salinity, @@ -150,6 +150,15 @@ async def async_setup_entry( async_add_entities(plant_entities) + def 
_async_add_new_device(plant_id: int) -> None: + async_add_entities( + FytaPlantSensor(coordinator, entry, sensor, plant_id) + for sensor in SENSORS + if sensor.key in dir(coordinator.data.get(plant_id)) + ) + + coordinator.new_device_callbacks.append(_async_add_new_device) + class FytaPlantSensor(FytaPlantEntity, SensorEntity): """Represents a Fyta sensor.""" diff --git a/homeassistant/components/garadget/cover.py b/homeassistant/components/garadget/cover.py index 988c66b679c806..82045e913212f4 100644 --- a/homeassistant/components/garadget/cover.py +++ b/homeassistant/components/garadget/cover.py @@ -12,6 +12,7 @@ PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, CoverDeviceClass, CoverEntity, + CoverState, ) from homeassistant.const import ( CONF_ACCESS_TOKEN, @@ -20,8 +21,6 @@ CONF_NAME, CONF_PASSWORD, CONF_USERNAME, - STATE_CLOSED, - STATE_OPEN, ) from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -38,16 +37,14 @@ DEFAULT_NAME = "Garadget" -STATE_CLOSING = "closing" STATE_OFFLINE = "offline" -STATE_OPENING = "opening" STATE_STOPPED = "stopped" STATES_MAP = { - "open": STATE_OPEN, - "opening": STATE_OPENING, - "closed": STATE_CLOSED, - "closing": STATE_CLOSING, + "open": CoverState.OPEN, + "opening": CoverState.OPENING, + "closed": CoverState.CLOSED, + "closing": CoverState.CLOSING, "stopped": STATE_STOPPED, } @@ -175,7 +172,7 @@ def is_closed(self) -> bool | None: """Return if the cover is closed.""" if self._state is None: return None - return self._state == STATE_CLOSED + return self._state == CoverState.CLOSED def get_token(self): """Get new token for usage during this session.""" @@ -249,7 +246,7 @@ def update(self) -> None: self._state = STATE_OFFLINE if ( - self._state not in [STATE_CLOSING, STATE_OPENING] + self._state not in [CoverState.CLOSING, CoverState.OPENING] and self._unsub_listener_cover is not None ): self._unsub_listener_cover() diff --git a/homeassistant/components/gardena_bluetooth/__init__.py b/homeassistant/components/gardena_bluetooth/__init__.py index b6a26456168007..7aae629974c6b6 100644 --- a/homeassistant/components/gardena_bluetooth/__init__.py +++ b/homeassistant/components/gardena_bluetooth/__init__.py @@ -32,6 +32,8 @@ TIMEOUT = 20.0 DISCONNECT_DELAY = 5 +type GardenaBluetoothConfigEntry = ConfigEntry[GardenaBluetoothCoordinator] + def get_connection(hass: HomeAssistant, address: str) -> CachedConnection: """Set up a cached client that keeps connection after last use.""" @@ -47,7 +49,9 @@ def _device_lookup() -> BLEDevice: return CachedConnection(DISCONNECT_DELAY, _device_lookup) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: GardenaBluetoothConfigEntry +) -> bool: """Set up Gardena Bluetooth from a config entry.""" address = entry.data[CONF_ADDRESS] @@ -79,17 +83,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass, LOGGER, client, uuids, device, address ) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) await coordinator.async_refresh() return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: GardenaBluetoothConfigEntry +) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - coordinator: 
GardenaBluetoothCoordinator = hass.data[DOMAIN].pop(entry.entry_id) - await coordinator.async_shutdown() + await entry.runtime_data.async_shutdown() return unload_ok diff --git a/homeassistant/components/gardena_bluetooth/binary_sensor.py b/homeassistant/components/gardena_bluetooth/binary_sensor.py index be6d8bbeede79a..d3ae096e291acb 100644 --- a/homeassistant/components/gardena_bluetooth/binary_sensor.py +++ b/homeassistant/components/gardena_bluetooth/binary_sensor.py @@ -12,13 +12,11 @@ BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import GardenaBluetoothCoordinator +from . import GardenaBluetoothConfigEntry from .entity import GardenaBluetoothDescriptorEntity @@ -53,10 +51,12 @@ def context(self) -> set[str]: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary sensor based on a config entry.""" - coordinator: GardenaBluetoothCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [ GardenaBluetoothBinarySensor(coordinator, description, description.context) for description in DESCRIPTIONS diff --git a/homeassistant/components/gardena_bluetooth/button.py b/homeassistant/components/gardena_bluetooth/button.py index 67377dc684e281..9d87cba2446a15 100644 --- a/homeassistant/components/gardena_bluetooth/button.py +++ b/homeassistant/components/gardena_bluetooth/button.py @@ -8,13 +8,11 @@ from gardena_bluetooth.parse import CharacteristicBool from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import GardenaBluetoothCoordinator +from . 
import GardenaBluetoothConfigEntry from .entity import GardenaBluetoothDescriptorEntity @@ -42,10 +40,12 @@ def context(self) -> set[str]: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up button based on a config entry.""" - coordinator: GardenaBluetoothCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [ GardenaBluetoothButton(coordinator, description, description.context) for description in DESCRIPTIONS diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index 6d7566b3edfd6e..da5c08c38c5e77 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.4.3"] + "requirements": ["gardena-bluetooth==1.4.4"] } diff --git a/homeassistant/components/gardena_bluetooth/number.py b/homeassistant/components/gardena_bluetooth/number.py index d3c178ee637c0d..b55630fa79769d 100644 --- a/homeassistant/components/gardena_bluetooth/number.py +++ b/homeassistant/components/gardena_bluetooth/number.py @@ -17,12 +17,11 @@ NumberEntityDescription, NumberMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import GardenaBluetoothConfigEntry from .coordinator import GardenaBluetoothCoordinator from .entity import GardenaBluetoothDescriptorEntity, GardenaBluetoothEntity @@ -105,10 +104,12 @@ def context(self) -> set[str]: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up entity based on a config entry.""" - coordinator: GardenaBluetoothCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities: list[NumberEntity] = [ GardenaBluetoothNumber(coordinator, description, description.context) for description in DESCRIPTIONS diff --git a/homeassistant/components/gardena_bluetooth/sensor.py b/homeassistant/components/gardena_bluetooth/sensor.py index 19fefefa9aaf1f..ee8a2663218b4c 100644 --- a/homeassistant/components/gardena_bluetooth/sensor.py +++ b/homeassistant/components/gardena_bluetooth/sensor.py @@ -14,13 +14,12 @@ SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .const import DOMAIN +from . 
import GardenaBluetoothConfigEntry from .coordinator import GardenaBluetoothCoordinator from .entity import GardenaBluetoothDescriptorEntity, GardenaBluetoothEntity @@ -95,10 +94,12 @@ def context(self) -> set[str]: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Gardena Bluetooth sensor based on a config entry.""" - coordinator: GardenaBluetoothCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities: list[GardenaBluetoothEntity] = [ GardenaBluetoothSensor(coordinator, description, description.context) for description in DESCRIPTIONS diff --git a/homeassistant/components/gardena_bluetooth/strings.json b/homeassistant/components/gardena_bluetooth/strings.json index d0c1b878cefb4b..dd50bac0b2a14f 100644 --- a/homeassistant/components/gardena_bluetooth/strings.json +++ b/homeassistant/components/gardena_bluetooth/strings.json @@ -16,7 +16,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "entity": { diff --git a/homeassistant/components/gardena_bluetooth/switch.py b/homeassistant/components/gardena_bluetooth/switch.py index 58b4b2e4e5124b..f82c39025a5d2c 100644 --- a/homeassistant/components/gardena_bluetooth/switch.py +++ b/homeassistant/components/gardena_bluetooth/switch.py @@ -7,21 +7,22 @@ from gardena_bluetooth.const import Valve from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import GardenaBluetoothConfigEntry from .coordinator import GardenaBluetoothCoordinator from .entity import GardenaBluetoothEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch based on a config entry.""" - coordinator: GardenaBluetoothCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [] if GardenaBluetoothValveSwitch.characteristics.issubset( coordinator.characteristics diff --git a/homeassistant/components/gardena_bluetooth/valve.py b/homeassistant/components/gardena_bluetooth/valve.py index 877cc5b505e747..ae6bf56a7ff675 100644 --- a/homeassistant/components/gardena_bluetooth/valve.py +++ b/homeassistant/components/gardena_bluetooth/valve.py @@ -7,11 +7,10 @@ from gardena_bluetooth.const import Valve from homeassistant.components.valve import ValveEntity, ValveEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import GardenaBluetoothConfigEntry from .coordinator import GardenaBluetoothCoordinator from .entity import GardenaBluetoothEntity @@ -19,10 +18,12 @@ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GardenaBluetoothConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch based on a config entry.""" - coordinator: GardenaBluetoothCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities = [] if GardenaBluetoothValve.characteristics.issubset(coordinator.characteristics): entities.append(GardenaBluetoothValve(coordinator)) diff --git a/homeassistant/components/generic/config_flow.py b/homeassistant/components/generic/config_flow.py index 7b10cdfb64b938..8bd238fd0e645e 100644 --- a/homeassistant/components/generic/config_flow.py +++ b/homeassistant/components/generic/config_flow.py @@ -324,7 +324,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> GenericOptionsFlowHandler: """Get the options flow for this handler.""" - return GenericOptionsFlowHandler(config_entry) + return GenericOptionsFlowHandler() def check_for_existing(self, options: dict[str, Any]) -> bool: """Check whether an existing entry is using the same URLs.""" @@ -409,9 +409,8 @@ async def async_step_user_confirm_still( class GenericOptionsFlowHandler(OptionsFlow): """Handle Generic IP Camera options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize Generic IP Camera options flow.""" - self.config_entry = config_entry self.preview_cam: dict[str, Any] = {} self.user_input: dict[str, Any] = {} diff --git a/homeassistant/components/generic/diagnostics.py b/homeassistant/components/generic/diagnostics.py index e5bf4294e4a073..3150ba0cd4c72a 100644 --- a/homeassistant/components/generic/diagnostics.py +++ b/homeassistant/components/generic/diagnostics.py @@ -23,12 +23,16 @@ def redact_url(data: str) -> str: """Redact credentials from string url.""" url = url_in = yarl.URL(data) + # https://github.com/pylint-dev/pylint/issues/3484 + # pylint: disable-next=using-constant-test if url_in.user: url = url.with_user("****") + # pylint: disable-next=using-constant-test if url_in.password: url = url.with_password("****") if url_in.path != "/": url = url.with_path("****") + # pylint: disable-next=using-constant-test if url_in.query_string: url = url.with_query("****=****") return str(url) diff --git a/homeassistant/components/generic/manifest.json b/homeassistant/components/generic/manifest.json index b19d6d6293ec5f..c1fbc16d9be316 100644 --- a/homeassistant/components/generic/manifest.json +++ b/homeassistant/components/generic/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/generic", "integration_type": "device", "iot_class": "local_push", - "requirements": ["ha-av==10.1.1", "Pillow==10.4.0"] + "requirements": ["av==13.1.0", "Pillow==11.0.0"] } diff --git a/homeassistant/components/generic_hygrostat/strings.json b/homeassistant/components/generic_hygrostat/strings.json index a21ab68c62830a..2be3955eff1ee9 100644 --- a/homeassistant/components/generic_hygrostat/strings.json +++ b/homeassistant/components/generic_hygrostat/strings.json @@ -4,7 +4,7 @@ "step": { "user": { "title": "Add generic hygrostat", - "description": "Create a entity that control the humidity via a switch and sensor.", + "description": "Create a humidifier entity that control the humidity via a switch and 
sensor.", "data": { "device_class": "Device class", "dry_tolerance": "Dry tolerance", diff --git a/homeassistant/components/generic_thermostat/config_flow.py b/homeassistant/components/generic_thermostat/config_flow.py index e9079a9f41adb2..5b0eae8ff66f8f 100644 --- a/homeassistant/components/generic_thermostat/config_flow.py +++ b/homeassistant/components/generic_thermostat/config_flow.py @@ -62,7 +62,7 @@ PRESETS_SCHEMA = { vol.Optional(v): selector.NumberSelector( selector.NumberSelectorConfig( - mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE + mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1 ) ) for v in CONF_PRESETS.values() diff --git a/homeassistant/components/generic_thermostat/strings.json b/homeassistant/components/generic_thermostat/strings.json index 1ddd41de734450..51549dc844e2a8 100644 --- a/homeassistant/components/generic_thermostat/strings.json +++ b/homeassistant/components/generic_thermostat/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add generic thermostat helper", + "title": "Add generic thermostat", "description": "Create a climate entity that controls the temperature via a switch and sensor.", "data": { "ac_mode": "Cooling mode", @@ -17,8 +17,8 @@ "data_description": { "ac_mode": "Set the actuator specified to be treated as a cooling device instead of a heating device.", "heater": "Switch entity used to cool or heat depending on A/C mode.", - "target_sensor": "Temperature sensor that reflect the current temperature.", - "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on. This option will be ignored if the keep alive option is set.", + "target_sensor": "Temperature sensor that reflects the current temperature.", + "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.", "cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor equals or goes below 24.5.", "hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5." 
} diff --git a/homeassistant/components/geniushub/__init__.py b/homeassistant/components/geniushub/__init__.py index 18580f331d2232..9ca6ecfcfe0684 100644 --- a/homeassistant/components/geniushub/__init__.py +++ b/homeassistant/components/geniushub/__init__.py @@ -9,7 +9,6 @@ from geniushubclient import GeniusHub import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, @@ -21,20 +20,12 @@ CONF_USERNAME, Platform, ) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - HomeAssistant, - ServiceCall, - callback, -) -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.service import verify_domain_control -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN @@ -45,27 +36,6 @@ MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$" -CLOUD_API_SCHEMA = vol.Schema( - { - vol.Required(CONF_TOKEN): cv.string, - vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), - } -) - - -LOCAL_API_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), - } -) - -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.Any(LOCAL_API_SCHEMA, CLOUD_API_SCHEMA)}, extra=vol.ALLOW_EXTRA -) - ATTR_ZONE_MODE = "mode" ATTR_DURATION = "duration" @@ -100,56 +70,6 @@ ] -async def _async_import(hass: HomeAssistant, base_config: ConfigType) -> None: - """Import a config entry from configuration.yaml.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=base_config[DOMAIN], - ) - if ( - result["type"] is FlowResultType.CREATE_ENTRY - or result["reason"] == "already_configured" - ): - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Genius Hub", - }, - ) - return - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Genius Hub", - }, - ) - - -async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: - """Set up a Genius Hub system.""" - if DOMAIN in base_config: - hass.async_create_task(_async_import(hass, base_config)) - return True - - type GeniusHubConfigEntry = ConfigEntry[GeniusBroker] @@ -170,7 +90,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: GeniusHubConfigEntry) -> ) session = async_get_clientsession(hass) - unique_id: str if CONF_HOST in entry.data: client = GeniusHub( entry.data[CONF_HOST], @@ -178,10 +97,10 @@ async 
def async_setup_entry(hass: HomeAssistant, entry: GeniusHubConfigEntry) -> password=entry.data[CONF_PASSWORD], session=session, ) - unique_id = entry.data[CONF_MAC] else: client = GeniusHub(entry.data[CONF_TOKEN], session=session) - unique_id = entry.entry_id + + unique_id = entry.unique_id or entry.entry_id broker = entry.runtime_data = GeniusBroker(hass, client, unique_id) diff --git a/homeassistant/components/geniushub/config_flow.py b/homeassistant/components/geniushub/config_flow.py index 601eac6c2f2863..b106f9907bb432 100644 --- a/homeassistant/components/geniushub/config_flow.py +++ b/homeassistant/components/geniushub/config_flow.py @@ -13,7 +13,6 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME -from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -123,14 +122,3 @@ async def async_step_cloud_api( return self.async_show_form( step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA ) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import the yaml config.""" - if CONF_HOST in import_data: - result = await self.async_step_local_api(import_data) - else: - result = await self.async_step_cloud_api(import_data) - if result["type"] is FlowResultType.FORM: - assert result["errors"] - return self.async_abort(reason=result["errors"]["base"]) - return result diff --git a/homeassistant/components/geonetnz_volcano/strings.json b/homeassistant/components/geonetnz_volcano/strings.json index 867d2840fb750b..f49fb4f9830326 100644 --- a/homeassistant/components/geonetnz_volcano/strings.json +++ b/homeassistant/components/geonetnz_volcano/strings.json @@ -6,7 +6,7 @@ "data": { "radius": "Radius" } } }, - "abort": { + "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_location%]" } } diff --git a/homeassistant/components/gios/manifest.json b/homeassistant/components/gios/manifest.json index b509806d07f417..b1eae512688a12 100644 --- a/homeassistant/components/gios/manifest.json +++ b/homeassistant/components/gios/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["dacite", "gios"], "quality_scale": "platinum", - "requirements": ["gios==4.0.0"] + "requirements": ["gios==5.0.0"] } diff --git a/homeassistant/components/github/config_flow.py b/homeassistant/components/github/config_flow.py index 25d8782618f73b..9977f9d84cc412 100644 --- a/homeassistant/components/github/config_flow.py +++ b/homeassistant/components/github/config_flow.py @@ -211,16 +211,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for GitHub.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None, diff --git a/homeassistant/components/glances/config_flow.py b/homeassistant/components/glances/config_flow.py index 9208a4b0ebda25..1dbc939d532723 100644 --- a/homeassistant/components/glances/config_flow.py +++ b/homeassistant/components/glances/config_flow.py @@ -11,7 +11,7 @@ ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, 
ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -40,15 +40,11 @@ class GlancesFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a Glances config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry | None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -56,9 +52,10 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} try: await get_api(self.hass, user_input) except GlancesApiAuthorizationError: @@ -67,15 +64,13 @@ async def async_step_reauth_confirm( errors["base"] = "cannot_connect" else: self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input + reauth_entry, data=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + await self.hass.config_entries.async_reload(reauth_entry.entry_id) return self.async_abort(reason="reauth_successful") return self.async_show_form( - description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] - }, + description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]}, step_id="reauth_confirm", data_schema=vol.Schema( { diff --git a/homeassistant/components/go2rtc/__init__.py b/homeassistant/components/go2rtc/__init__.py index 4ca1d72008fbca..04b5b9f9317326 100644 --- a/homeassistant/components/go2rtc/__init__.py +++ b/homeassistant/components/go2rtc/__init__.py @@ -1,20 +1,57 @@ """The go2rtc component.""" -from go2rtc_client import Go2RtcClient, WebRTCSdpOffer +from __future__ import annotations -from homeassistant.components.camera import Camera -from homeassistant.components.camera.webrtc import ( +from dataclasses import dataclass +import logging +import shutil + +from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError +from go2rtc_client import Go2RtcRestClient +from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError +from go2rtc_client.ws import ( + Go2RtcWsClient, + ReceiveMessages, + WebRTCAnswer, + WebRTCCandidate, + WebRTCOffer, + WsError, +) +import voluptuous as vol +from webrtc_models import RTCIceCandidate + +from homeassistant.components.camera import ( + Camera, CameraWebRTCProvider, + WebRTCAnswer as HAWebRTCAnswer, + WebRTCCandidate as HAWebRTCCandidate, + WebRTCError, + WebRTCMessage, + WebRTCSendMessage, async_register_webrtc_provider, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant +from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN +from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry +from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP +from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv, discovery_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession +from 
homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey +from homeassistant.util.package import is_docker_env -from .const import CONF_BINARY +from .const import ( + CONF_DEBUG_UI, + DEBUG_UI_URL_MESSAGE, + DOMAIN, + HA_MANAGED_RTSP_PORT, + HA_MANAGED_URL, +) from .server import Server +_LOGGER = logging.getLogger(__name__) + _SUPPORTED_STREAMS = frozenset( ( "bubble", @@ -45,49 +82,226 @@ ) ) +CONFIG_SCHEMA = vol.Schema( + { + DOMAIN: vol.Schema( + { + vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url, + vol.Exclusive(CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.boolean, + } + ) + }, + extra=vol.ALLOW_EXTRA, +) + +_DATA_GO2RTC: HassKey[Go2RtcData] = HassKey(DOMAIN) +_RETRYABLE_ERRORS = (ClientConnectionError, ServerConnectionError) + + +@dataclass(frozen=True) +class Go2RtcData: + """Data for go2rtc.""" + + url: str + managed: bool + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up WebRTC.""" + url: str | None = None + managed = False + if DOMAIN not in config and DEFAULT_CONFIG_DOMAIN not in config: + await _remove_go2rtc_entries(hass) + return True + + if not (configured_by_user := DOMAIN in config) or not ( + url := config[DOMAIN].get(CONF_URL) + ): + if not is_docker_env(): + if not configured_by_user: + # Remove config entry if it exists + await _remove_go2rtc_entries(hass) + return True + _LOGGER.warning("Go2rtc URL required in non-docker installs") + return False + if not (binary := await _get_binary(hass)): + _LOGGER.error("Could not find go2rtc docker binary") + return False -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up WebRTC from a config entry.""" - if binary := entry.data.get(CONF_BINARY): # HA will manage the binary - server = Server(binary) - entry.async_on_unload(server.stop) - server.start() + server = Server( + hass, binary, enable_ui=config.get(DOMAIN, {}).get(CONF_DEBUG_UI, False) + ) + try: + await server.start() + except Exception: # noqa: BLE001 + _LOGGER.warning("Could not start go2rtc server", exc_info=True) + return False + + async def on_stop(event: Event) -> None: + await server.stop() + + hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, on_stop) - client = Go2RtcClient(async_get_clientsession(hass), entry.data[CONF_HOST]) + url = HA_MANAGED_URL + managed = True - provider = WebRTCProvider(client) - entry.async_on_unload(async_register_webrtc_provider(hass, provider)) + hass.data[_DATA_GO2RTC] = Go2RtcData(url, managed) + discovery_flow.async_create_flow( + hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={} + ) return True +async def _remove_go2rtc_entries(hass: HomeAssistant) -> None: + """Remove go2rtc config entries, if any.""" + for entry in hass.config_entries.async_entries(DOMAIN): + await hass.config_entries.async_remove(entry.entry_id) + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up go2rtc from a config entry.""" + data = hass.data[_DATA_GO2RTC] + + # Validate the server URL + try: + client = Go2RtcRestClient(async_get_clientsession(hass), data.url) + await client.validate_server_version() + except Go2RtcClientError as err: + if isinstance(err.__cause__, _RETRYABLE_ERRORS): + raise ConfigEntryNotReady( + f"Could not connect to go2rtc instance on {data.url}" + ) from err + _LOGGER.warning( + "Could not connect to go2rtc instance on %s (%s)", data.url, err + ) + return False + except Go2RtcVersionError as err: + raise ConfigEntryNotReady( + f"The go2rtc server 
version is not supported, {err}"
+        ) from err
+    except Exception as err:  # noqa: BLE001
+        _LOGGER.warning(
+            "Could not connect to go2rtc instance on %s (%s)", data.url, err
+        )
+        return False
+
+    provider = WebRTCProvider(hass, data)
+    async_register_webrtc_provider(hass, provider)
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+    """Unload a go2rtc config entry."""
+    return True
+
+
+async def _get_binary(hass: HomeAssistant) -> str | None:
+    """Return the binary path if found."""
+    return await hass.async_add_executor_job(shutil.which, "go2rtc")
+
+
class WebRTCProvider(CameraWebRTCProvider):
    """WebRTC provider."""

-    def __init__(self, client: Go2RtcClient) -> None:
+    def __init__(self, hass: HomeAssistant, data: Go2RtcData) -> None:
        """Initialize the WebRTC provider."""
-        self._client = client
+        self._hass = hass
+        self._data = data
+        self._session = async_get_clientsession(hass)
+        self._rest_client = Go2RtcRestClient(self._session, data.url)
+        self._sessions: dict[str, Go2RtcWsClient] = {}
+
+    @property
+    def domain(self) -> str:
+        """Return the integration domain of the provider."""
+        return DOMAIN

-    async def async_is_supported(self, stream_source: str) -> bool:
+    @callback
+    def async_is_supported(self, stream_source: str) -> bool:
        """Return if this provider supports the Camera as source."""
        return stream_source.partition(":")[0] in _SUPPORTED_STREAMS

-    async def async_handle_web_rtc_offer(
-        self, camera: Camera, offer_sdp: str
-    ) -> str | None:
-        """Handle the WebRTC offer and return an answer."""
-        streams = await self._client.streams.list()
-        if camera.entity_id not in streams:
-            if not (stream_source := await camera.stream_source()):
-                return None
-            await self._client.streams.add(camera.entity_id, stream_source)
-
-        answer = await self._client.webrtc.forward_whep_sdp_offer(
-            camera.entity_id, WebRTCSdpOffer(offer_sdp)
+    async def async_handle_async_webrtc_offer(
+        self,
+        camera: Camera,
+        offer_sdp: str,
+        session_id: str,
+        send_message: WebRTCSendMessage,
+    ) -> None:
+        """Handle the WebRTC offer and return the answer via the provided callback."""
+        self._sessions[session_id] = ws_client = Go2RtcWsClient(
+            self._session, self._data.url, source=camera.entity_id
        )
-        return answer.sdp
+        if not (stream_source := await camera.stream_source()):
+            send_message(
+                WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source")
+            )
+            return
-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
-    """Unload a config entry."""
-    return True
+        streams = await self._rest_client.streams.list()
+
+        if self._data.managed:
+            # HA manages the go2rtc instance
+            stream_original_name = f"{camera.entity_id}_original"
+            stream_redirect_sources = [
+                f"rtsp://127.0.0.1:{HA_MANAGED_RTSP_PORT}/{stream_original_name}",
+                f"ffmpeg:{stream_original_name}#audio=opus",
+            ]
+
+            if (
+                (stream_org := streams.get(stream_original_name)) is None
+                or not any(
+                    stream_source == producer.url for producer in stream_org.producers
+                )
+                or (stream_redirect := streams.get(camera.entity_id)) is None
+                or stream_redirect_sources != [p.url for p in stream_redirect.producers]
+            ):
+                await self._rest_client.streams.add(stream_original_name, stream_source)
+                await self._rest_client.streams.add(
+                    camera.entity_id, stream_redirect_sources
+                )
+
+        # go2rtc instance is managed outside HA
+        elif (stream_org := streams.get(camera.entity_id)) is None or not any(
+            stream_source == producer.url for producer in stream_org.producers
+        ):
await self._rest_client.streams.add( + camera.entity_id, + [stream_source, f"ffmpeg:{camera.entity_id}#audio=opus"], + ) + + @callback + def on_messages(message: ReceiveMessages) -> None: + """Handle messages.""" + value: WebRTCMessage + match message: + case WebRTCCandidate(): + value = HAWebRTCCandidate(RTCIceCandidate(message.candidate)) + case WebRTCAnswer(): + value = HAWebRTCAnswer(message.sdp) + case WsError(): + value = WebRTCError("go2rtc_webrtc_offer_failed", message.error) + + send_message(value) + + ws_client.subscribe(on_messages) + config = camera.async_get_webrtc_client_configuration() + await ws_client.send(WebRTCOffer(offer_sdp, config.configuration.ice_servers)) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle the WebRTC candidate.""" + + if ws_client := self._sessions.get(session_id): + await ws_client.send(WebRTCCandidate(candidate.candidate)) + else: + _LOGGER.debug("Unknown session %s. Ignoring candidate", session_id) + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" + ws_client = self._sessions.pop(session_id) + self._hass.async_create_task(ws_client.close()) diff --git a/homeassistant/components/go2rtc/config_flow.py b/homeassistant/components/go2rtc/config_flow.py index 5162850461402b..02fdfb656a6f87 100644 --- a/homeassistant/components/go2rtc/config_flow.py +++ b/homeassistant/components/go2rtc/config_flow.py @@ -1,90 +1,21 @@ -"""Config flow for WebRTC.""" +"""Config flow for the go2rtc integration.""" from __future__ import annotations -import shutil from typing import Any -from urllib.parse import urlparse - -from go2rtc_client import Go2RtcClient -import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant -from homeassistant.helpers import selector -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.util.package import is_docker_env - -from .const import CONF_BINARY, DOMAIN - -_VALID_URL_SCHEMA = {"http", "https"} - - -async def _validate_url( - hass: HomeAssistant, - value: str, -) -> str | None: - """Validate the URL and return error or None if it's valid.""" - if urlparse(value).scheme not in _VALID_URL_SCHEMA: - return "invalid_url_schema" - try: - vol.Schema(vol.Url())(value) - except vol.Invalid: - return "invalid_url" - try: - client = Go2RtcClient(async_get_clientsession(hass), value) - await client.streams.list() - except Exception: # noqa: BLE001 - return "cannot_connect" - return None +from .const import DOMAIN -class Go2RTCConfigFlow(ConfigFlow, domain=DOMAIN): - """go2rtc config flow.""" +class CloudConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for the go2rtc integration.""" - def _get_binary(self) -> str | None: - """Return the binary path if found.""" - return shutil.which(DOMAIN) + VERSION = 1 - async def async_step_user( + async def async_step_system( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Init step.""" - if is_docker_env() and (binary := self._get_binary()): - return self.async_create_entry( - title=DOMAIN, - data={CONF_BINARY: binary, CONF_HOST: "http://localhost:1984/"}, - ) - - return await self.async_step_host() - - async def async_step_host( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Step to use selfhosted go2rtc server.""" - errors = {} - if user_input is not None: - 
if error := await _validate_url(self.hass, user_input[CONF_HOST]): - errors[CONF_HOST] = error - else: - return self.async_create_entry(title=DOMAIN, data=user_input) - - return self.async_show_form( - step_id="host", - data_schema=self.add_suggested_values_to_schema( - data_schema=vol.Schema( - { - vol.Required(CONF_HOST): selector.TextSelector( - selector.TextSelectorConfig( - type=selector.TextSelectorType.URL - ) - ), - } - ), - suggested_values=user_input, - ), - errors=errors, - last_step=True, - ) + """Handle the system step.""" + return self.async_create_entry(title="go2rtc", data={}) diff --git a/homeassistant/components/go2rtc/const.py b/homeassistant/components/go2rtc/const.py index af8266e0d723db..3c4dc9a9500968 100644 --- a/homeassistant/components/go2rtc/const.py +++ b/homeassistant/components/go2rtc/const.py @@ -2,4 +2,8 @@ DOMAIN = "go2rtc" -CONF_BINARY = "binary" +CONF_DEBUG_UI = "debug_ui" +DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time." +HA_MANAGED_API_PORT = 11984 +HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/" +HA_MANAGED_RTSP_PORT = 18554 diff --git a/homeassistant/components/go2rtc/manifest.json b/homeassistant/components/go2rtc/manifest.json index faf6c991ac1719..ea9308e5e182a6 100644 --- a/homeassistant/components/go2rtc/manifest.json +++ b/homeassistant/components/go2rtc/manifest.json @@ -2,10 +2,11 @@ "domain": "go2rtc", "name": "go2rtc", "codeowners": ["@home-assistant/core"], - "config_flow": true, + "config_flow": false, "dependencies": ["camera"], "documentation": "https://www.home-assistant.io/integrations/go2rtc", + "integration_type": "system", "iot_class": "local_polling", - "requirements": ["go2rtc-client==0.0.1b0"], + "requirements": ["go2rtc-client==0.1.0"], "single_config_entry": true } diff --git a/homeassistant/components/go2rtc/server.py b/homeassistant/components/go2rtc/server.py index fc9c2b17f60046..91f4433546caf2 100644 --- a/homeassistant/components/go2rtc/server.py +++ b/homeassistant/components/go2rtc/server.py @@ -1,56 +1,254 @@ """Go2rtc server.""" -from __future__ import annotations - +import asyncio +from collections import deque +from contextlib import suppress import logging -import subprocess from tempfile import NamedTemporaryFile -from threading import Thread -from .const import DOMAIN +from go2rtc_client import Go2RtcRestClient + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import HA_MANAGED_API_PORT, HA_MANAGED_RTSP_PORT, HA_MANAGED_URL _LOGGER = logging.getLogger(__name__) +_TERMINATE_TIMEOUT = 5 +_SETUP_TIMEOUT = 30 +_SUCCESSFUL_BOOT_MESSAGE = "INF [api] listen addr=" +_LOCALHOST_IP = "127.0.0.1" +_LOG_BUFFER_SIZE = 512 +_RESPAWN_COOLDOWN = 1 + +# Default configuration for HA +# - Api is listening only on localhost +# - Enable rtsp for localhost only as ffmpeg needs it +# - Clear default ice servers +_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant +# Do not edit it manually + +api: + listen: "{api_ip}:{api_port}" + +rtsp: + listen: "127.0.0.1:{rtsp_port}" + +webrtc: + listen: ":18555/tcp" + ice_servers: [] +""" + +_LOG_LEVEL_MAP = { + "TRC": logging.DEBUG, + "DBG": logging.DEBUG, + "INF": logging.DEBUG, + "WRN": logging.WARNING, + "ERR": logging.WARNING, + "FTL": logging.ERROR, + "PNC": logging.ERROR, +} -class Server(Thread): - """Server thread.""" +class Go2RTCServerStartError(HomeAssistantError): + """Raised when server 
does not start.""" - def __init__(self, binary: str) -> None: + _message = "Go2rtc server didn't start correctly" + + +class Go2RTCWatchdogError(HomeAssistantError): + """Raised on watchdog error.""" + + +def _create_temp_file(api_ip: str) -> str: + """Create temporary config file.""" + # Set delete=False to prevent the file from being deleted when the file is closed + # Linux is clearing tmp folder on reboot, so no need to delete it manually + with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file: + file.write( + _GO2RTC_CONFIG_FORMAT.format( + api_ip=api_ip, + api_port=HA_MANAGED_API_PORT, + rtsp_port=HA_MANAGED_RTSP_PORT, + ).encode() + ) + return file.name + + +class Server: + """Go2rtc server.""" + + def __init__( + self, hass: HomeAssistant, binary: str, *, enable_ui: bool = False + ) -> None: """Initialize the server.""" - super().__init__(name=DOMAIN, daemon=True) + self._hass = hass self._binary = binary - self._stop_requested = False + self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE) + self._process: asyncio.subprocess.Process | None = None + self._startup_complete = asyncio.Event() + self._api_ip = _LOCALHOST_IP + if enable_ui: + # Listen on all interfaces for allowing access from all ips + self._api_ip = "" + self._watchdog_task: asyncio.Task | None = None + self._watchdog_tasks: list[asyncio.Task] = [] + + async def start(self) -> None: + """Start the server.""" + await self._start() + self._watchdog_task = asyncio.create_task( + self._watchdog(), name="Go2rtc respawn" + ) - def run(self) -> None: - """Run the server.""" + async def _start(self) -> None: + """Start the server.""" _LOGGER.debug("Starting go2rtc server") - self._stop_requested = False - with ( - NamedTemporaryFile(prefix="go2rtc", suffix=".yaml") as file, - subprocess.Popen( - [self._binary, "-c", "webrtc.ice_servers=[]", "-c", file.name], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as process, - ): - while not self._stop_requested and process.poll() is None: - assert process.stdout - line = process.stdout.readline() - if line == b"": - break - _LOGGER.debug(line[:-1].decode()) - - _LOGGER.debug("Terminating go2rtc server") - process.terminate() + config_file = await self._hass.async_add_executor_job( + _create_temp_file, self._api_ip + ) + + self._startup_complete.clear() + + self._process = await asyncio.create_subprocess_exec( + self._binary, + "-c", + config_file, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + close_fds=False, # required for posix_spawn on CPython < 3.13 + ) + + self._hass.async_create_background_task( + self._log_output(self._process), "Go2rtc log output" + ) + + try: + async with asyncio.timeout(_SETUP_TIMEOUT): + await self._startup_complete.wait() + except TimeoutError as err: + msg = "Go2rtc server didn't start correctly" + _LOGGER.exception(msg) + self._log_server_output(logging.WARNING) + await self._stop() + raise Go2RTCServerStartError from err + + # Check the server version + client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL) + await client.validate_server_version() + + async def _log_output(self, process: asyncio.subprocess.Process) -> None: + """Log the output of the process.""" + assert process.stdout is not None + + async for line in process.stdout: + msg = line[:-1].decode().strip() + self._log_buffer.append(msg) + loglevel = logging.WARNING + if len(split_msg := msg.split(" ", 2)) == 3: + loglevel = _LOG_LEVEL_MAP.get(split_msg[1], loglevel) + _LOGGER.log(loglevel, msg) + 
if not self._startup_complete.is_set() and _SUCCESSFUL_BOOT_MESSAGE in msg: + self._startup_complete.set() + + def _log_server_output(self, loglevel: int) -> None: + """Log captured process output, then clear the log buffer.""" + for line in list(self._log_buffer): # Copy the deque to avoid mutation error + _LOGGER.log(loglevel, line) + self._log_buffer.clear() + + async def _watchdog(self) -> None: + """Keep respawning go2rtc servers. + + A new go2rtc server is spawned if the process terminates or the API + stops responding. + """ + while True: try: - process.wait(timeout=5) - except subprocess.TimeoutExpired: - _LOGGER.warning("Go2rtc server didn't terminate gracefully.Killing it") - process.kill() - _LOGGER.debug("Go2rtc server has been stopped") + monitor_process_task = asyncio.create_task(self._monitor_process()) + self._watchdog_tasks.append(monitor_process_task) + monitor_process_task.add_done_callback(self._watchdog_tasks.remove) + monitor_api_task = asyncio.create_task(self._monitor_api()) + self._watchdog_tasks.append(monitor_api_task) + monitor_api_task.add_done_callback(self._watchdog_tasks.remove) + try: + await asyncio.gather(monitor_process_task, monitor_api_task) + except Go2RTCWatchdogError: + _LOGGER.debug("Caught Go2RTCWatchdogError") + for task in self._watchdog_tasks: + if task.done(): + if not task.cancelled(): + task.exception() + continue + task.cancel() + await asyncio.sleep(_RESPAWN_COOLDOWN) + try: + await self._stop() + _LOGGER.warning("Go2rtc unexpectedly stopped, server log:") + self._log_server_output(logging.WARNING) + _LOGGER.debug("Spawning new go2rtc server") + with suppress(Go2RTCServerStartError): + await self._start() + except Exception: + _LOGGER.exception( + "Unexpected error when restarting go2rtc server" + ) + except Exception: + _LOGGER.exception("Unexpected error in go2rtc server watchdog") + + async def _monitor_process(self) -> None: + """Raise if the go2rtc process terminates.""" + _LOGGER.debug("Monitoring go2rtc server process") + if self._process: + await self._process.wait() + _LOGGER.debug("go2rtc server terminated") + raise Go2RTCWatchdogError("Process ended") + + async def _monitor_api(self) -> None: + """Raise if the go2rtc process terminates.""" + client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL) - def stop(self) -> None: + _LOGGER.debug("Monitoring go2rtc API") + try: + while True: + await client.validate_server_version() + await asyncio.sleep(10) + except Exception as err: + _LOGGER.debug("go2rtc API did not reply", exc_info=True) + raise Go2RTCWatchdogError("API error") from err + + async def _stop_watchdog(self) -> None: + """Handle watchdog stop request.""" + tasks: list[asyncio.Task] = [] + if watchdog_task := self._watchdog_task: + self._watchdog_task = None + tasks.append(watchdog_task) + watchdog_task.cancel() + for task in self._watchdog_tasks: + tasks.append(task) + task.cancel() + await asyncio.gather(*tasks, return_exceptions=True) + + async def stop(self) -> None: + """Stop the server and abort the watchdog task.""" + _LOGGER.debug("Server stop requested") + await self._stop_watchdog() + await self._stop() + + async def _stop(self) -> None: """Stop the server.""" - self._stop_requested = True - if self.is_alive(): - self.join() + if self._process: + _LOGGER.debug("Stopping go2rtc server") + process = self._process + self._process = None + with suppress(ProcessLookupError): + process.terminate() + try: + await asyncio.wait_for(process.wait(), timeout=_TERMINATE_TIMEOUT) + except TimeoutError: 
+ _LOGGER.warning("Go2rtc server didn't terminate gracefully. Killing it") + with suppress(ProcessLookupError): + process.kill() + else: + _LOGGER.debug("Go2rtc server has been stopped") diff --git a/homeassistant/components/go2rtc/strings.json b/homeassistant/components/go2rtc/strings.json deleted file mode 100644 index 44e28d712c1b5d..00000000000000 --- a/homeassistant/components/go2rtc/strings.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "config": { - "step": { - "host": { - "data": { - "host": "[%key:common::config_flow::data::url%]" - }, - "data_description": { - "host": "The URL of your go2rtc instance." - } - } - }, - "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_url": "Invalid URL", - "invalid_url_schema": "Invalid URL scheme.\nThe URL should start with `http://` or `https://`." - } - } -} diff --git a/homeassistant/components/google/__init__.py b/homeassistant/components/google/__init__.py index 9bb6dbd059f913..2ad400aababd75 100644 --- a/homeassistant/components/google/__init__.py +++ b/homeassistant/components/google/__init__.py @@ -175,7 +175,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except aiohttp.ClientError as err: raise ConfigEntryNotReady from err - if not async_entry_has_scopes(hass, entry): + if not async_entry_has_scopes(entry): raise ConfigEntryAuthFailed( "Required scopes are not available, reauth required" ) @@ -198,7 +198,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.config_entries.async_update_entry(entry, unique_id=primary_calendar.id) # Only expose the add event service if we have the correct permissions - if get_feature_access(hass, entry) is FeatureAccess.read_write: + if get_feature_access(entry) is FeatureAccess.read_write: await async_setup_add_event_service(hass, calendar_service) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -208,9 +208,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -def async_entry_has_scopes(hass: HomeAssistant, entry: ConfigEntry) -> bool: +def async_entry_has_scopes(entry: ConfigEntry) -> bool: """Verify that the config entry desired scope is present in the oauth token.""" - access = get_feature_access(hass, entry) + access = get_feature_access(entry) token_scopes = entry.data.get("token", {}).get("scope", []) return access.scope in token_scopes @@ -224,7 +224,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Reload config entry if the access options change.""" - if not async_entry_has_scopes(hass, entry): + if not async_entry_has_scopes(entry): await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/google/api.py b/homeassistant/components/google/api.py index 8ed18cca41cee8..194c2a0b4a5b6d 100644 --- a/homeassistant/components/google/api.py +++ b/homeassistant/components/google/api.py @@ -26,13 +26,7 @@ ) from homeassistant.util import dt as dt_util -from .const import ( - CONF_CALENDAR_ACCESS, - DATA_CONFIG, - DEFAULT_FEATURE_ACCESS, - DOMAIN, - FeatureAccess, -) +from .const import CONF_CALENDAR_ACCESS, DEFAULT_FEATURE_ACCESS, FeatureAccess _LOGGER = logging.getLogger(__name__) @@ -161,27 +155,11 @@ def _finish(self) -> None: self._listener() -def get_feature_access( - hass: HomeAssistant, config_entry: ConfigEntry | None = None -) -> FeatureAccess: +def 
get_feature_access(config_entry: ConfigEntry) -> FeatureAccess: """Return the desired calendar feature access.""" - if ( - config_entry - and config_entry.options - and CONF_CALENDAR_ACCESS in config_entry.options - ): + if config_entry.options and CONF_CALENDAR_ACCESS in config_entry.options: return FeatureAccess[config_entry.options[CONF_CALENDAR_ACCESS]] - - # This may be called during config entry setup without integration setup running when there - # is no google entry in configuration.yaml - return cast( - FeatureAccess, - ( - hass.data.get(DOMAIN, {}) - .get(DATA_CONFIG, {}) - .get(CONF_CALENDAR_ACCESS, DEFAULT_FEATURE_ACCESS) - ), - ) + return DEFAULT_FEATURE_ACCESS async def async_create_device_flow( diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index 7fb55f3cfb7d94..5ac5dae616c4d6 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -10,7 +10,14 @@ from gcal_sync.api import Range, SyncEventsRequest from gcal_sync.exceptions import ApiException -from gcal_sync.model import AccessRole, Calendar, DateOrDatetime, Event, EventTypeEnum +from gcal_sync.model import ( + AccessRole, + Calendar, + DateOrDatetime, + Event, + EventTypeEnum, + ResponseStatus, +) from gcal_sync.store import ScopedCalendarStore from gcal_sync.sync import CalendarEventSyncManager @@ -132,7 +139,7 @@ def _get_entity_descriptions( ) read_only = not ( calendar_item.access_role.is_writer - and get_feature_access(hass, config_entry) is FeatureAccess.read_write + and get_feature_access(config_entry) is FeatureAccess.read_write ) # Prefer calendar sync down of resources when possible. However, # sync does not work for search. Also free-busy calendars denormalize @@ -304,7 +311,7 @@ def append_calendars_to_config() -> None: platform = entity_platform.async_get_current_platform() if ( any(calendar_item.access_role.is_writer for calendar_item in result.items) - and get_feature_access(hass, config_entry) is FeatureAccess.read_write + and get_feature_access(config_entry) is FeatureAccess.read_write ): platform.async_register_entity_service( SERVICE_CREATE_EVENT, @@ -367,7 +374,14 @@ def event(self) -> CalendarEvent | None: return event def _event_filter(self, event: Event) -> bool: - """Return True if the event is visible.""" + """Return True if the event is visible and not declined.""" + + if any( + attendee.is_self and attendee.response_status == ResponseStatus.DECLINED + for attendee in event.attendees + ): + return False + if event.event_type == EventTypeEnum.WORKING_LOCATION: return self.entity_description.working_location if self._ignore_availability: diff --git a/homeassistant/components/google/config_flow.py b/homeassistant/components/google/config_flow.py index 98424ef24f55c1..8ae09b58957836 100644 --- a/homeassistant/components/google/config_flow.py +++ b/homeassistant/components/google/config_flow.py @@ -11,7 +11,12 @@ from gcal_sync.exceptions import ApiException, ApiForbiddenException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -24,7 +29,6 @@ InvalidCredential, OAuthError, async_create_device_flow, - get_feature_access, ) from .const import ( 
CONF_CALENDAR_ACCESS, @@ -74,7 +78,6 @@ class OAuth2FlowHandler( def __init__(self) -> None: """Set up instance.""" super().__init__() - self._reauth_config_entry: ConfigEntry | None = None self._device_flow: DeviceFlow | None = None # First attempt is device auth, then fallback to web auth self._web_auth = False @@ -117,11 +120,11 @@ async def async_step_auth( self.flow_impl, ) return self.async_abort(reason="oauth_error") - calendar_access = get_feature_access(self.hass) - if self._reauth_config_entry and self._reauth_config_entry.options: - calendar_access = FeatureAccess[ - self._reauth_config_entry.options[CONF_CALENDAR_ACCESS] - ] + calendar_access = DEFAULT_FEATURE_ACCESS + if self.source == SOURCE_REAUTH and ( + reauth_options := self._get_reauth_entry().options + ): + calendar_access = FeatureAccess[reauth_options[CONF_CALENDAR_ACCESS]] try: device_flow = await async_create_device_flow( self.hass, @@ -178,14 +181,10 @@ async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: data[CONF_CREDENTIAL_TYPE] = ( CredentialType.WEB_AUTH if self._web_auth else CredentialType.DEVICE_AUTH ) - if self._reauth_config_entry: - self.hass.config_entries.async_update_entry( - self._reauth_config_entry, data=data - ) - await self.hass.config_entries.async_reload( - self._reauth_config_entry.entry_id + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data ) - return self.async_abort(reason="reauth_successful") calendar_service = GoogleCalendarService( AccessTokenAuthImpl( async_get_clientsession(self.hass), data["token"]["access_token"] @@ -214,7 +213,7 @@ async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: title=primary_calendar.id, data=data, options={ - CONF_CALENDAR_ACCESS: get_feature_access(self.hass).name, + CONF_CALENDAR_ACCESS: DEFAULT_FEATURE_ACCESS.name, }, ) @@ -222,9 +221,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) self._web_auth = entry_data.get(CONF_CREDENTIAL_TYPE) == CredentialType.WEB_AUTH return await self.async_step_reauth_confirm() @@ -242,16 +238,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Create an options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Google Calendar options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index 288ccbd6899df7..85c4714432b2f3 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -4,8 +4,8 @@ "codeowners": ["@allenporter"], "config_flow": true, "dependencies": ["application_credentials"], - "documentation": "https://www.home-assistant.io/integrations/calendar.google", + "documentation": "https://www.home-assistant.io/integrations/google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==6.1.5", "oauth2client==4.1.3", "ical==8.2.0"] + "requirements": ["gcal-sync==6.2.0", "oauth2client==4.1.3", "ical==8.2.0"] } diff --git 
a/homeassistant/components/google/strings.json b/homeassistant/components/google/strings.json index 05c7b8ab190fa9..2ea45239a53072 100644 --- a/homeassistant/components/google/strings.json +++ b/homeassistant/components/google/strings.json @@ -19,6 +19,7 @@ "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]", "code_expired": "Authentication code expired or credential setup is invalid, please try again.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]", @@ -86,8 +87,8 @@ } }, "create_event": { - "name": "Creates event", - "description": "Add a new calendar event.", + "name": "Create event", + "description": "Adds a new calendar event.", "fields": { "summary": { "name": "Summary", diff --git a/homeassistant/components/google_assistant/const.py b/homeassistant/components/google_assistant/const.py index 04c85639e075ba..8132ecaae2c526 100644 --- a/homeassistant/components/google_assistant/const.py +++ b/homeassistant/components/google_assistant/const.py @@ -78,6 +78,7 @@ TYPE_BLINDS = f"{PREFIX_TYPES}BLINDS" TYPE_CAMERA = f"{PREFIX_TYPES}CAMERA" TYPE_CURTAIN = f"{PREFIX_TYPES}CURTAIN" +TYPE_CARBON_MONOXIDE_DETECTOR = f"{PREFIX_TYPES}CARBON_MONOXIDE_DETECTOR" TYPE_DEHUMIDIFIER = f"{PREFIX_TYPES}DEHUMIDIFIER" TYPE_DOOR = f"{PREFIX_TYPES}DOOR" TYPE_DOORBELL = f"{PREFIX_TYPES}DOORBELL" @@ -93,6 +94,7 @@ TYPE_SENSOR = f"{PREFIX_TYPES}SENSOR" TYPE_SETTOP = f"{PREFIX_TYPES}SETTOP" TYPE_SHUTTER = f"{PREFIX_TYPES}SHUTTER" +TYPE_SMOKE_DETECTOR = f"{PREFIX_TYPES}SMOKE_DETECTOR" TYPE_SPEAKER = f"{PREFIX_TYPES}SPEAKER" TYPE_SWITCH = f"{PREFIX_TYPES}SWITCH" TYPE_THERMOSTAT = f"{PREFIX_TYPES}THERMOSTAT" @@ -136,6 +138,7 @@ DOMAIN_TO_GOOGLE_TYPES = { alarm_control_panel.DOMAIN: TYPE_ALARM, + binary_sensor.DOMAIN: TYPE_SENSOR, button.DOMAIN: TYPE_SCENE, camera.DOMAIN: TYPE_CAMERA, climate.DOMAIN: TYPE_THERMOSTAT, @@ -168,6 +171,14 @@ binary_sensor.DOMAIN, binary_sensor.BinarySensorDeviceClass.GARAGE_DOOR, ): TYPE_GARAGE, + ( + binary_sensor.DOMAIN, + binary_sensor.BinarySensorDeviceClass.SMOKE, + ): TYPE_SMOKE_DETECTOR, + ( + binary_sensor.DOMAIN, + binary_sensor.BinarySensorDeviceClass.CO, + ): TYPE_CARBON_MONOXIDE_DETECTOR, (cover.DOMAIN, cover.CoverDeviceClass.AWNING): TYPE_AWNING, (cover.DOMAIN, cover.CoverDeviceClass.CURTAIN): TYPE_CURTAIN, (cover.DOMAIN, cover.CoverDeviceClass.DOOR): TYPE_DOOR, diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 95faf7c3321368..f99f1574038a5d 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -33,7 +33,10 @@ valve, water_heater, ) -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.camera import CameraEntityFeature from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.cover import CoverEntityFeature @@ -63,13 +66,6 @@ SERVICE_ALARM_TRIGGER, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_ALARM_ARMED_AWAY, - 
STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_IDLE, STATE_OFF, STATE_ON, @@ -109,61 +105,42 @@ _LOGGER = logging.getLogger(__name__) PREFIX_TRAITS = "action.devices.traits." -TRAIT_CAMERA_STREAM = f"{PREFIX_TRAITS}CameraStream" -TRAIT_ONOFF = f"{PREFIX_TRAITS}OnOff" -TRAIT_DOCK = f"{PREFIX_TRAITS}Dock" -TRAIT_STARTSTOP = f"{PREFIX_TRAITS}StartStop" +TRAIT_ARM_DISARM = f"{PREFIX_TRAITS}ArmDisarm" TRAIT_BRIGHTNESS = f"{PREFIX_TRAITS}Brightness" +TRAIT_CAMERA_STREAM = f"{PREFIX_TRAITS}CameraStream" +TRAIT_CHANNEL = f"{PREFIX_TRAITS}Channel" TRAIT_COLOR_SETTING = f"{PREFIX_TRAITS}ColorSetting" -TRAIT_SCENE = f"{PREFIX_TRAITS}Scene" -TRAIT_TEMPERATURE_SETTING = f"{PREFIX_TRAITS}TemperatureSetting" -TRAIT_TEMPERATURE_CONTROL = f"{PREFIX_TRAITS}TemperatureControl" -TRAIT_LOCKUNLOCK = f"{PREFIX_TRAITS}LockUnlock" -TRAIT_FANSPEED = f"{PREFIX_TRAITS}FanSpeed" -TRAIT_MODES = f"{PREFIX_TRAITS}Modes" -TRAIT_INPUTSELECTOR = f"{PREFIX_TRAITS}InputSelector" -TRAIT_OBJECTDETECTION = f"{PREFIX_TRAITS}ObjectDetection" -TRAIT_OPENCLOSE = f"{PREFIX_TRAITS}OpenClose" -TRAIT_VOLUME = f"{PREFIX_TRAITS}Volume" -TRAIT_ARMDISARM = f"{PREFIX_TRAITS}ArmDisarm" +TRAIT_DOCK = f"{PREFIX_TRAITS}Dock" +TRAIT_ENERGY_STORAGE = f"{PREFIX_TRAITS}EnergyStorage" +TRAIT_FAN_SPEED = f"{PREFIX_TRAITS}FanSpeed" TRAIT_HUMIDITY_SETTING = f"{PREFIX_TRAITS}HumiditySetting" -TRAIT_TRANSPORT_CONTROL = f"{PREFIX_TRAITS}TransportControl" -TRAIT_MEDIA_STATE = f"{PREFIX_TRAITS}MediaState" -TRAIT_CHANNEL = f"{PREFIX_TRAITS}Channel" +TRAIT_INPUT_SELECTOR = f"{PREFIX_TRAITS}InputSelector" TRAIT_LOCATOR = f"{PREFIX_TRAITS}Locator" -TRAIT_ENERGYSTORAGE = f"{PREFIX_TRAITS}EnergyStorage" +TRAIT_LOCK_UNLOCK = f"{PREFIX_TRAITS}LockUnlock" +TRAIT_MEDIA_STATE = f"{PREFIX_TRAITS}MediaState" +TRAIT_MODES = f"{PREFIX_TRAITS}Modes" +TRAIT_OBJECT_DETECTION = f"{PREFIX_TRAITS}ObjectDetection" +TRAIT_ON_OFF = f"{PREFIX_TRAITS}OnOff" +TRAIT_OPEN_CLOSE = f"{PREFIX_TRAITS}OpenClose" +TRAIT_SCENE = f"{PREFIX_TRAITS}Scene" TRAIT_SENSOR_STATE = f"{PREFIX_TRAITS}SensorState" +TRAIT_START_STOP = f"{PREFIX_TRAITS}StartStop" +TRAIT_TEMPERATURE_CONTROL = f"{PREFIX_TRAITS}TemperatureControl" +TRAIT_TEMPERATURE_SETTING = f"{PREFIX_TRAITS}TemperatureSetting" +TRAIT_TRANSPORT_CONTROL = f"{PREFIX_TRAITS}TransportControl" +TRAIT_VOLUME = f"{PREFIX_TRAITS}Volume" PREFIX_COMMANDS = "action.devices.commands." 
-COMMAND_ONOFF = f"{PREFIX_COMMANDS}OnOff" -COMMAND_GET_CAMERA_STREAM = f"{PREFIX_COMMANDS}GetCameraStream" -COMMAND_DOCK = f"{PREFIX_COMMANDS}Dock" -COMMAND_STARTSTOP = f"{PREFIX_COMMANDS}StartStop" -COMMAND_PAUSEUNPAUSE = f"{PREFIX_COMMANDS}PauseUnpause" +COMMAND_ACTIVATE_SCENE = f"{PREFIX_COMMANDS}ActivateScene" +COMMAND_ARM_DISARM = f"{PREFIX_COMMANDS}ArmDisarm" COMMAND_BRIGHTNESS_ABSOLUTE = f"{PREFIX_COMMANDS}BrightnessAbsolute" +COMMAND_CHARGE = f"{PREFIX_COMMANDS}Charge" COMMAND_COLOR_ABSOLUTE = f"{PREFIX_COMMANDS}ColorAbsolute" -COMMAND_ACTIVATE_SCENE = f"{PREFIX_COMMANDS}ActivateScene" -COMMAND_SET_TEMPERATURE = f"{PREFIX_COMMANDS}SetTemperature" -COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT = ( - f"{PREFIX_COMMANDS}ThermostatTemperatureSetpoint" -) -COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE = ( - f"{PREFIX_COMMANDS}ThermostatTemperatureSetRange" -) -COMMAND_THERMOSTAT_SET_MODE = f"{PREFIX_COMMANDS}ThermostatSetMode" -COMMAND_LOCKUNLOCK = f"{PREFIX_COMMANDS}LockUnlock" -COMMAND_FANSPEED = f"{PREFIX_COMMANDS}SetFanSpeed" -COMMAND_FANSPEEDRELATIVE = f"{PREFIX_COMMANDS}SetFanSpeedRelative" -COMMAND_MODES = f"{PREFIX_COMMANDS}SetModes" -COMMAND_INPUT = f"{PREFIX_COMMANDS}SetInput" +COMMAND_DOCK = f"{PREFIX_COMMANDS}Dock" +COMMAND_GET_CAMERA_STREAM = f"{PREFIX_COMMANDS}GetCameraStream" +COMMAND_LOCK_UNLOCK = f"{PREFIX_COMMANDS}LockUnlock" +COMMAND_LOCATE = f"{PREFIX_COMMANDS}Locate" COMMAND_NEXT_INPUT = f"{PREFIX_COMMANDS}NextInput" -COMMAND_PREVIOUS_INPUT = f"{PREFIX_COMMANDS}PreviousInput" -COMMAND_OPENCLOSE = f"{PREFIX_COMMANDS}OpenClose" -COMMAND_OPENCLOSE_RELATIVE = f"{PREFIX_COMMANDS}OpenCloseRelative" -COMMAND_SET_VOLUME = f"{PREFIX_COMMANDS}setVolume" -COMMAND_VOLUME_RELATIVE = f"{PREFIX_COMMANDS}volumeRelative" -COMMAND_MUTE = f"{PREFIX_COMMANDS}mute" -COMMAND_ARMDISARM = f"{PREFIX_COMMANDS}ArmDisarm" COMMAND_MEDIA_NEXT = f"{PREFIX_COMMANDS}mediaNext" COMMAND_MEDIA_PAUSE = f"{PREFIX_COMMANDS}mediaPause" COMMAND_MEDIA_PREVIOUS = f"{PREFIX_COMMANDS}mediaPrevious" @@ -172,11 +149,30 @@ COMMAND_MEDIA_SEEK_TO_POSITION = f"{PREFIX_COMMANDS}mediaSeekToPosition" COMMAND_MEDIA_SHUFFLE = f"{PREFIX_COMMANDS}mediaShuffle" COMMAND_MEDIA_STOP = f"{PREFIX_COMMANDS}mediaStop" +COMMAND_MUTE = f"{PREFIX_COMMANDS}mute" +COMMAND_OPEN_CLOSE = f"{PREFIX_COMMANDS}OpenClose" +COMMAND_ON_OFF = f"{PREFIX_COMMANDS}OnOff" +COMMAND_OPEN_CLOSE_RELATIVE = f"{PREFIX_COMMANDS}OpenCloseRelative" +COMMAND_PAUSE_UNPAUSE = f"{PREFIX_COMMANDS}PauseUnpause" COMMAND_REVERSE = f"{PREFIX_COMMANDS}Reverse" -COMMAND_SET_HUMIDITY = f"{PREFIX_COMMANDS}SetHumidity" +COMMAND_PREVIOUS_INPUT = f"{PREFIX_COMMANDS}PreviousInput" COMMAND_SELECT_CHANNEL = f"{PREFIX_COMMANDS}selectChannel" -COMMAND_LOCATE = f"{PREFIX_COMMANDS}Locate" -COMMAND_CHARGE = f"{PREFIX_COMMANDS}Charge" +COMMAND_SET_TEMPERATURE = f"{PREFIX_COMMANDS}SetTemperature" +COMMAND_SET_FAN_SPEED = f"{PREFIX_COMMANDS}SetFanSpeed" +COMMAND_SET_FAN_SPEED_RELATIVE = f"{PREFIX_COMMANDS}SetFanSpeedRelative" +COMMAND_SET_HUMIDITY = f"{PREFIX_COMMANDS}SetHumidity" +COMMAND_SET_INPUT = f"{PREFIX_COMMANDS}SetInput" +COMMAND_SET_MODES = f"{PREFIX_COMMANDS}SetModes" +COMMAND_SET_VOLUME = f"{PREFIX_COMMANDS}setVolume" +COMMAND_START_STOP = f"{PREFIX_COMMANDS}StartStop" +COMMAND_THERMOSTAT_SET_MODE = f"{PREFIX_COMMANDS}ThermostatSetMode" +COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT = ( + f"{PREFIX_COMMANDS}ThermostatTemperatureSetpoint" +) +COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE = ( + f"{PREFIX_COMMANDS}ThermostatTemperatureSetRange" +) +COMMAND_VOLUME_RELATIVE = 
f"{PREFIX_COMMANDS}volumeRelative" TRAITS: list[type[_Trait]] = [] @@ -415,7 +411,7 @@ class ObjectDetection(_Trait): https://developers.google.com/actions/smarthome/traits/objectdetection """ - name = TRAIT_OBJECTDETECTION + name = TRAIT_OBJECT_DETECTION commands = [] @staticmethod @@ -473,8 +469,8 @@ class OnOffTrait(_Trait): https://developers.google.com/actions/smarthome/traits/onoff """ - name = TRAIT_ONOFF - commands = [COMMAND_ONOFF] + name = TRAIT_ON_OFF + commands = [COMMAND_ON_OFF] @staticmethod def supported(domain, features, device_class, _): @@ -793,7 +789,7 @@ class EnergyStorageTrait(_Trait): https://developers.google.com/actions/smarthome/traits/energystorage """ - name = TRAIT_ENERGYSTORAGE + name = TRAIT_ENERGY_STORAGE commands = [COMMAND_CHARGE] @staticmethod @@ -848,8 +844,8 @@ class StartStopTrait(_Trait): https://developers.google.com/actions/smarthome/traits/startstop """ - name = TRAIT_STARTSTOP - commands = [COMMAND_STARTSTOP, COMMAND_PAUSEUNPAUSE] + name = TRAIT_START_STOP + commands = [COMMAND_START_STOP, COMMAND_PAUSE_UNPAUSE] @staticmethod def supported(domain, features, device_class, _): @@ -913,7 +909,7 @@ async def execute(self, command, data, params, challenge): async def _execute_vacuum(self, command, data, params, challenge): """Execute a StartStop command.""" - if command == COMMAND_STARTSTOP: + if command == COMMAND_START_STOP: if params["start"]: await self.hass.services.async_call( self.state.domain, @@ -930,7 +926,7 @@ async def _execute_vacuum(self, command, data, params, challenge): blocking=not self.config.should_report_state, context=data.context, ) - elif command == COMMAND_PAUSEUNPAUSE: + elif command == COMMAND_PAUSE_UNPAUSE: if params["pause"]: await self.hass.services.async_call( self.state.domain, @@ -951,7 +947,7 @@ async def _execute_vacuum(self, command, data, params, challenge): async def _execute_cover_or_valve(self, command, data, params, challenge): """Execute a StartStop command.""" domain = self.state.domain - if command == COMMAND_STARTSTOP: + if command == COMMAND_START_STOP: if params["start"] is False: if self.state.state in ( COVER_VALVE_STATES[domain]["closing"], @@ -1504,8 +1500,8 @@ class LockUnlockTrait(_Trait): https://developers.google.com/actions/smarthome/traits/lockunlock """ - name = TRAIT_LOCKUNLOCK - commands = [COMMAND_LOCKUNLOCK] + name = TRAIT_LOCK_UNLOCK + commands = [COMMAND_LOCK_UNLOCK] @staticmethod def supported(domain, features, device_class, _): @@ -1553,23 +1549,23 @@ class ArmDisArmTrait(_Trait): https://developers.google.com/actions/smarthome/traits/armdisarm """ - name = TRAIT_ARMDISARM - commands = [COMMAND_ARMDISARM] + name = TRAIT_ARM_DISARM + commands = [COMMAND_ARM_DISARM] state_to_service = { - STATE_ALARM_ARMED_HOME: SERVICE_ALARM_ARM_HOME, - STATE_ALARM_ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT, - STATE_ALARM_ARMED_AWAY: SERVICE_ALARM_ARM_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS: SERVICE_ALARM_ARM_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED: SERVICE_ALARM_TRIGGER, + AlarmControlPanelState.ARMED_HOME: SERVICE_ALARM_ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT, + AlarmControlPanelState.ARMED_AWAY: SERVICE_ALARM_ARM_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED: SERVICE_ALARM_TRIGGER, } state_to_support = { - STATE_ALARM_ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, - STATE_ALARM_ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, - STATE_ALARM_ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, - 
STATE_ALARM_ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED: AlarmControlPanelEntityFeature.TRIGGER, + AlarmControlPanelState.ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, + AlarmControlPanelState.ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED: AlarmControlPanelEntityFeature.TRIGGER, } """The list of states to support in increasing security state.""" @@ -1595,8 +1591,8 @@ def _supported_states(self): def _default_arm_state(self): states = self._supported_states() - if STATE_ALARM_TRIGGERED in states: - states.remove(STATE_ALARM_TRIGGERED) + if AlarmControlPanelState.TRIGGERED in states: + states.remove(AlarmControlPanelState.TRIGGERED) if not states: raise SmartHomeError(ERR_NOT_SUPPORTED, "ArmLevel missing") @@ -1611,7 +1607,7 @@ def sync_attributes(self) -> dict[str, Any]: # level synonyms are generated from state names # 'armed_away' becomes 'armed away' or 'away' level_synonym = [state.replace("_", " ")] - if state != STATE_ALARM_TRIGGERED: + if state != AlarmControlPanelState.TRIGGERED: level_synonym.append(state.split("_")[1]) level = { @@ -1652,11 +1648,11 @@ async def execute(self, command, data, params, challenge): elif ( params["arm"] and params.get("cancel") - and self.state.state == STATE_ALARM_PENDING + and self.state.state == AlarmControlPanelState.PENDING ): service = SERVICE_ALARM_DISARM else: - if self.state.state == STATE_ALARM_DISARMED: + if self.state.state == AlarmControlPanelState.DISARMED: raise SmartHomeError(ERR_ALREADY_DISARMED, "System is already disarmed") _verify_pin_challenge(data, self.state, challenge) service = SERVICE_ALARM_DISARM @@ -1694,8 +1690,8 @@ class FanSpeedTrait(_Trait): https://developers.google.com/actions/smarthome/traits/fanspeed """ - name = TRAIT_FANSPEED - commands = [COMMAND_FANSPEED, COMMAND_REVERSE] + name = TRAIT_FAN_SPEED + commands = [COMMAND_SET_FAN_SPEED, COMMAND_REVERSE] def __init__(self, hass, state, config): """Initialize a trait for a state.""" @@ -1840,7 +1836,7 @@ async def execute_reverse(self, data, params): async def execute(self, command, data, params, challenge): """Execute a smart home command.""" - if command == COMMAND_FANSPEED: + if command == COMMAND_SET_FAN_SPEED: await self.execute_fanspeed(data, params) elif command == COMMAND_REVERSE: await self.execute_reverse(data, params) @@ -1854,7 +1850,7 @@ class ModesTrait(_Trait): """ name = TRAIT_MODES - commands = [COMMAND_MODES] + commands = [COMMAND_SET_MODES] SYNONYMS = { "preset mode": ["preset mode", "mode", "preset"], @@ -2088,8 +2084,8 @@ class InputSelectorTrait(_Trait): https://developers.google.com/assistant/smarthome/traits/inputselector """ - name = TRAIT_INPUTSELECTOR - commands = [COMMAND_INPUT, COMMAND_NEXT_INPUT, COMMAND_PREVIOUS_INPUT] + name = TRAIT_INPUT_SELECTOR + commands = [COMMAND_SET_INPUT, COMMAND_NEXT_INPUT, COMMAND_PREVIOUS_INPUT] SYNONYMS: dict[str, list[str]] = {} @@ -2124,7 +2120,7 @@ async def execute(self, command, data, params, challenge): sources = self.state.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or [] source = self.state.attributes.get(media_player.ATTR_INPUT_SOURCE) - if command == COMMAND_INPUT: + if command == COMMAND_SET_INPUT: requested_source = params.get("newInput") elif command == COMMAND_NEXT_INPUT: requested_source = _next_selected(sources, 
source) @@ -2162,8 +2158,8 @@ class OpenCloseTrait(_Trait): cover.CoverDeviceClass.GATE, ) - name = TRAIT_OPENCLOSE - commands = [COMMAND_OPENCLOSE, COMMAND_OPENCLOSE_RELATIVE] + name = TRAIT_OPEN_CLOSE + commands = [COMMAND_OPEN_CLOSE, COMMAND_OPEN_CLOSE_RELATIVE] @staticmethod def supported(domain, features, device_class, _): @@ -2263,7 +2259,7 @@ async def execute(self, command, data, params, challenge): if domain in COVER_VALVE_DOMAINS: svc_params = {ATTR_ENTITY_ID: self.state.entity_id} should_verify = False - if command == COMMAND_OPENCLOSE_RELATIVE: + if command == COMMAND_OPEN_CLOSE_RELATIVE: position = self.state.attributes.get( COVER_VALVE_CURRENT_POSITION[domain] ) @@ -2710,6 +2706,21 @@ class SensorStateTrait(_Trait): ), } + binary_sensor_types = { + binary_sensor.BinarySensorDeviceClass.CO: ( + "CarbonMonoxideLevel", + ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], + ), + binary_sensor.BinarySensorDeviceClass.SMOKE: ( + "SmokeLevel", + ["smoke detected", "no smoke detected", "unknown"], + ), + binary_sensor.BinarySensorDeviceClass.MOISTURE: ( + "WaterLeak", + ["leak", "no leak", "unknown"], + ), + } + name = TRAIT_SENSOR_STATE commands: list[str] = [] @@ -2732,24 +2743,37 @@ def _air_quality_description_for_aqi(self, aqi: float | None) -> str: @classmethod def supported(cls, domain, features, device_class, _): """Test if state is supported.""" - return domain == sensor.DOMAIN and device_class in cls.sensor_types + return (domain == sensor.DOMAIN and device_class in cls.sensor_types) or ( + domain == binary_sensor.DOMAIN and device_class in cls.binary_sensor_types + ) def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) - data = self.sensor_types.get(device_class) - if device_class is None or data is None: - return {} - - sensor_state = { - "name": data[0], - "numericCapabilities": {"rawValueUnit": data[1]}, - } + def create_sensor_state( + name: str, + raw_value_unit: str | None = None, + available_states: list[str] | None = None, + ) -> dict[str, Any]: + sensor_state: dict[str, Any] = { + "name": name, + } + if raw_value_unit: + sensor_state["numericCapabilities"] = {"rawValueUnit": raw_value_unit} + if available_states: + sensor_state["descriptiveCapabilities"] = { + "availableStates": available_states + } + return {"sensorStatesSupported": [sensor_state]} - if device_class == sensor.SensorDeviceClass.AQI: - sensor_state["descriptiveCapabilities"] = { - "availableStates": [ + if self.state.domain == sensor.DOMAIN: + sensor_data = self.sensor_types.get(device_class) + if device_class is None or sensor_data is None: + return {} + available_states: list[str] | None = None + if device_class == sensor.SensorDeviceClass.AQI: + available_states = [ "healthy", "moderate", "unhealthy for sensitive groups", @@ -2757,30 +2781,53 @@ def sync_attributes(self) -> dict[str, Any]: "very unhealthy", "hazardous", "unknown", - ], - } - - return {"sensorStatesSupported": [sensor_state]} + ] + return create_sensor_state(sensor_data[0], sensor_data[1], available_states) + binary_sensor_data = self.binary_sensor_types.get(device_class) + if device_class is None or binary_sensor_data is None: + return {} + return create_sensor_state( + binary_sensor_data[0], available_states=binary_sensor_data[1] + ) def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) - data = 
self.sensor_types.get(device_class) - if device_class is None or data is None: - return {} - - try: - value = float(self.state.state) - except ValueError: - value = None - if self.state.state == STATE_UNKNOWN: - value = None - sensor_data = {"name": data[0], "rawValue": value} - - if device_class == sensor.SensorDeviceClass.AQI: - sensor_data["currentSensorState"] = self._air_quality_description_for_aqi( - value - ) + def create_sensor_state( + name: str, raw_value: float | None = None, current_state: str | None = None + ) -> dict[str, Any]: + sensor_state: dict[str, Any] = { + "name": name, + "rawValue": raw_value, + } + if current_state: + sensor_state["currentSensorState"] = current_state + return {"currentSensorStateData": [sensor_state]} - return {"currentSensorStateData": [sensor_data]} + if self.state.domain == sensor.DOMAIN: + sensor_data = self.sensor_types.get(device_class) + if device_class is None or sensor_data is None: + return {} + try: + value = float(self.state.state) + except ValueError: + value = None + if self.state.state == STATE_UNKNOWN: + value = None + current_state: str | None = None + if device_class == sensor.SensorDeviceClass.AQI: + current_state = self._air_quality_description_for_aqi(value) + return create_sensor_state(sensor_data[0], value, current_state) + + binary_sensor_data = self.binary_sensor_types.get(device_class) + if device_class is None or binary_sensor_data is None: + return {} + value = { + STATE_ON: 0, + STATE_OFF: 1, + STATE_UNKNOWN: 2, + }[self.state.state] + return create_sensor_state( + binary_sensor_data[0], current_state=binary_sensor_data[1][value] + ) diff --git a/homeassistant/components/google_assistant_sdk/config_flow.py b/homeassistant/components/google_assistant_sdk/config_flow.py index 85dfd974b22bde..cd78c90e297161 100644 --- a/homeassistant/components/google_assistant_sdk/config_flow.py +++ b/homeassistant/components/google_assistant_sdk/config_flow.py @@ -8,7 +8,12 @@ import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback from homeassistant.helpers import config_entry_oauth2_flow @@ -25,8 +30,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -46,9 +49,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -61,10 +61,10 @@ async def async_step_reauth_confirm( async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow, or update existing entry.""" - if self.reauth_entry: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) if self._async_current_entries(): # Config entry already exists, only one allowed. 
@@ -84,16 +84,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Google Assistant SDK options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/google_cloud/config_flow.py b/homeassistant/components/google_cloud/config_flow.py index dec849de4e6ba3..fa6c952022b519 100644 --- a/homeassistant/components/google_cloud/config_flow.py +++ b/homeassistant/components/google_cloud/config_flow.py @@ -15,7 +15,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.core import callback from homeassistant.helpers.selector import ( @@ -135,10 +135,10 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> GoogleCloudOptionsFlowHandler: """Create the options flow.""" - return GoogleCloudOptionsFlowHandler(config_entry) + return GoogleCloudOptionsFlowHandler() -class GoogleCloudOptionsFlowHandler(OptionsFlowWithConfigEntry): +class GoogleCloudOptionsFlowHandler(OptionsFlow): """Google Cloud options flow.""" async def async_step_init( @@ -169,7 +169,7 @@ async def async_step_init( ) ), **tts_options_schema( - self.options, voices, from_config_flow=True + self.config_entry.options, voices, from_config_flow=True ).schema, vol.Optional( CONF_STT_MODEL, @@ -182,6 +182,6 @@ async def async_step_init( ), } ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index 3c6141561323f8..f6e89fae7fa492 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -52,7 +52,7 @@ async def async_tts_voices( def tts_options_schema( - config_options: dict[str, Any], + config_options: Mapping[str, Any], voices: dict[str, list[str]], from_config_flow: bool = False, ) -> vol.Schema: diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py index ab23ac25f26295..83eec25ed1520e 100644 --- a/homeassistant/components/google_generative_ai_conversation/config_flow.py +++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py @@ -15,6 +15,7 @@ import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -85,10 +86,6 @@ class GoogleGenerativeAIConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize a new GoogleGenerativeAIConfigFlow.""" - self.reauth_entry: ConfigEntry | None = None - async def async_step_api( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -106,9 +103,9 @@ async def async_step_api( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self.reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self.reauth_entry, + self._get_reauth_entry(), data=user_input, ) return self.async_create_entry( @@ -135,9 +132,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = 
self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -146,12 +140,13 @@ async def async_step_reauth_confirm( """Dialog that informs the user that reauth is required.""" if user_input is not None: return await self.async_step_api() - assert self.reauth_entry + + reauth_entry = self._get_reauth_entry() return self.async_show_form( step_id="reauth_confirm", description_placeholders={ - CONF_NAME: self.reauth_entry.title, - CONF_API_KEY: self.reauth_entry.data.get(CONF_API_KEY, ""), + CONF_NAME: reauth_entry.title, + CONF_API_KEY: reauth_entry.data.get(CONF_API_KEY, ""), }, ) @@ -168,7 +163,6 @@ class GoogleGenerativeAIOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) diff --git a/homeassistant/components/google_mail/config_flow.py b/homeassistant/components/google_mail/config_flow.py index 5c81f7d49f59c8..b3a9a0e5d5608c 100644 --- a/homeassistant/components/google_mail/config_flow.py +++ b/homeassistant/components/google_mail/config_flow.py @@ -9,11 +9,10 @@ from google.oauth2.credentials import Credentials from googleapiclient.discovery import build -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow -from . import GoogleMailConfigEntry from .const import DEFAULT_ACCESS, DOMAIN @@ -24,8 +23,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: GoogleMailConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -45,9 +42,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -69,18 +63,15 @@ def _get_profile() -> str: credentials = Credentials(data[CONF_TOKEN][CONF_ACCESS_TOKEN]) email = await self.hass.async_add_executor_job(_get_profile) - if not self.reauth_entry: - await self.async_set_unique_id(email) + await self.async_set_unique_id(email) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry(title=email, data=data) - if self.reauth_entry.unique_id == email: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( + reauth_entry = self._get_reauth_entry() + self._abort_if_unique_id_mismatch( reason="wrong_account", - description_placeholders={"email": cast(str, self.reauth_entry.unique_id)}, + description_placeholders={"email": cast(str, reauth_entry.unique_id)}, ) + return self.async_update_reload_and_abort(reauth_entry, data=data) diff --git a/homeassistant/components/google_photos/config_flow.py b/homeassistant/components/google_photos/config_flow.py index 6b025cac6be695..a336455c9b41c1 100644 --- a/homeassistant/components/google_photos/config_flow.py +++ b/homeassistant/components/google_photos/config_flow.py @@ -7,11 +7,11 @@ from google_photos_library_api.api 
import GooglePhotosLibraryApi from google_photos_library_api.exceptions import GooglePhotosApiError -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow -from . import GooglePhotosConfigEntry, api +from . import api from .const import DOMAIN, OAUTH2_SCOPES @@ -22,8 +22,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: GooglePhotosConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -58,14 +56,13 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu return self.async_abort(reason="unknown") user_id = user_resource_info.id - if self.reauth_entry: - if self.reauth_entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.reauth_entry, unique_id=user_id, data=data - ) - return self.async_abort(reason="wrong_account") - await self.async_set_unique_id(user_id) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) + self._abort_if_unique_id_configured() return self.async_create_entry(title=user_resource_info.name, data=data) @@ -73,9 +70,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json index 21942ce71a7801..bd565a6122d361 100644 --- a/homeassistant/components/google_photos/strings.json +++ b/homeassistant/components/google_photos/strings.json @@ -21,7 +21,8 @@ "wrong_account": "Wrong account: Please authenticate with the right account.", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/google_sheets/config_flow.py b/homeassistant/components/google_sheets/config_flow.py index 4008d42f52db28..81c82bf1bc430f 100644 --- a/homeassistant/components/google_sheets/config_flow.py +++ b/homeassistant/components/google_sheets/config_flow.py @@ -9,11 +9,10 @@ from google.oauth2.credentials import Credentials from gspread import Client, GSpreadException -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow -from . 
import GoogleSheetsConfigEntry from .const import DEFAULT_ACCESS, DEFAULT_NAME, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -26,8 +25,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: GoogleSheetsConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -47,9 +44,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -66,24 +60,23 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu Credentials(data[CONF_TOKEN][CONF_ACCESS_TOKEN]) # type: ignore[no-untyped-call] ) - if self.reauth_entry: + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() _LOGGER.debug("service.open_by_key") try: await self.hass.async_add_executor_job( service.open_by_key, - self.reauth_entry.unique_id, + reauth_entry.unique_id, ) except GSpreadException as err: _LOGGER.error( "Could not find spreadsheet '%s': %s", - self.reauth_entry.unique_id, + reauth_entry.unique_id, str(err), ) return self.async_abort(reason="open_spreadsheet_failure") - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) try: doc = await self.hass.async_add_executor_job( diff --git a/homeassistant/components/google_tasks/api.py b/homeassistant/components/google_tasks/api.py index c8b30c173eb601..2a294b846540e8 100644 --- a/homeassistant/components/google_tasks/api.py +++ b/homeassistant/components/google_tasks/api.py @@ -46,8 +46,7 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token[CONF_ACCESS_TOKEN] async def _get_service(self) -> Resource: diff --git a/homeassistant/components/google_tasks/config_flow.py b/homeassistant/components/google_tasks/config_flow.py index 965c215ee4de5c..795b6e6eff5800 100644 --- a/homeassistant/components/google_tasks/config_flow.py +++ b/homeassistant/components/google_tasks/config_flow.py @@ -9,7 +9,7 @@ from googleapiclient.errors import HttpError from googleapiclient.http import HttpRequest -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -23,8 +23,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -70,25 +68,24 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu self.logger.exception("Unknown error occurred") return self.async_abort(reason="unknown") user_id = user_resource_info["id"] - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry(title=user_resource_info["name"], 
data=data) - if self.reauth_entry.unique_id == user_id or not self.reauth_entry.unique_id: - return self.async_update_reload_and_abort( - self.reauth_entry, unique_id=user_id, data=data - ) + reauth_entry = self._get_reauth_entry() + if reauth_entry.unique_id: + self._abort_if_unique_id_mismatch(reason="wrong_account") - return self.async_abort(reason="wrong_account") + return self.async_update_reload_and_abort( + reauth_entry, unique_id=user_id, data=data + ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/google_tasks/strings.json b/homeassistant/components/google_tasks/strings.json index 447da5e24c27ed..a26cf8c58ec6ae 100644 --- a/homeassistant/components/google_tasks/strings.json +++ b/homeassistant/components/google_tasks/strings.json @@ -21,7 +21,8 @@ "wrong_account": "Wrong account: Please authenticate with the right account.", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 95c5f1c3a16d81..5196f89728d8f8 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -106,7 +106,7 @@ def __init__( config_entry_id: str, task_list_id: str, ) -> None: - """Initialize LocalTodoListEntity.""" + """Initialize GoogleTaskTodoListEntity.""" super().__init__(coordinator) self._attr_name = name.capitalize() self._attr_unique_id = f"{config_entry_id}-{task_list_id}" @@ -153,9 +153,9 @@ async def async_move_todo_item( def _order_tasks(tasks: list[dict[str, Any]]) -> list[dict[str, Any]]: """Order the task items response. - All tasks have an order amongst their sibblings based on position. + All tasks have an order amongst their siblings based on position. - Home Assistant To-do items do not support the Google Task parent/sibbling + Home Assistant To-do items do not support the Google Task parent/sibling relationships and the desired behavior is for them to be filtered. 
""" parents = [task for task in tasks if task.get("parent") is None] diff --git a/homeassistant/components/google_travel_time/config_flow.py b/homeassistant/components/google_travel_time/config_flow.py index 9b59718c945f92..08de293bc7dc2b 100644 --- a/homeassistant/components/google_travel_time/config_flow.py +++ b/homeassistant/components/google_travel_time/config_flow.py @@ -148,10 +148,6 @@ def default_options(hass: HomeAssistant) -> dict[str, str]: class GoogleOptionsFlow(OptionsFlow): """Handle an options flow for Google Travel Time.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize google options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: @@ -207,15 +203,13 @@ class GoogleTravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _context_entry: ConfigEntry - @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, ) -> GoogleOptionsFlow: """Get the options flow for this handler.""" - return GoogleOptionsFlow(config_entry) + return GoogleOptionsFlow() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" @@ -238,13 +232,6 @@ async def async_step_user(self, user_input=None) -> ConfigFlowResult: async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfiguration.""" - self._context_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration.""" errors: dict[str, str] | None = None @@ -252,15 +239,13 @@ async def async_step_reconfigure_confirm( errors = await validate_input(self.hass, user_input) if not errors: return self.async_update_reload_and_abort( - self._context_entry, - data=user_input, - reason="reconfigure_successful", + self._get_reconfigure_entry(), data=user_input ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( - RECONFIGURE_SCHEMA, self._context_entry.data.copy() + RECONFIGURE_SCHEMA, self._get_reconfigure_entry().data ), errors=errors, ) diff --git a/homeassistant/components/google_travel_time/strings.json b/homeassistant/components/google_travel_time/strings.json index 6397336d9ac9e6..765cfc9c4b67d6 100644 --- a/homeassistant/components/google_travel_time/strings.json +++ b/homeassistant/components/google_travel_time/strings.json @@ -11,7 +11,7 @@ "destination": "Destination" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "[%key:component::google_travel_time::config::step::user::description%]", "data": { "api_key": "[%key:common::config_flow::data::api_key%]", diff --git a/homeassistant/components/govee_ble/binary_sensor.py b/homeassistant/components/govee_ble/binary_sensor.py index e5966124216ad7..bd92093c29c4bc 100644 --- a/homeassistant/components/govee_ble/binary_sensor.py +++ b/homeassistant/components/govee_ble/binary_sensor.py @@ -44,7 +44,7 @@ def sensor_update_to_bluetooth_data_update( sensor_update: SensorUpdate, -) -> PassiveBluetoothDataUpdate: +) -> PassiveBluetoothDataUpdate[bool | None]: """Convert a sensor update to a bluetooth data update.""" return PassiveBluetoothDataUpdate( devices={ @@ -95,13 +95,13 @@ class GoveeBluetoothBinarySensorEntity( ): """Representation of a govee-ble 
binary sensor.""" - processor: GoveeBLEPassiveBluetoothDataProcessor + processor: GoveeBLEPassiveBluetoothDataProcessor[bool | None] @property def available(self) -> bool: """Return False if sensor is in error.""" coordinator = self.processor.coordinator - return self.processor.entity_data.get(self.entity_key) != ERROR and ( + return self.processor.entity_data.get(self.entity_key) != ERROR and ( # type: ignore[comparison-overlap] ((model_info := coordinator.model_info) and model_info.sleepy) or super().available ) diff --git a/homeassistant/components/govee_ble/coordinator.py b/homeassistant/components/govee_ble/coordinator.py index 011a89e565bb8c..4408b7f3199ac6 100644 --- a/homeassistant/components/govee_ble/coordinator.py +++ b/homeassistant/components/govee_ble/coordinator.py @@ -1,5 +1,7 @@ """The govee Bluetooth integration.""" +from __future__ import annotations + from collections.abc import Callable from logging import Logger diff --git a/homeassistant/components/govee_ble/sensor.py b/homeassistant/components/govee_ble/sensor.py index a94610ef0e17d2..383f50e5c4619d 100644 --- a/homeassistant/components/govee_ble/sensor.py +++ b/homeassistant/components/govee_ble/sensor.py @@ -2,6 +2,9 @@ from __future__ import annotations +from datetime import date, datetime +from decimal import Decimal + from govee_ble import DeviceClass, SensorUpdate, Units from govee_ble.parser import ERROR @@ -29,6 +32,8 @@ from .coordinator import GoveeBLEConfigEntry, GoveeBLEPassiveBluetoothDataProcessor from .device import device_key_to_bluetooth_entity_key +type _SensorValueType = str | int | float | date | datetime | Decimal | None + SENSOR_DESCRIPTIONS = { (DeviceClass.TEMPERATURE, Units.TEMP_CELSIUS): SensorEntityDescription( key=f"{DeviceClass.TEMPERATURE}_{Units.TEMP_CELSIUS}", @@ -72,7 +77,7 @@ def sensor_update_to_bluetooth_data_update( sensor_update: SensorUpdate, -) -> PassiveBluetoothDataUpdate: +) -> PassiveBluetoothDataUpdate[_SensorValueType]: """Convert a sensor update to a bluetooth data update.""" return PassiveBluetoothDataUpdate( devices={ @@ -117,13 +122,13 @@ async def async_setup_entry( class GoveeBluetoothSensorEntity( PassiveBluetoothProcessorEntity[ - PassiveBluetoothDataProcessor[float | int | str | None, SensorUpdate] + PassiveBluetoothDataProcessor[_SensorValueType, SensorUpdate] ], SensorEntity, ): """Representation of a govee ble sensor.""" - processor: GoveeBLEPassiveBluetoothDataProcessor + processor: GoveeBLEPassiveBluetoothDataProcessor[_SensorValueType] @property def available(self) -> bool: @@ -135,6 +140,6 @@ def available(self) -> bool: ) @property - def native_value(self) -> float | int | str | None: + def native_value(self) -> _SensorValueType: # pylint: disable=hass-return-type """Return the native value.""" return self.processor.entity_data.get(self.entity_key) diff --git a/homeassistant/components/govee_light_local/__init__.py b/homeassistant/components/govee_light_local/__init__.py index 088f9bae22b0f3..44dbc8256658ef 100644 --- a/homeassistant/components/govee_light_local/__init__.py +++ b/homeassistant/components/govee_light_local/__init__.py @@ -9,23 +9,21 @@ from govee_local_api.controller import LISTENING_PORT -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DISCOVERY_TIMEOUT, DOMAIN -from .coordinator import GoveeLocalApiCoordinator +from .const import DISCOVERY_TIMEOUT +from .coordinator 
import GoveeLocalApiCoordinator, GoveeLocalConfigEntry PLATFORMS: list[Platform] = [Platform.LIGHT] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: GoveeLocalConfigEntry) -> bool: """Set up Govee light local from a config entry.""" - - coordinator: GoveeLocalApiCoordinator = GoveeLocalApiCoordinator(hass=hass) + coordinator = GoveeLocalApiCoordinator(hass=hass) async def await_cleanup(): cleanup_complete: asyncio.Event = coordinator.cleanup() @@ -52,14 +50,11 @@ async def await_cleanup(): except TimeoutError as ex: raise ConfigEntryNotReady from ex - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: GoveeLocalConfigEntry) -> bool: """Unload a config entry.""" - - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/govee_light_local/coordinator.py b/homeassistant/components/govee_light_local/coordinator.py index 64119f1871c506..240313a34b8086 100644 --- a/homeassistant/components/govee_light_local/coordinator.py +++ b/homeassistant/components/govee_light_local/coordinator.py @@ -6,6 +6,7 @@ from govee_local_api import GoveeController, GoveeDevice +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -19,6 +20,8 @@ _LOGGER = logging.getLogger(__name__) +type GoveeLocalConfigEntry = ConfigEntry[GoveeLocalApiCoordinator] + class GoveeLocalApiCoordinator(DataUpdateCoordinator[list[GoveeDevice]]): """Govee light local coordinator.""" diff --git a/homeassistant/components/govee_light_local/light.py b/homeassistant/components/govee_light_local/light.py index fb52c23343630b..cb2e24fa8a6cde 100644 --- a/homeassistant/components/govee_light_local/light.py +++ b/homeassistant/components/govee_light_local/light.py @@ -15,26 +15,25 @@ LightEntity, filter_supported_color_modes, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, MANUFACTURER -from .coordinator import GoveeLocalApiCoordinator +from .coordinator import GoveeLocalApiCoordinator, GoveeLocalConfigEntry _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: GoveeLocalConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Govee light setup.""" - coordinator: GoveeLocalApiCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data def discovery_callback(device: GoveeDevice, is_new: bool) -> bool: if is_new: diff --git a/homeassistant/components/govee_light_local/manifest.json b/homeassistant/components/govee_light_local/manifest.json index b6b25f5aa09c41..a94d4e58e9ac69 100644 --- 
a/homeassistant/components/govee_light_local/manifest.json +++ b/homeassistant/components/govee_light_local/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/govee_light_local", "iot_class": "local_push", - "requirements": ["govee-local-api==1.5.2"] + "requirements": ["govee-local-api==1.5.3"] } diff --git a/homeassistant/components/group/cover.py b/homeassistant/components/group/cover.py index b0b36e11b6bc4d..b2e5c6eef374a9 100644 --- a/homeassistant/components/group/cover.py +++ b/homeassistant/components/group/cover.py @@ -15,6 +15,7 @@ PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -31,10 +32,6 @@ SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -285,15 +282,15 @@ def async_update_group_state(self) -> None: for entity_id in self._entity_ids: if not (state := self.hass.states.get(entity_id)): continue - if state.state == STATE_OPEN: + if state.state == CoverState.OPEN: self._attr_is_closed = False continue - if state.state == STATE_CLOSED: + if state.state == CoverState.CLOSED: continue - if state.state == STATE_CLOSING: + if state.state == CoverState.CLOSING: self._attr_is_closing = True continue - if state.state == STATE_OPENING: + if state.state == CoverState.OPENING: self._attr_is_opening = True continue if not valid_state: diff --git a/homeassistant/components/group/registry.py b/homeassistant/components/group/registry.py index e0a74d32f44773..7ac5770f1714c8 100644 --- a/homeassistant/components/group/registry.py +++ b/homeassistant/components/group/registry.py @@ -8,6 +8,7 @@ from dataclasses import dataclass from typing import Protocol +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import HVACMode from homeassistant.components.lock import LockState from homeassistant.components.vacuum import STATE_CLEANING, STATE_ERROR, STATE_RETURNING @@ -20,12 +21,6 @@ STATE_PERFORMANCE, ) from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_HOME, STATE_IDLE, @@ -60,12 +55,12 @@ Platform.ALARM_CONTROL_PANEL: ( { STATE_ON, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.TRIGGERED, }, STATE_ON, STATE_OFF, diff --git a/homeassistant/components/group/sensor.py b/homeassistant/components/group/sensor.py index 32744bebc336ea..4a3e191e511b0f 100644 --- a/homeassistant/components/group/sensor.py +++ b/homeassistant/components/group/sensor.py @@ -36,14 +36,7 @@ STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import ( - CALLBACK_TYPE, - Event, - EventStateChangedData, - HomeAssistant, - State, - callback, -) +from homeassistant.core import HomeAssistant, State, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, 
entity_registry as er from homeassistant.helpers.entity import ( @@ -52,7 +45,6 @@ get_unit_of_measurement, ) from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.issue_registry import ( IssueSeverity, async_create_issue, @@ -180,6 +172,17 @@ def async_create_preview_sensor( ) +def _has_numeric_state(hass: HomeAssistant, entity_id: str) -> bool: + """Test if state is numeric.""" + if not (state := hass.states.get(entity_id)): + return False + try: + float(state.state) + except ValueError: + return False + return True + + def calc_min( sensor_values: list[tuple[str, float, State]], ) -> tuple[dict[str, str | None], float | None]: @@ -332,12 +335,11 @@ def __init__( self.hass = hass self._entity_ids = entity_ids self._sensor_type = sensor_type - self._state_class = state_class - self._device_class = device_class - self._native_unit_of_measurement = unit_of_measurement + self._configured_state_class = state_class + self._configured_device_class = device_class + self._configured_unit_of_measurement = unit_of_measurement self._valid_units: set[str | None] = set() self._can_convert: bool = False - self.calculate_attributes_later: CALLBACK_TYPE | None = None self._attr_name = name if name == DEFAULT_NAME: self._attr_name = f"{DEFAULT_NAME} {sensor_type}".capitalize() @@ -352,39 +354,25 @@ def __init__( self._state_incorrect: set[str] = set() self._extra_state_attribute: dict[str, Any] = {} - async def async_added_to_hass(self) -> None: - """When added to hass.""" - for entity_id in self._entity_ids: - if self.hass.states.get(entity_id) is None: - self.calculate_attributes_later = async_track_state_change_event( - self.hass, self._entity_ids, self.calculate_state_attributes - ) - break - if not self.calculate_attributes_later: - await self.calculate_state_attributes() - await super().async_added_to_hass() - - async def calculate_state_attributes( - self, event: Event[EventStateChangedData] | None = None - ) -> None: + def calculate_state_attributes(self, valid_state_entities: list[str]) -> None: """Calculate state attributes.""" - for entity_id in self._entity_ids: - if self.hass.states.get(entity_id) is None: - return - if self.calculate_attributes_later: - self.calculate_attributes_later() - self.calculate_attributes_later = None - self._attr_state_class = self._calculate_state_class(self._state_class) - self._attr_device_class = self._calculate_device_class(self._device_class) + self._attr_state_class = self._calculate_state_class( + self._configured_state_class, valid_state_entities + ) + self._attr_device_class = self._calculate_device_class( + self._configured_device_class, valid_state_entities + ) self._attr_native_unit_of_measurement = self._calculate_unit_of_measurement( - self._native_unit_of_measurement + self._configured_unit_of_measurement, valid_state_entities ) self._valid_units = self._get_valid_units() @callback def async_update_group_state(self) -> None: """Query all members and determine the sensor group state.""" + self.calculate_state_attributes(self._get_valid_entities()) states: list[StateType] = [] + valid_units = self._valid_units valid_states: list[bool] = [] sensor_values: list[tuple[str, float, State]] = [] for entity_id in self._entity_ids: @@ -392,20 +380,18 @@ def async_update_group_state(self) -> None: states.append(state.state) try: numeric_state = float(state.state) - if ( - self._valid_units - and (uom := state.attributes["unit_of_measurement"]) - 
in self._valid_units
-                        and self._can_convert is True
-                    ):
+                    uom = state.attributes.get("unit_of_measurement")
+
+                    # Convert the state to the native unit of measurement when we have valid units
+                    # and a correct device class
+                    if valid_units and uom in valid_units and self._can_convert is True:
                         numeric_state = UNIT_CONVERTERS[self.device_class].convert(
                             numeric_state, uom, self.native_unit_of_measurement
                         )
-                    if (
-                        self._valid_units
-                        and (uom := state.attributes["unit_of_measurement"])
-                        not in self._valid_units
-                    ):
+
+                    # If we have valid units and the entity's unit does not match,
+                    # we raise, which skips the state and logs a warning once
+                    if valid_units and uom not in valid_units:
                         raise HomeAssistantError("Not a valid unit")  # noqa: TRY301

                     sensor_values.append((entity_id, numeric_state, state))
@@ -480,7 +466,9 @@ def icon(self) -> str | None:
         return None

     def _calculate_state_class(
-        self, state_class: SensorStateClass | None
+        self,
+        state_class: SensorStateClass | None,
+        valid_state_entities: list[str],
     ) -> SensorStateClass | None:
         """Calculate state class.

@@ -491,8 +479,18 @@ def _calculate_state_class(
         """
         if state_class:
             return state_class
+
+        if not valid_state_entities:
+            return None
+
+        if not self._ignore_non_numeric and len(valid_state_entities) < len(
+            self._entity_ids
+        ):
+            # Only return state class if all states are valid when not ignoring non numeric
+            return None
+
         state_classes: list[SensorStateClass] = []
-        for entity_id in self._entity_ids:
+        for entity_id in valid_state_entities:
             try:
                 _state_class = get_capability(self.hass, entity_id, "state_class")
             except HomeAssistantError:
@@ -523,7 +521,9 @@ def _calculate_state_class(
         return None

     def _calculate_device_class(
-        self, device_class: SensorDeviceClass | None
+        self,
+        device_class: SensorDeviceClass | None,
+        valid_state_entities: list[str],
     ) -> SensorDeviceClass | None:
         """Calculate device class.

@@ -534,8 +534,18 @@ def _calculate_device_class(
         """
         if device_class:
             return device_class
+
+        if not valid_state_entities:
+            return None
+
+        if not self._ignore_non_numeric and len(valid_state_entities) < len(
+            self._entity_ids
+        ):
+            # Only return device class if all states are valid when not ignoring non numeric
+            return None
+
         device_classes: list[SensorDeviceClass] = []
-        for entity_id in self._entity_ids:
+        for entity_id in valid_state_entities:
             try:
                 _device_class = get_device_class(self.hass, entity_id)
             except HomeAssistantError:
@@ -568,7 +578,9 @@ def _calculate_device_class(
         return None

     def _calculate_unit_of_measurement(
-        self, unit_of_measurement: str | None
+        self,
+        unit_of_measurement: str | None,
+        valid_state_entities: list[str],
     ) -> str | None:
         """Calculate the unit of measurement.

@@ -579,8 +591,17 @@ def _calculate_unit_of_measurement(
         if unit_of_measurement:
             return unit_of_measurement

+        if not valid_state_entities:
+            return None
+
+        if not self._ignore_non_numeric and len(valid_state_entities) < len(
+            self._entity_ids
+        ):
+            # Only return the unit of measurement if all states are valid when not ignoring non numeric
+            return None
+
         unit_of_measurements: list[str] = []
-        for entity_id in self._entity_ids:
+        for entity_id in valid_state_entities:
             try:
                 _unit_of_measurement = get_unit_of_measurement(self.hass, entity_id)
             except HomeAssistantError:
@@ -665,19 +686,31 @@ def _get_valid_units(self) -> set[str | None]:
         If device class is set and compatible unit of measurements.
         If device class is not set, use one unit of measurement.

+        Only calculate valid units if there are no valid units set.
""" - if ( - device_class := self.device_class - ) in UNIT_CONVERTERS and self.native_unit_of_measurement: + if (valid_units := self._valid_units) and not self._ignore_non_numeric: + # If we have valid units already and not using ignore_non_numeric + # we should not recalculate. + return valid_units + + native_uom = self.native_unit_of_measurement + if (device_class := self.device_class) in UNIT_CONVERTERS and native_uom: self._can_convert = True return UNIT_CONVERTERS[device_class].VALID_UNITS - if ( - device_class - and (device_class) in DEVICE_CLASS_UNITS - and self.native_unit_of_measurement - ): + if device_class and (device_class) in DEVICE_CLASS_UNITS and native_uom: valid_uoms: set = DEVICE_CLASS_UNITS[device_class] return valid_uoms - if device_class is None and self.native_unit_of_measurement: - return {self.native_unit_of_measurement} + if device_class is None and native_uom: + return {native_uom} return set() + + def _get_valid_entities( + self, + ) -> list[str]: + """Return list of valid entities.""" + + return [ + entity_id + for entity_id in self._entity_ids + if _has_numeric_state(self.hass, entity_id) + ] diff --git a/homeassistant/components/guardian/strings.json b/homeassistant/components/guardian/strings.json index e8622fe9d03659..b1b72b7100287e 100644 --- a/homeassistant/components/guardian/strings.json +++ b/homeassistant/components/guardian/strings.json @@ -12,6 +12,9 @@ "description": "Do you want to set up this Guardian device?" } }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", diff --git a/homeassistant/components/habitica/__init__.py b/homeassistant/components/habitica/__init__.py index 8781a6e2d48b54..5843e14d63e166 100644 --- a/homeassistant/components/habitica/__init__.py +++ b/homeassistant/components/habitica/__init__.py @@ -1,209 +1,48 @@ """The habitica integration.""" from http import HTTPStatus -import logging -from typing import Any from aiohttp import ClientResponseError from habitipy.aio import HabitipyAsync -import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - ATTR_NAME, + APPLICATION_NAME, CONF_API_KEY, CONF_NAME, - CONF_SENSORS, CONF_URL, CONF_VERIFY_SSL, Platform, + __version__, ) -from homeassistant.core import ( - HomeAssistant, - ServiceCall, - ServiceResponse, - SupportsResponse, -) -from homeassistant.exceptions import ( - ConfigEntryNotReady, - HomeAssistantError, - ServiceValidationError, -) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.selector import ConfigEntrySelector from homeassistant.helpers.typing import ConfigType -from .const import ( - ATTR_ARGS, - ATTR_CONFIG_ENTRY, - ATTR_DATA, - ATTR_PATH, - ATTR_SKILL, - ATTR_TASK, - CONF_API_USER, - DEFAULT_URL, - DOMAIN, - EVENT_API_CALL_SUCCESS, - SERVICE_API_CALL, - SERVICE_CAST_SKILL, -) +from .const import CONF_API_USER, DEVELOPER_ID, DOMAIN from .coordinator import HabiticaDataUpdateCoordinator +from .services import async_setup_services +from .types import HabiticaConfigEntry -_LOGGER = logging.getLogger(__name__) - -type HabiticaConfigEntry = 
ConfigEntry[HabiticaDataUpdateCoordinator] - -SENSORS_TYPES = ["name", "hp", "maxHealth", "mp", "maxMP", "exp", "toNextLevel", "lvl"] - -INSTANCE_SCHEMA = vol.All( - cv.deprecated(CONF_SENSORS), - vol.Schema( - { - vol.Optional(CONF_URL, default=DEFAULT_URL): cv.url, - vol.Optional(CONF_NAME): cv.string, - vol.Required(CONF_API_USER): cv.string, - vol.Required(CONF_API_KEY): cv.string, - vol.Optional(CONF_SENSORS, default=list(SENSORS_TYPES)): vol.All( - cv.ensure_list, vol.Unique(), [vol.In(list(SENSORS_TYPES))] - ), - } - ), -) - -has_unique_values = vol.Schema(vol.Unique()) -# because we want a handy alias +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -def has_all_unique_users(value): - """Validate that all API users are unique.""" - api_users = [user[CONF_API_USER] for user in value] - has_unique_values(api_users) - return value - - -def has_all_unique_users_names(value): - """Validate that all user's names are unique and set if any is set.""" - names = [user.get(CONF_NAME) for user in value] - if None in names and any(name is not None for name in names): - raise vol.Invalid("user names of all users must be set if any is set") - if not all(name is None for name in names): - has_unique_values(names) - return value - - -INSTANCE_LIST_SCHEMA = vol.All( - cv.ensure_list, has_all_unique_users, has_all_unique_users_names, [INSTANCE_SCHEMA] -) -CONFIG_SCHEMA = vol.Schema({DOMAIN: INSTANCE_LIST_SCHEMA}, extra=vol.ALLOW_EXTRA) - -PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH, Platform.TODO] - -SERVICE_API_CALL_SCHEMA = vol.Schema( - { - vol.Required(ATTR_NAME): str, - vol.Required(ATTR_PATH): vol.All(cv.ensure_list, [str]), - vol.Optional(ATTR_ARGS): dict, - } -) -SERVICE_CAST_SKILL_SCHEMA = vol.Schema( - { - vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), - vol.Required(ATTR_SKILL): cv.string, - vol.Optional(ATTR_TASK): cv.string, - } -) +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CALENDAR, + Platform.SENSOR, + Platform.SWITCH, + Platform.TODO, +] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Habitica service.""" - configs = config.get(DOMAIN, []) - for conf in configs: - if conf.get(CONF_URL) is None: - conf[CONF_URL] = DEFAULT_URL - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=conf - ) - ) - - async def cast_skill(call: ServiceCall) -> ServiceResponse: - """Skill action.""" - entry: HabiticaConfigEntry | None - if not ( - entry := hass.config_entries.async_get_entry(call.data[ATTR_CONFIG_ENTRY]) - ): - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="entry_not_found", - ) - coordinator = entry.runtime_data - skill = { - "pickpocket": {"spellId": "pickPocket", "cost": "10 MP"}, - "backstab": {"spellId": "backStab", "cost": "15 MP"}, - "smash": {"spellId": "smash", "cost": "10 MP"}, - "fireball": {"spellId": "fireball", "cost": "10 MP"}, - } - try: - task_id = next( - task["id"] - for task in coordinator.data.tasks - if call.data[ATTR_TASK] in (task["id"], task.get("alias")) - or call.data[ATTR_TASK] == task["text"] - ) - except StopIteration as e: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="task_not_found", - translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, - ) from e - - try: - response: dict[str, Any] = await coordinator.api.user.class_.cast[ - skill[call.data[ATTR_SKILL]]["spellId"] - ].post(targetId=task_id) - except 
ClientResponseError as e: - if e.status == HTTPStatus.TOO_MANY_REQUESTS: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="setup_rate_limit_exception", - ) from e - if e.status == HTTPStatus.UNAUTHORIZED: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="not_enough_mana", - translation_placeholders={ - "cost": skill[call.data[ATTR_SKILL]]["cost"], - "mana": f"{int(coordinator.data.user.get("stats", {}).get("mp", 0))} MP", - }, - ) from e - if e.status == HTTPStatus.NOT_FOUND: - # could also be task not found, but the task is looked up - # before the request, so most likely wrong skill selected - # or the skill hasn't been unlocked yet. - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="skill_not_found", - translation_placeholders={"skill": call.data[ATTR_SKILL]}, - ) from e - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="service_call_exception", - ) from e - else: - await coordinator.async_request_refresh() - return response - - hass.services.async_register( - DOMAIN, - SERVICE_CAST_SKILL, - cast_skill, - schema=SERVICE_CAST_SKILL_SCHEMA, - supports_response=SupportsResponse.ONLY, - ) + async_setup_services(hass) return True @@ -218,32 +57,12 @@ class HAHabitipyAsync(HabitipyAsync): def __call__(self, **kwargs): return super().__call__(websession, **kwargs) - async def handle_api_call(call: ServiceCall) -> None: - name = call.data[ATTR_NAME] - path = call.data[ATTR_PATH] - entries = hass.config_entries.async_entries(DOMAIN) - - api = None - for entry in entries: - if entry.data[CONF_NAME] == name: - api = entry.runtime_data.api - break - if api is None: - _LOGGER.error("API_CALL: User '%s' not configured", name) - return - try: - for element in path: - api = api[element] - except KeyError: - _LOGGER.error( - "API_CALL: Path %s is invalid for API on '{%s}' element", path, element + def _make_headers(self) -> dict[str, str]: + headers = super()._make_headers() + headers.update( + {"x-client": f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"} ) - return - kwargs = call.data.get(ATTR_ARGS, {}) - data = await api(**kwargs) - hass.bus.async_fire( - EVENT_API_CALL_SUCCESS, {ATTR_NAME: name, ATTR_PATH: path, ATTR_DATA: data} - ) + return headers websession = async_get_clientsession( hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True) @@ -280,16 +99,9 @@ async def handle_api_call(call: ServiceCall) -> None: config_entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - if not hass.services.has_service(DOMAIN, SERVICE_API_CALL): - hass.services.async_register( - DOMAIN, SERVICE_API_CALL, handle_api_call, schema=SERVICE_API_CALL_SCHEMA - ) - return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - if len(hass.config_entries.async_entries(DOMAIN)) == 1: - hass.services.async_remove(DOMAIN, SERVICE_API_CALL) return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/habitica/binary_sensor.py b/homeassistant/components/habitica/binary_sensor.py new file mode 100644 index 00000000000000..bc79370ea63b27 --- /dev/null +++ b/homeassistant/components/habitica/binary_sensor.py @@ -0,0 +1,85 @@ +"""Binary sensor platform for Habitica integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from enum import StrEnum +from typing import Any 
+ +from homeassistant.components.binary_sensor import ( + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import ASSETS_URL +from .entity import HabiticaBase +from .types import HabiticaConfigEntry + + +@dataclass(kw_only=True, frozen=True) +class HabiticaBinarySensorEntityDescription(BinarySensorEntityDescription): + """Habitica Binary Sensor Description.""" + + value_fn: Callable[[dict[str, Any]], bool | None] + entity_picture: Callable[[dict[str, Any]], str | None] + + +class HabiticaBinarySensor(StrEnum): + """Habitica Entities.""" + + PENDING_QUEST = "pending_quest" + + +def get_scroll_image_for_pending_quest_invitation(user: dict[str, Any]) -> str | None: + """Entity picture for pending quest invitation.""" + if user["party"]["quest"].get("key") and user["party"]["quest"]["RSVPNeeded"]: + return f"inventory_quest_scroll_{user["party"]["quest"]["key"]}.png" + return None + + +BINARY_SENSOR_DESCRIPTIONS: tuple[HabiticaBinarySensorEntityDescription, ...] = ( + HabiticaBinarySensorEntityDescription( + key=HabiticaBinarySensor.PENDING_QUEST, + translation_key=HabiticaBinarySensor.PENDING_QUEST, + value_fn=lambda user: user["party"]["quest"]["RSVPNeeded"], + entity_picture=get_scroll_image_for_pending_quest_invitation, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HabiticaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the habitica binary sensors.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + HabiticaBinarySensorEntity(coordinator, description) + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class HabiticaBinarySensorEntity(HabiticaBase, BinarySensorEntity): + """Representation of a Habitica binary sensor.""" + + entity_description: HabiticaBinarySensorEntityDescription + + @property + def is_on(self) -> bool | None: + """If the binary sensor is on.""" + return self.entity_description.value_fn(self.coordinator.data.user) + + @property + def entity_picture(self) -> str | None: + """Return the entity picture to use in the frontend, if any.""" + if entity_picture := self.entity_description.entity_picture( + self.coordinator.data.user + ): + return f"{ASSETS_URL}{entity_picture}" + return None diff --git a/homeassistant/components/habitica/button.py b/homeassistant/components/habitica/button.py index 276aa4e7fc063a..8b41fb8c987cc0 100644 --- a/homeassistant/components/habitica/button.py +++ b/homeassistant/components/habitica/button.py @@ -10,15 +10,20 @@ from aiohttp import ClientResponseError -from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import HabiticaConfigEntry -from .const import DOMAIN +from .const import ASSETS_URL, DOMAIN, HEALER, MAGE, ROGUE, WARRIOR from .coordinator import HabiticaData, HabiticaDataUpdateCoordinator from .entity import HabiticaBase +from .types import HabiticaConfigEntry @dataclass(kw_only=True, frozen=True) @@ -27,6 +32,8 @@ class HabiticaButtonEntityDescription(ButtonEntityDescription): press_fn: Callable[[HabiticaDataUpdateCoordinator], Any] available_fn: Callable[[HabiticaData], bool] | None = None + class_needed: str | None = None + entity_picture: str | None = None class HabitipyButtonEntity(StrEnum): @@ -36,6 +43,18 @@ class HabitipyButtonEntity(StrEnum): BUY_HEALTH_POTION = "buy_health_potion" ALLOCATE_ALL_STAT_POINTS = "allocate_all_stat_points" REVIVE = "revive" + MPHEAL = "mpheal" + EARTH = "earth" + FROST = "frost" + DEFENSIVE_STANCE = "defensive_stance" + VALOROUS_PRESENCE = "valorous_presence" + INTIMIDATE = "intimidate" + TOOLS_OF_TRADE = "tools_of_trade" + STEALTH = "stealth" + HEAL = "heal" + PROTECT_AURA = "protect_aura" + BRIGHTNESS = "brightness" + HEAL_ALL = "heal_all" BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = ( @@ -55,6 +74,7 @@ class HabitipyButtonEntity(StrEnum): lambda data: data.user["stats"]["gp"] >= 25 and data.user["stats"]["hp"] < 50 ), + entity_picture="shop_potion.png", ), HabiticaButtonEntityDescription( key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS, @@ -74,6 +94,175 @@ class HabitipyButtonEntity(StrEnum): ) +CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = ( + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.MPHEAL, + translation_key=HabitipyButtonEntity.MPHEAL, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["mpheal"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 12 + and data.user["stats"]["mp"] >= 30 + ), + class_needed=MAGE, + entity_picture="shop_mpheal.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.EARTH, + translation_key=HabitipyButtonEntity.EARTH, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["earth"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 35 + ), + class_needed=MAGE, + entity_picture="shop_earth.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.FROST, + translation_key=HabitipyButtonEntity.FROST, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["frost"].post(), + # chilling frost can only be cast once per day (streaks buff is false) + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 40 + and not data.user["stats"]["buffs"]["streaks"] + ), + class_needed=MAGE, + entity_picture="shop_frost.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.DEFENSIVE_STANCE, + translation_key=HabitipyButtonEntity.DEFENSIVE_STANCE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast[ + "defensiveStance" + ].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 12 + and data.user["stats"]["mp"] >= 25 + ), + class_needed=WARRIOR, + entity_picture="shop_defensiveStance.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.VALOROUS_PRESENCE, + translation_key=HabitipyButtonEntity.VALOROUS_PRESENCE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast[ + "valorousPresence" + ].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 20 + ), + 
class_needed=WARRIOR, + entity_picture="shop_valorousPresence.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.INTIMIDATE, + translation_key=HabitipyButtonEntity.INTIMIDATE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["intimidate"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 15 + ), + class_needed=WARRIOR, + entity_picture="shop_intimidate.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.TOOLS_OF_TRADE, + translation_key=HabitipyButtonEntity.TOOLS_OF_TRADE, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["toolsOfTrade"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 25 + ), + class_needed=ROGUE, + entity_picture="shop_toolsOfTrade.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.STEALTH, + translation_key=HabitipyButtonEntity.STEALTH, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["stealth"].post() + ), + # Stealth buffs stack and it can only be cast if the amount of + # unfinished dailies is smaller than the amount of buffs + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 45 + and data.user["stats"]["buffs"]["stealth"] + < len( + [ + r + for r in data.tasks + if r.get("type") == "daily" + and r.get("isDue") is True + and r.get("completed") is False + ] + ) + ), + class_needed=ROGUE, + entity_picture="shop_stealth.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.HEAL, + translation_key=HabitipyButtonEntity.HEAL, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["heal"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 11 + and data.user["stats"]["mp"] >= 15 + and data.user["stats"]["hp"] < 50 + ), + class_needed=HEALER, + entity_picture="shop_heal.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.BRIGHTNESS, + translation_key=HabitipyButtonEntity.BRIGHTNESS, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["brightness"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 12 + and data.user["stats"]["mp"] >= 15 + ), + class_needed=HEALER, + entity_picture="shop_brightness.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.PROTECT_AURA, + translation_key=HabitipyButtonEntity.PROTECT_AURA, + press_fn=( + lambda coordinator: coordinator.api.user.class_.cast["protectAura"].post() + ), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 13 + and data.user["stats"]["mp"] >= 30 + ), + class_needed=HEALER, + entity_picture="shop_protectAura.png", + ), + HabiticaButtonEntityDescription( + key=HabitipyButtonEntity.HEAL_ALL, + translation_key=HabitipyButtonEntity.HEAL_ALL, + press_fn=lambda coordinator: coordinator.api.user.class_.cast["healAll"].post(), + available_fn=( + lambda data: data.user["stats"]["lvl"] >= 14 + and data.user["stats"]["mp"] >= 25 + ), + class_needed=HEALER, + entity_picture="shop_healAll.png", + ), +) + + async def async_setup_entry( hass: HomeAssistant, entry: HabiticaConfigEntry, @@ -82,6 +271,40 @@ async def async_setup_entry( """Set up buttons from a config entry.""" coordinator = entry.runtime_data + skills_added: set[str] = set() + + @callback + def add_entities() -> None: + """Add or remove a skillset based on the player's class.""" + + nonlocal skills_added + buttons = [] + entity_registry = er.async_get(hass) + 
+ for description in CLASS_SKILLS: + if ( + coordinator.data.user["stats"]["lvl"] >= 10 + and coordinator.data.user["flags"]["classSelected"] + and not coordinator.data.user["preferences"]["disableClasses"] + and description.class_needed == coordinator.data.user["stats"]["class"] + ): + if description.key not in skills_added: + buttons.append(HabiticaButton(coordinator, description)) + skills_added.add(description.key) + elif description.key in skills_added: + if entity_id := entity_registry.async_get_entity_id( + BUTTON_DOMAIN, + DOMAIN, + f"{coordinator.config_entry.unique_id}_{description.key}", + ): + entity_registry.async_remove(entity_id) + skills_added.remove(description.key) + + if buttons: + async_add_entities(buttons) + + coordinator.async_add_listener(add_entities) + add_entities() async_add_entities( HabiticaButton(coordinator, description) for description in BUTTON_DESCRIPTIONS @@ -108,7 +331,7 @@ async def async_press(self) -> None: translation_domain=DOMAIN, translation_key="service_call_unallowed", ) from e - raise ServiceValidationError( + raise HomeAssistantError( translation_domain=DOMAIN, translation_key="service_call_exception", ) from e @@ -123,3 +346,10 @@ def available(self) -> bool: if self.entity_description.available_fn: return self.entity_description.available_fn(self.coordinator.data) return True + + @property + def entity_picture(self) -> str | None: + """Return the entity picture to use in the frontend, if any.""" + if entity_picture := self.entity_description.entity_picture: + return f"{ASSETS_URL}{entity_picture}" + return None diff --git a/homeassistant/components/habitica/calendar.py b/homeassistant/components/habitica/calendar.py new file mode 100644 index 00000000000000..5a0470c3440e4a --- /dev/null +++ b/homeassistant/components/habitica/calendar.py @@ -0,0 +1,227 @@ +"""Calendar platform for Habitica integration.""" + +from __future__ import annotations + +from datetime import date, datetime, timedelta +from enum import StrEnum + +from dateutil.rrule import rrule + +from homeassistant.components.calendar import ( + CalendarEntity, + CalendarEntityDescription, + CalendarEvent, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util + +from . 
import HabiticaConfigEntry +from .coordinator import HabiticaDataUpdateCoordinator +from .entity import HabiticaBase +from .types import HabiticaTaskType +from .util import build_rrule, get_recurrence_rule + + +class HabiticaCalendar(StrEnum): + """Habitica calendars.""" + + DAILIES = "dailys" + TODOS = "todos" + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HabiticaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the calendar platform.""" + coordinator = config_entry.runtime_data + + async_add_entities( + [ + HabiticaTodosCalendarEntity(coordinator), + HabiticaDailiesCalendarEntity(coordinator), + ] + ) + + +class HabiticaCalendarEntity(HabiticaBase, CalendarEntity): + """Base Habitica calendar entity.""" + + def __init__( + self, + coordinator: HabiticaDataUpdateCoordinator, + ) -> None: + """Initialize calendar entity.""" + super().__init__(coordinator, self.entity_description) + + +class HabiticaTodosCalendarEntity(HabiticaCalendarEntity): + """Habitica todos calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.TODOS, + translation_key=HabiticaCalendar.TODOS, + ) + + def dated_todos( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Get all dated todos.""" + + events = [] + for task in self.coordinator.data.tasks: + if not ( + task["type"] == HabiticaTaskType.TODO + and not task["completed"] + and task.get("date") # only if has due date + ): + continue + + start = dt_util.start_of_local_day(datetime.fromisoformat(task["date"])) + end = start + timedelta(days=1) + # return current and upcoming events or events within the requested range + + if end < start_date: + # Event ends before date range + continue + + if end_date and start > end_date: + # Event starts after date range + continue + + events.append( + CalendarEvent( + start=start.date(), + end=end.date(), + summary=task["text"], + description=task["notes"], + uid=task["id"], + ) + ) + return sorted( + events, + key=lambda event: ( + event.start, + self.coordinator.data.user["tasksOrder"]["todos"].index(event.uid), + ), + ) + + @property + def event(self) -> CalendarEvent | None: + """Return the current or next upcoming event.""" + + return next(iter(self.dated_todos(dt_util.now())), None) + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Return calendar events within a datetime range.""" + return self.dated_todos(start_date, end_date) + + +class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): + """Habitica dailies calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.DAILIES, + translation_key=HabiticaCalendar.DAILIES, + ) + + @property + def today(self) -> datetime: + """Habitica daystart.""" + return dt_util.start_of_local_day( + datetime.fromisoformat(self.coordinator.data.user["lastCron"]) + ) + + def end_date(self, recurrence: datetime, end: datetime | None = None) -> date: + """Calculate the end date for a yesterdaily. + + The enddates of events from yesterday move forward to the end + of the current day (until the cron resets the dailies) to show them + as still active events on the calendar state entity (state: on). 
+ + Events in the calendar view will show all-day events on their due day. + """ + if end: + return recurrence.date() + timedelta(days=1) + return ( + dt_util.start_of_local_day() if recurrence == self.today else recurrence + ).date() + timedelta(days=1) + + def get_recurrence_dates( + self, recurrences: rrule, start_date: datetime, end_date: datetime | None = None + ) -> list[datetime]: + """Calculate recurrence dates based on start_date and end_date.""" + if end_date: + return recurrences.between( + start_date, end_date - timedelta(days=1), inc=True + ) + # if no end_date is given, return only the next recurrence + return [recurrences.after(self.today, inc=True)] + + def due_dailies( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Get dailies and recurrences for a given period or the next upcoming.""" + + # we only have dailies for today and future recurrences + if end_date and end_date < self.today: + return [] + start_date = max(start_date, self.today) + + events = [] + for task in self.coordinator.data.tasks: + # only dailies that are not 'grey dailies' + if not (task["type"] == HabiticaTaskType.DAILY and task["everyX"]): + continue + + recurrences = build_rrule(task) + recurrence_dates = self.get_recurrence_dates( + recurrences, start_date, end_date + ) + for recurrence in recurrence_dates: + is_future_event = recurrence > self.today + is_current_event = recurrence <= self.today and not task["completed"] + + if not (is_future_event or is_current_event): + continue + + events.append( + CalendarEvent( + start=recurrence.date(), + end=self.end_date(recurrence, end_date), + summary=task["text"], + description=task["notes"], + uid=task["id"], + rrule=get_recurrence_rule(recurrences), + ) + ) + return sorted( + events, + key=lambda event: ( + event.start, + self.coordinator.data.user["tasksOrder"]["dailys"].index(event.uid), + ), + ) + + @property + def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + return next(iter(self.due_dailies(self.today)), None) + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Return calendar events within a datetime range.""" + + return self.due_dailies(start_date, end_date) + + @property + def extra_state_attributes(self) -> dict[str, bool | None] | None: + """Return entity specific state attributes.""" + return { + "yesterdaily": self.event.start < self.today.date() if self.event else None + } diff --git a/homeassistant/components/habitica/config_flow.py b/homeassistant/components/habitica/config_flow.py index 2947032c41ef76..88f3d1b803c8ed 100644 --- a/homeassistant/components/habitica/config_flow.py +++ b/homeassistant/components/habitica/config_flow.py @@ -18,9 +18,7 @@ CONF_USERNAME, CONF_VERIFY_SSL, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.selector import ( TextSelector, TextSelectorConfig, @@ -178,21 +176,3 @@ async def async_step_advanced( ), errors=errors, ) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import habitica config from configuration.yaml.""" - - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - breaks_in_ha_version="2024.11.0", - severity=IssueSeverity.WARNING, -
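The dailies calendar above leans on two dateutil rrule queries: between() collects the occurrences inside a calendar-view window, and after() returns only the next occurrence that backs the entity state. A minimal sketch of those two calls with an invented "every 2 days" daily (requires python-dateutil; not part of this patch). The window query steps the requested end back by one day and passes inc=True, the same way the code above makes the end date exclusive:

    from datetime import datetime, timedelta
    from dateutil.rrule import DAILY, rrule

    # an invented daily that repeats every 2 days, starting 2024-11-01
    recurrences = rrule(freq=DAILY, interval=2, dtstart=datetime(2024, 11, 1))

    today = datetime(2024, 11, 10)

    # calendar-view query: every occurrence inside a one-week window
    window = recurrences.between(today, today + timedelta(days=7) - timedelta(days=1), inc=True)
    print([d.date().isoformat() for d in window])    # ['2024-11-11', '2024-11-13', '2024-11-15']

    # state query: only the next occurrence on or after today
    print(recurrences.after(today, inc=True).date())  # 2024-11-11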
translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Habitica", - }, - ) - return await self.async_step_advanced(import_data) diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index f089be1b73618c..ae98cb13dcb521 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -25,4 +25,20 @@ ATTR_CONFIG_ENTRY = "config_entry" ATTR_SKILL = "skill" ATTR_TASK = "task" +ATTR_DIRECTION = "direction" SERVICE_CAST_SKILL = "cast_skill" +SERVICE_START_QUEST = "start_quest" +SERVICE_ACCEPT_QUEST = "accept_quest" +SERVICE_CANCEL_QUEST = "cancel_quest" +SERVICE_ABORT_QUEST = "abort_quest" +SERVICE_REJECT_QUEST = "reject_quest" +SERVICE_LEAVE_QUEST = "leave_quest" +SERVICE_SCORE_HABIT = "score_habit" +SERVICE_SCORE_REWARD = "score_reward" + +WARRIOR = "warrior" +ROGUE = "rogue" +HEALER = "healer" +MAGE = "wizard" + +DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf" diff --git a/homeassistant/components/habitica/coordinator.py b/homeassistant/components/habitica/coordinator.py index 4e949b703fb3e7..f9ffb1b53bd25e 100644 --- a/homeassistant/components/habitica/coordinator.py +++ b/homeassistant/components/habitica/coordinator.py @@ -51,17 +51,22 @@ def __init__(self, hass: HomeAssistant, habitipy: HabitipyAsync) -> None: ), ) self.api = habitipy + self.content: dict[str, Any] = {} async def _async_update_data(self) -> HabiticaData: try: user_response = await self.api.user.get() tasks_response = await self.api.tasks.user.get() tasks_response.extend(await self.api.tasks.user.get(type="completedTodos")) + if not self.content: + self.content = await self.api.content.get( + language=user_response["preferences"]["language"] + ) except ClientResponseError as error: if error.status == HTTPStatus.TOO_MANY_REQUESTS: - _LOGGER.debug("Currently rate limited, skipping update") + _LOGGER.debug("Rate limit exceeded, will try again later") return self.data - raise UpdateFailed(f"Error communicating with API: {error}") from error + raise UpdateFailed(f"Unable to connect to Habitica: {error}") from error return HabiticaData(user=user_response, tasks=tasks_response) diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index 544c28e4b9d3fb..d33b9c60c966bc 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -20,6 +20,50 @@ }, "revive": { "default": "mdi:grave-stone" + }, + "mpheal": { + "default": "mdi:broadcast" + }, + "earth": { + "default": "mdi:landslide" + }, + "frost": { + "default": "mdi:snowflake" + }, + "defensive_stance": { + "default": "mdi:shield-sword" + }, + "valorous_presence": { + "default": "mdi:shield-sun" + }, + "intimidate": { + "default": "mdi:emoticon-angry" + }, + "tools_of_trade": { + "default": "mdi:domino-mask" + }, + "stealth": { + "default": "mdi:ninja" + }, + "heal": { + "default": "mdi:aurora" + }, + "brightness": { + "default": "mdi:flare" + }, + "protect_aura": { + "default": "mdi:shimmer" + }, + "heal_all": { + "default": "mdi:hand-heart-outline" + } + }, + "calendar": { + "todos": { + "default": "mdi:calendar-check" + }, + "dailys": { + "default": "mdi:calendar-multiple" } }, "sensor": { @@ -82,6 +126,18 @@ }, "rewards": { "default": "mdi:treasure-chest" + }, + "strength": { + "default": "mdi:arm-flex-outline" + }, + "intelligence": { + "default": "mdi:head-snowflake-outline" + }, + "perception": { + "default": "mdi:eye-outline" + 
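The coordinator change above downloads the static game content only on the first refresh and reuses it afterwards. A stripped-down sketch of that fetch-once pattern, with a stand-in client instead of the habitipy API (illustrative only):

    import asyncio

    class FakeApi:
        """Stand-in for the habitipy client used by the coordinator."""

        calls = 0

        async def content(self, language: str) -> dict:
            FakeApi.calls += 1          # pretend this is the expensive /content download
            return {"language": language, "gear": {}}

    class Coordinator:
        def __init__(self, api: FakeApi) -> None:
            self.api = api
            self.content: dict = {}

        async def refresh(self) -> None:
            if not self.content:        # only the very first refresh pays for the download
                self.content = await self.api.content(language="en")

    async def main() -> None:
        coordinator = Coordinator(FakeApi())
        await coordinator.refresh()
        await coordinator.refresh()     # the second refresh reuses the cached content
        print(FakeApi.calls)            # 1

    asyncio.run(main())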
}, + "constitution": { + "default": "mdi:run-fast" } }, "switch": { @@ -91,6 +147,14 @@ "on": "mdi:sleep" } } + }, + "binary_sensor": { + "pending_quest": { + "default": "mdi:script-outline", + "state": { + "on": "mdi:script-text-outline" + } + } } }, "services": { @@ -99,6 +163,30 @@ }, "cast_skill": { "service": "mdi:creation-outline" + }, + "accept_quest": { + "service": "mdi:script-text" + }, + "reject_quest": { + "service": "mdi:script-text" + }, + "leave_quest": { + "service": "mdi:script-text" + }, + "abort_quest": { + "service": "mdi:script-text-key" + }, + "cancel_quest": { + "service": "mdi:script-text-key" + }, + "start_quest": { + "service": "mdi:script-text-key" + }, + "score_habit": { + "service": "mdi:counter" + }, + "score_reward": { + "service": "mdi:sack" } } } diff --git a/homeassistant/components/habitica/manifest.json b/homeassistant/components/habitica/manifest.json index 16a4ef959a8ee0..8e3396d32cf6bb 100644 --- a/homeassistant/components/habitica/manifest.json +++ b/homeassistant/components/habitica/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/habitica", "iot_class": "cloud_polling", "loggers": ["habitipy", "plumbum"], - "requirements": ["habitipy==0.3.1"] + "requirements": ["habitipy==0.3.3"] } diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index ccf1e998049969..3b2395ecc52507 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -24,10 +24,10 @@ ) from homeassistant.helpers.typing import StateType -from . import HabiticaConfigEntry from .const import DOMAIN, UNIT_TASKS from .entity import HabiticaBase -from .util import entity_used_in +from .types import HabiticaConfigEntry +from .util import entity_used_in, get_attribute_points, get_attributes_total _LOGGER = logging.getLogger(__name__) @@ -36,7 +36,10 @@ class HabitipySensorEntityDescription(SensorEntityDescription): """Habitipy Sensor Description.""" - value_fn: Callable[[dict[str, Any]], StateType] + value_fn: Callable[[dict[str, Any], dict[str, Any]], StateType] + attributes_fn: ( + Callable[[dict[str, Any], dict[str, Any]], dict[str, Any] | None] | None + ) = None @dataclass(kw_only=True, frozen=True) @@ -65,76 +68,80 @@ class HabitipySensorEntity(StrEnum): REWARDS = "rewards" GEMS = "gems" TRINKETS = "trinkets" + STRENGTH = "strength" + INTELLIGENCE = "intelligence" + CONSTITUTION = "constitution" + PERCEPTION = "perception" SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] 
= ( HabitipySensorEntityDescription( key=HabitipySensorEntity.DISPLAY_NAME, translation_key=HabitipySensorEntity.DISPLAY_NAME, - value_fn=lambda user: user.get("profile", {}).get("name"), + value_fn=lambda user, _: user.get("profile", {}).get("name"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH, translation_key=HabitipySensorEntity.HEALTH, native_unit_of_measurement="HP", suggested_display_precision=0, - value_fn=lambda user: user.get("stats", {}).get("hp"), + value_fn=lambda user, _: user.get("stats", {}).get("hp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH_MAX, translation_key=HabitipySensorEntity.HEALTH_MAX, native_unit_of_measurement="HP", entity_registry_enabled_default=False, - value_fn=lambda user: user.get("stats", {}).get("maxHealth"), + value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA, translation_key=HabitipySensorEntity.MANA, native_unit_of_measurement="MP", suggested_display_precision=0, - value_fn=lambda user: user.get("stats", {}).get("mp"), + value_fn=lambda user, _: user.get("stats", {}).get("mp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA_MAX, translation_key=HabitipySensorEntity.MANA_MAX, native_unit_of_measurement="MP", - value_fn=lambda user: user.get("stats", {}).get("maxMP"), + value_fn=lambda user, _: user.get("stats", {}).get("maxMP"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE, translation_key=HabitipySensorEntity.EXPERIENCE, native_unit_of_measurement="XP", - value_fn=lambda user: user.get("stats", {}).get("exp"), + value_fn=lambda user, _: user.get("stats", {}).get("exp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE_MAX, translation_key=HabitipySensorEntity.EXPERIENCE_MAX, native_unit_of_measurement="XP", - value_fn=lambda user: user.get("stats", {}).get("toNextLevel"), + value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.LEVEL, translation_key=HabitipySensorEntity.LEVEL, - value_fn=lambda user: user.get("stats", {}).get("lvl"), + value_fn=lambda user, _: user.get("stats", {}).get("lvl"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.GOLD, translation_key=HabitipySensorEntity.GOLD, native_unit_of_measurement="GP", suggested_display_precision=2, - value_fn=lambda user: user.get("stats", {}).get("gp"), + value_fn=lambda user, _: user.get("stats", {}).get("gp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.CLASS, translation_key=HabitipySensorEntity.CLASS, - value_fn=lambda user: user.get("stats", {}).get("class"), + value_fn=lambda user, _: user.get("stats", {}).get("class"), device_class=SensorDeviceClass.ENUM, options=["warrior", "healer", "wizard", "rogue"], ), HabitipySensorEntityDescription( key=HabitipySensorEntity.GEMS, translation_key=HabitipySensorEntity.GEMS, - value_fn=lambda user: user.get("balance", 0) * 4, + value_fn=lambda user, _: user.get("balance", 0) * 4, suggested_display_precision=0, native_unit_of_measurement="gems", ), @@ -142,7 +149,7 @@ class HabitipySensorEntity(StrEnum): key=HabitipySensorEntity.TRINKETS, translation_key=HabitipySensorEntity.TRINKETS, value_fn=( - lambda user: user.get("purchased", {}) + lambda user, _: user.get("purchased", {}) .get("plan", {}) .get("consecutive", {}) .get("trinkets", 0) @@ -150,6 +157,38 @@ class HabitipySensorEntity(StrEnum): suggested_display_precision=0, 
native_unit_of_measurement="⧖", ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.STRENGTH, + translation_key=HabitipySensorEntity.STRENGTH, + value_fn=lambda user, content: get_attributes_total(user, content, "str"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "str"), + suggested_display_precision=0, + native_unit_of_measurement="STR", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.INTELLIGENCE, + translation_key=HabitipySensorEntity.INTELLIGENCE, + value_fn=lambda user, content: get_attributes_total(user, content, "int"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "int"), + suggested_display_precision=0, + native_unit_of_measurement="INT", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.PERCEPTION, + translation_key=HabitipySensorEntity.PERCEPTION, + value_fn=lambda user, content: get_attributes_total(user, content, "per"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "per"), + suggested_display_precision=0, + native_unit_of_measurement="PER", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.CONSTITUTION, + translation_key=HabitipySensorEntity.CONSTITUTION, + value_fn=lambda user, content: get_attributes_total(user, content, "con"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "con"), + suggested_display_precision=0, + native_unit_of_measurement="CON", + ), ) @@ -243,7 +282,16 @@ class HabitipySensor(HabiticaBase, SensorEntity): def native_value(self) -> StateType: """Return the state of the device.""" - return self.entity_description.value_fn(self.coordinator.data.user) + return self.entity_description.value_fn( + self.coordinator.data.user, self.coordinator.content + ) + + @property + def extra_state_attributes(self) -> dict[str, float | None] | None: + """Return entity specific state attributes.""" + if func := self.entity_description.attributes_fn: + return func(self.coordinator.data.user, self.coordinator.content) + return None class HabitipyTaskSensor(HabiticaBase, SensorEntity): diff --git a/homeassistant/components/habitica/services.py b/homeassistant/components/habitica/services.py new file mode 100644 index 00000000000000..a50e5f1e6e3ed4 --- /dev/null +++ b/homeassistant/components/habitica/services.py @@ -0,0 +1,325 @@ +"""Actions for the Habitica integration.""" + +from __future__ import annotations + +from http import HTTPStatus +import logging +from typing import Any + +from aiohttp import ClientResponseError +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_NAME, CONF_NAME +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.selector import ConfigEntrySelector + +from .const import ( + ATTR_ARGS, + ATTR_CONFIG_ENTRY, + ATTR_DATA, + ATTR_DIRECTION, + ATTR_PATH, + ATTR_SKILL, + ATTR_TASK, + DOMAIN, + EVENT_API_CALL_SUCCESS, + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_API_CALL, + SERVICE_CANCEL_QUEST, + SERVICE_CAST_SKILL, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_SCORE_HABIT, + SERVICE_SCORE_REWARD, + SERVICE_START_QUEST, +) +from .types import HabiticaConfigEntry + +_LOGGER = 
logging.getLogger(__name__) + + +SERVICE_API_CALL_SCHEMA = vol.Schema( + { + vol.Required(ATTR_NAME): str, + vol.Required(ATTR_PATH): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_ARGS): dict, + } +) + +SERVICE_CAST_SKILL_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_SKILL): cv.string, + vol.Optional(ATTR_TASK): cv.string, + } +) + +SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + } +) +SERVICE_SCORE_TASK_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_TASK): cv.string, + vol.Optional(ATTR_DIRECTION): cv.string, + } +) + + +def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry: + """Return config entry or raise if not found or not loaded.""" + if not (entry := hass.config_entries.async_get_entry(entry_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_found", + ) + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_loaded", + ) + return entry + + +def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 + """Set up services for Habitica integration.""" + + async def handle_api_call(call: ServiceCall) -> None: + async_create_issue( + hass, + DOMAIN, + "deprecated_api_call", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_api_call", + ) + _LOGGER.warning( + "Deprecated action called: 'habitica.api_call' is deprecated and will be removed in Home Assistant version 2025.6.0" + ) + + name = call.data[ATTR_NAME] + path = call.data[ATTR_PATH] + entries = hass.config_entries.async_entries(DOMAIN) + + api = None + for entry in entries: + if entry.data[CONF_NAME] == name: + api = entry.runtime_data.api + break + if api is None: + _LOGGER.error("API_CALL: User '%s' not configured", name) + return + try: + for element in path: + api = api[element] + except KeyError: + _LOGGER.error( + "API_CALL: Path %s is invalid for API on '{%s}' element", path, element + ) + return + kwargs = call.data.get(ATTR_ARGS, {}) + data = await api(**kwargs) + hass.bus.async_fire( + EVENT_API_CALL_SUCCESS, {ATTR_NAME: name, ATTR_PATH: path, ATTR_DATA: data} + ) + + async def cast_skill(call: ServiceCall) -> ServiceResponse: + """Skill action.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + skill = { + "pickpocket": {"spellId": "pickPocket", "cost": "10 MP"}, + "backstab": {"spellId": "backStab", "cost": "15 MP"}, + "smash": {"spellId": "smash", "cost": "10 MP"}, + "fireball": {"spellId": "fireball", "cost": "10 MP"}, + } + try: + task_id = next( + task["id"] + for task in coordinator.data.tasks + if call.data[ATTR_TASK] in (task["id"], task.get("alias")) + or call.data[ATTR_TASK] == task["text"] + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="task_not_found", + translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, + ) from e + + try: + response: dict[str, Any] = await coordinator.api.user.class_.cast[ + skill[call.data[ATTR_SKILL]]["spellId"] + ].post(targetId=task_id) + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == 
HTTPStatus.UNAUTHORIZED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="not_enough_mana", + translation_placeholders={ + "cost": skill[call.data[ATTR_SKILL]]["cost"], + "mana": f"{int(coordinator.data.user.get("stats", {}).get("mp", 0))} MP", + }, + ) from e + if e.status == HTTPStatus.NOT_FOUND: + # could also be task not found, but the task is looked up + # before the request, so most likely wrong skill selected + # or the skill hasn't been unlocked yet. + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="skill_not_found", + translation_placeholders={"skill": call.data[ATTR_SKILL]}, + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await coordinator.async_request_refresh() + return response + + async def manage_quests(call: ServiceCall) -> ServiceResponse: + """Accept, reject, start, leave or cancel quests.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + + COMMAND_MAP = { + SERVICE_ABORT_QUEST: "abort", + SERVICE_ACCEPT_QUEST: "accept", + SERVICE_CANCEL_QUEST: "cancel", + SERVICE_LEAVE_QUEST: "leave", + SERVICE_REJECT_QUEST: "reject", + SERVICE_START_QUEST: "force-start", + } + try: + return await coordinator.api.groups.party.quests[ + COMMAND_MAP[call.service] + ].post() + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="quest_action_unallowed" + ) from e + if e.status == HTTPStatus.NOT_FOUND: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="quest_not_found" + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="service_call_exception" + ) from e + + for service in ( + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_CANCEL_QUEST, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_START_QUEST, + ): + hass.services.async_register( + DOMAIN, + service, + manage_quests, + schema=SERVICE_MANAGE_QUEST_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + async def score_task(call: ServiceCall) -> ServiceResponse: + """Score a task action.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + try: + task_id, task_value = next( + (task["id"], task.get("value")) + for task in coordinator.data.tasks + if call.data[ATTR_TASK] in (task["id"], task.get("alias")) + or call.data[ATTR_TASK] == task["text"] + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="task_not_found", + translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, + ) from e + + try: + response: dict[str, Any] = ( + await coordinator.api.tasks[task_id] + .score[call.data.get(ATTR_DIRECTION, "up")] + .post() + ) + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED and task_value is not None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="not_enough_gold", + translation_placeholders={ + "gold": f"{coordinator.data.user["stats"]["gp"]:.2f} GP", + "cost": 
f"{task_value} GP", + }, + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await coordinator.async_request_refresh() + return response + + hass.services.async_register( + DOMAIN, + SERVICE_API_CALL, + handle_api_call, + schema=SERVICE_API_CALL_SCHEMA, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_CAST_SKILL, + cast_skill, + schema=SERVICE_CAST_SKILL_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SCORE_HABIT, + score_task, + schema=SERVICE_SCORE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( + DOMAIN, + SERVICE_SCORE_REWARD, + score_task, + schema=SERVICE_SCORE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/habitica/services.yaml b/homeassistant/components/habitica/services.yaml index 546ac8c1c342da..b539f6c65bf926 100644 --- a/homeassistant/components/habitica/services.yaml +++ b/homeassistant/components/habitica/services.yaml @@ -17,7 +17,7 @@ api_call: object: cast_skill: fields: - config_entry: + config_entry: &config_entry required: true selector: config_entry: @@ -33,7 +33,42 @@ cast_skill: - "fireball" mode: dropdown translation_key: "skill_select" - task: + task: &task required: true selector: text: +accept_quest: + fields: + config_entry: *config_entry +reject_quest: + fields: + config_entry: *config_entry +start_quest: + fields: + config_entry: *config_entry +cancel_quest: + fields: + config_entry: *config_entry +abort_quest: + fields: + config_entry: *config_entry +leave_quest: + fields: + config_entry: *config_entry +score_habit: + fields: + config_entry: *config_entry + task: *task + direction: + required: true + selector: + select: + options: + - value: up + label: "➕" + - value: down + label: "➖" +score_reward: + fields: + config_entry: *config_entry + task: *task diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 824b3ab3457507..ac1faf5fcef1dd 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -1,4 +1,9 @@ { + "common": { + "todos": "To-Do's", + "dailies": "Dailies", + "config_entry_name": "Select character" + }, "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" @@ -34,6 +39,11 @@ } }, "entity": { + "binary_sensor": { + "pending_quest": { + "name": "Pending quest invitation" + } + }, "button": { "run_cron": { "name": "Start my day" @@ -46,6 +56,59 @@ }, "revive": { "name": "Revive from death" + }, + "mpheal": { + "name": "Ethereal surge" + }, + "earth": { + "name": "Earthquake" + }, + "frost": { + "name": "Chilling frost" + }, + "defensive_stance": { + "name": "Defensive stance" + }, + "valorous_presence": { + "name": "Valorous presence" + }, + "intimidate": { + "name": "Intimidating gaze" + }, + "tools_of_trade": { + "name": "Tools of the trade" + }, + "stealth": { + "name": "Stealth" + }, + "heal": { + "name": "Healing light" + }, + "brightness": { + "name": "Searing brightness" + }, + "protect_aura": { + "name": "Protective aura" + }, + "heal_all": { + "name": "Blessing" + } + }, + "calendar": { + "todos": { + "name": "[%key:component::habitica::common::todos%]" + }, + "dailys": { + "name": "[%key:component::habitica::common::dailies%]", + "state_attributes": { + "yesterdaily": { + "name": "Yester-Daily", + "state": { + "true": 
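The service handlers above all follow the same shape: well-known HTTP statuses from the Habitica API become a ServiceValidationError with a specific translation key, and anything unexpected becomes a generic HomeAssistantError. A self-contained sketch of that mapping, using the keys from the cast_skill handler as an example; the exception classes here are local stand-ins, not the Home Assistant ones:

    from http import HTTPStatus

    class ServiceValidationError(Exception):
        """The user asked for something that cannot work (bad input, not enough mana)."""

    class HomeAssistantError(Exception):
        """Unexpected failure while talking to the service."""

    RECOVERABLE = {
        HTTPStatus.TOO_MANY_REQUESTS: "setup_rate_limit_exception",
        HTTPStatus.UNAUTHORIZED: "not_enough_mana",
        HTTPStatus.NOT_FOUND: "skill_not_found",
    }

    def translate(status: HTTPStatus) -> Exception:
        """Map a response status to the exception a handler would raise."""
        if key := RECOVERABLE.get(status):
            return ServiceValidationError(key)
        return HomeAssistantError("service_call_exception")

    print(type(translate(HTTPStatus.NOT_FOUND)).__name__)              # ServiceValidationError
    print(type(translate(HTTPStatus.INTERNAL_SERVER_ERROR)).__name__)  # HomeAssistantError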
"[%key:common::state::yes%]", + "false": "[%key:common::state::no%]" + } + } + } } }, "sensor": { @@ -92,16 +155,96 @@ } }, "todos": { - "name": "To-Do's" + "name": "[%key:component::habitica::common::todos%]" }, "dailys": { - "name": "Dailies" + "name": "[%key:component::habitica::common::dailies%]" }, "habits": { "name": "Habits" }, "rewards": { "name": "Rewards" + }, + "strength": { + "name": "Strength", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "Battle gear" + }, + "class": { + "name": "Class equip bonus" + }, + "allocated": { + "name": "Allocated attribute points" + }, + "buffs": { + "name": "Buffs" + } + } + }, + "intelligence": { + "name": "Intelligence", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } + }, + "perception": { + "name": "Perception", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } + }, + "constitution": { + "name": "Constitution", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } } }, "switch": { @@ -111,10 +254,10 @@ }, "todo": { "todos": { - "name": "To-Do's" + "name": "[%key:component::habitica::common::todos%]" }, "dailys": { - "name": "Dailies" + "name": "[%key:component::habitica::common::dailies%]" } } }, @@ -147,10 +290,10 @@ "message": "Unable to create new to-do `{name}` for Habitica, please try again" }, "setup_rate_limit_exception": { - "message": "Currently rate limited, try again later" + "message": "Rate limit exceeded, try again later" }, "service_call_unallowed": { - "message": "Unable to carry out this action, because the required conditions are not met" + "message": "Unable to complete action, the required conditions are not met" }, "service_call_exception": { "message": "Unable to connect to Habitica, try again later" @@ -158,20 +301,36 @@ "not_enough_mana": { "message": "Unable to cast skill, not enough mana. 
Your character has {mana}, but the skill costs {cost}." }, + "not_enough_gold": { + "message": "Unable to buy reward, not enough gold. Your character has {gold}, but the reward costs {cost}." + }, "skill_not_found": { "message": "Unable to cast skill, your character does not have the skill or spell {skill}." }, "entry_not_found": { - "message": "The selected character is currently not configured or loaded in Home Assistant." + "message": "The selected character is not configured in Home Assistant." + }, + "entry_not_loaded": { + "message": "The selected character is currently not loaded or disabled in Home Assistant." }, "task_not_found": { - "message": "Unable to cast skill, could not find the task {task}" + "message": "Unable to complete action, could not find the task {task}" + }, + "quest_action_unallowed": { + "message": "Action not allowed, only quest leader or group leader can perform this action" + }, + "quest_not_found": { + "message": "Unable to complete action, quest or group not found" } }, "issues": { "deprecated_task_entity": { "title": "The Habitica {task_name} sensor is deprecated", "description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to replace the sensor entity with the newly added todo entity.\nWhen you are done migrating you can disable `{entity}`." + }, + "deprecated_api_call": { + "title": "The Habitica action habitica.api_call is deprecated", + "description": "The Habitica action `habitica.api_call` is deprecated and will be removed in Home Assistant 2025.5.0.\n\nPlease update your automations and scripts to use other Habitica actions and entities." } }, "services": { @@ -198,7 +357,7 @@ "description": "Use a skill or spell from your Habitica character on a specific task to affect its progress or status.", "fields": { "config_entry": { - "name": "Select character", + "name": "[%key:component::habitica::common::config_entry_name%]", "description": "Choose the Habitica character to cast the skill." }, "skill": { @@ -210,6 +369,98 @@ "description": "The name (or task ID) of the task you want to target with the skill or spell." } } + }, + "accept_quest": { + "name": "Accept a quest invitation", + "description": "Accept a pending invitation to a quest.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Choose the Habitica character for which to perform the action." + } + } + }, + "reject_quest": { + "name": "Reject a quest invitation", + "description": "Reject a pending invitation to a quest.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "leave_quest": { + "name": "Leave a quest", + "description": "Leave the current quest you are participating in.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "abort_quest": { + "name": "Abort an active quest", + "description": "Terminate your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. 
Only quest leader or group leader can perform this action.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "cancel_quest": { + "name": "Cancel a pending quest", + "description": "Cancel a quest that has not yet started. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "start_quest": { + "name": "Force-start a pending quest", + "description": "Begin the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "score_habit": { + "name": "Track a habit", + "description": "Increase the positive or negative streak of a habit to track its progress.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica character tracking your habit." + }, + "task": { + "name": "Habit name", + "description": "The name (or task ID) of the Habitica habit." + }, + "direction": { + "name": "Reward or loss", + "description": "Whether to track positive or negative progress for your habit." + } + } + }, + "score_reward": { + "name": "Buy a reward", + "description": "Reward yourself and buy one of your custom rewards with gold earned by fulfilling tasks.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica character buying the reward." + }, + "task": { + "name": "Reward name", + "description": "The name (or task ID) of the custom reward." + } + } } }, "selector": { diff --git a/homeassistant/components/habitica/switch.py b/homeassistant/components/habitica/switch.py index c83d2332030f76..6682911e8928ab 100644 --- a/homeassistant/components/habitica/switch.py +++ b/homeassistant/components/habitica/switch.py @@ -15,9 +15,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HabiticaConfigEntry from .coordinator import HabiticaData, HabiticaDataUpdateCoordinator from .entity import HabiticaBase +from .types import HabiticaConfigEntry @dataclass(kw_only=True, frozen=True) diff --git a/homeassistant/components/habitica/todo.py b/homeassistant/components/habitica/todo.py index ae739d4726295e..0fff7b66605b8d 100644 --- a/homeassistant/components/habitica/todo.py +++ b/homeassistant/components/habitica/todo.py @@ -21,10 +21,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . 
import HabiticaConfigEntry from .const import ASSETS_URL, DOMAIN from .coordinator import HabiticaDataUpdateCoordinator from .entity import HabiticaBase +from .types import HabiticaConfigEntry, HabiticaTaskType from .util import next_due_date @@ -37,15 +37,6 @@ class HabiticaTodoList(StrEnum): REWARDS = "rewards" -class HabiticaTaskType(StrEnum): - """Habitica Entities.""" - - HABIT = "habit" - DAILY = "daily" - TODO = "todo" - REWARD = "reward" - - async def async_setup_entry( hass: HomeAssistant, config_entry: HabiticaConfigEntry, diff --git a/homeassistant/components/habitica/types.py b/homeassistant/components/habitica/types.py new file mode 100644 index 00000000000000..9789a65dc40f18 --- /dev/null +++ b/homeassistant/components/habitica/types.py @@ -0,0 +1,18 @@ +"""Types for Habitica integration.""" + +from enum import StrEnum + +from homeassistant.config_entries import ConfigEntry + +from .coordinator import HabiticaDataUpdateCoordinator + +type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator] + + +class HabiticaTaskType(StrEnum): + """Habitica Entities.""" + + HABIT = "habit" + DAILY = "daily" + TODO = "todo" + REWARD = "reward" diff --git a/homeassistant/components/habitica/util.py b/homeassistant/components/habitica/util.py index 0ac3ea2a4e2e5e..03acb08baf9de4 100644 --- a/homeassistant/components/habitica/util.py +++ b/homeassistant/components/habitica/util.py @@ -3,8 +3,24 @@ from __future__ import annotations import datetime +from math import floor from typing import TYPE_CHECKING, Any +from dateutil.rrule import ( + DAILY, + FR, + MO, + MONTHLY, + SA, + SU, + TH, + TU, + WE, + WEEKLY, + YEARLY, + rrule, +) + from homeassistant.components.automation import automations_with_entity from homeassistant.components.script import scripts_with_entity from homeassistant.core import HomeAssistant @@ -14,6 +30,9 @@ def next_due_date(task: dict[str, Any], last_cron: str) -> datetime.date | None: """Calculate due date for dailies and yesterdailies.""" + if task["everyX"] == 0 or not task.get("nextDue"): # grey dailies never become due + return None + today = to_date(last_cron) startdate = to_date(task["startDate"]) if TYPE_CHECKING: @@ -59,3 +78,114 @@ def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: used_in = automations_with_entity(hass, entity_id) used_in += scripts_with_entity(hass, entity_id) return used_in + + +FREQUENCY_MAP = {"daily": DAILY, "weekly": WEEKLY, "monthly": MONTHLY, "yearly": YEARLY} +WEEKDAY_MAP = {"m": MO, "t": TU, "w": WE, "th": TH, "f": FR, "s": SA, "su": SU} + + +def build_rrule(task: dict[str, Any]) -> rrule: + """Build rrule string.""" + + rrule_frequency = FREQUENCY_MAP.get(task["frequency"], DAILY) + weekdays = [ + WEEKDAY_MAP[day] for day, is_active in task["repeat"].items() if is_active + ] + bymonthday = ( + task["daysOfMonth"] + if rrule_frequency == MONTHLY and task["daysOfMonth"] + else None + ) + + bysetpos = None + if rrule_frequency == MONTHLY and task["weeksOfMonth"]: + bysetpos = task["weeksOfMonth"] + weekdays = weekdays if weekdays else [MO] + + return rrule( + freq=rrule_frequency, + interval=task["everyX"], + dtstart=dt_util.start_of_local_day( + datetime.datetime.fromisoformat(task["startDate"]) + ), + byweekday=weekdays if rrule_frequency in [WEEKLY, MONTHLY] else None, + bymonthday=bymonthday, + bysetpos=bysetpos, + ) + + +def get_recurrence_rule(recurrence: rrule) -> str: + r"""Extract and return the recurrence rule portion of an RRULE. 
+ + This function takes an RRULE representing a task's recurrence pattern, + builds the RRULE string, and extracts the recurrence rule part. + + 'DTSTART:YYYYMMDDTHHMMSS\nRRULE:FREQ=YEARLY;INTERVAL=2' + + Parameters + ---------- + recurrence : rrule + An RRULE object. + + Returns + ------- + str + The recurrence rule portion of the RRULE string, starting with 'FREQ='. + + Example + ------- + >>> rule = get_recurrence_rule(task) + >>> print(rule) + 'FREQ=YEARLY;INTERVAL=2' + + """ + return str(recurrence).split("RRULE:")[1] + + +def get_attribute_points( + user: dict[str, Any], content: dict[str, Any], attribute: str +) -> dict[str, float]: + """Get modifiers contributing to strength attribute.""" + + gear_set = { + "weapon", + "armor", + "head", + "shield", + "back", + "headAccessory", + "eyewear", + "body", + } + + equipment = sum( + stats[attribute] + for gear in gear_set + if (equipped := user["items"]["gear"]["equipped"].get(gear)) + and (stats := content["gear"]["flat"].get(equipped)) + ) + + class_bonus = sum( + stats[attribute] / 2 + for gear in gear_set + if (equipped := user["items"]["gear"]["equipped"].get(gear)) + and (stats := content["gear"]["flat"].get(equipped)) + and stats["klass"] == user["stats"]["class"] + ) + + return { + "level": min(round(user["stats"]["lvl"] / 2), 50), + "equipment": equipment, + "class": class_bonus, + "allocated": user["stats"][attribute], + "buffs": user["stats"]["buffs"][attribute], + } + + +def get_attributes_total( + user: dict[str, Any], content: dict[str, Any], attribute: str +) -> int: + """Get total attribute points.""" + return floor( + sum(value for value in get_attribute_points(user, content, attribute).values()) + ) diff --git a/homeassistant/components/hardkernel/__init__.py b/homeassistant/components/hardkernel/__init__.py index 5d70f6cbfe0c12..66d2fa9d154042 100644 --- a/homeassistant/components/hardkernel/__init__.py +++ b/homeassistant/components/hardkernel/__init__.py @@ -2,10 +2,11 @@ from __future__ import annotations -from homeassistant.components.hassio import get_os_info, is_hassio +from homeassistant.components.hassio import get_os_info from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.hassio import is_hassio async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/hardkernel/config_flow.py b/homeassistant/components/hardkernel/config_flow.py index cf70adae55a9e0..5fa3611aa8665e 100644 --- a/homeassistant/components/hardkernel/config_flow.py +++ b/homeassistant/components/hardkernel/config_flow.py @@ -18,7 +18,4 @@ async def async_step_system( self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Hardkernel", data={}) diff --git a/homeassistant/components/hardkernel/manifest.json b/homeassistant/components/hardkernel/manifest.json index 2a528a5173ea50..aca1b207f4fd2d 100644 --- a/homeassistant/components/hardkernel/manifest.json +++ b/homeassistant/components/hardkernel/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware"], "documentation": "https://www.home-assistant.io/integrations/hardkernel", - "integration_type": "hardware" + "integration_type": "hardware", + "single_config_entry": true } diff --git 
a/homeassistant/components/harmony/config_flow.py b/homeassistant/components/harmony/config_flow.py index 87eb657a0a98af..b75ad617b39133 100644 --- a/homeassistant/components/harmony/config_flow.py +++ b/homeassistant/components/harmony/config_flow.py @@ -28,7 +28,6 @@ from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN, PREVIOUS_ACTIVE_ACTIVITY, UNIQUE_ID -from .data import HarmonyConfigEntry from .util import ( find_best_name_for_remote, find_unique_id_for_remote, @@ -156,7 +155,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def _async_create_entry_from_valid_input( self, validated: dict[str, Any], user_input: dict[str, Any] @@ -186,10 +185,6 @@ def _options_from_user_input(user_input: dict[str, Any]) -> dict[str, Any]: class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Harmony.""" - def __init__(self, config_entry: HarmonyConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/hassio/__init__.py b/homeassistant/components/hassio/__init__.py index 7aa4285314d592..306c9d43d7280e 100644 --- a/homeassistant/components/hassio/__init__.py +++ b/homeassistant/components/hassio/__init__.py @@ -5,11 +5,13 @@ import asyncio from contextlib import suppress from datetime import datetime +from functools import partial import logging import os import re from typing import Any, NamedTuple +from aiohasupervisor import SupervisorError import voluptuous as vol from homeassistant.auth.const import GROUP_ID_ADMIN @@ -37,7 +39,22 @@ discovery_flow, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.deprecation import ( + DeprecatedConstant, + all_with_deprecated_constants, + check_if_deprecated_constant, + deprecated_function, + dir_with_deprecated_constants, +) from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.hassio import ( + get_supervisor_ip as _get_supervisor_ip, + is_hassio as _is_hassio, +) +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.service_info.hassio import ( + HassioServiceInfo as _HassioServiceInfo, +) from homeassistant.helpers.storage import Store from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass @@ -92,29 +109,20 @@ get_info, # noqa: F401 get_issues_info, # noqa: F401 get_os_info, - get_store, # noqa: F401 get_supervisor_info, # noqa: F401 get_supervisor_stats, # noqa: F401 ) -from .discovery import HassioServiceInfo, async_setup_discovery_view # noqa: F401 +from .discovery import async_setup_discovery_view # noqa: F401 from .handler import ( # noqa: F401 HassIO, HassioAPIError, async_create_backup, - async_get_addon_discovery_info, - async_get_addon_store_info, async_get_green_settings, async_get_yellow_settings, - async_install_addon, async_reboot_host, - async_set_addon_options, async_set_green_settings, async_set_yellow_settings, - async_update_addon, - async_update_core, async_update_diagnostics, - async_update_os, - async_update_supervisor, get_supervisor_client, ) from .http import HassIOView @@ -124,6 +132,14 @@ _LOGGER = logging.getLogger(__name__) +get_supervisor_ip = deprecated_function( + 
"homeassistant.helpers.hassio.get_supervisor_ip", breaks_in_ha_version="2025.11" +)(_get_supervisor_ip) +_DEPRECATED_HassioServiceInfo = DeprecatedConstant( + _HassioServiceInfo, + "homeassistant.helpers.service_info.hassio.HassioServiceInfo", + "2025.11", +) STORAGE_KEY = DOMAIN STORAGE_VERSION = 1 @@ -279,21 +295,16 @@ def hostname_from_addon_slug(addon_slug: str) -> str: @callback +@deprecated_function( + "homeassistant.helpers.hassio.is_hassio", breaks_in_ha_version="2025.11" +) @bind_hass def is_hassio(hass: HomeAssistant) -> bool: """Return true if Hass.io is loaded. Async friendly. """ - return DOMAIN in hass.config.components - - -@callback -def get_supervisor_ip() -> str | None: - """Return the supervisor ip address.""" - if "SUPERVISOR" not in os.environ: - return None - return os.environ["SUPERVISOR"].partition(":")[0] + return _is_hassio(hass) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901 @@ -314,8 +325,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: host = os.environ["SUPERVISOR"] websession = async_get_clientsession(hass) hass.data[DOMAIN] = hassio = HassIO(hass.loop, websession, host) + supervisor_client = get_supervisor_client(hass) - if not await hassio.is_connected(): + try: + await supervisor_client.supervisor.ping() + except SupervisorError: _LOGGER.warning("Not connected with the supervisor / system too busy!") store = Store[dict[str, str]](hass, STORAGE_VERSION, STORAGE_KEY) @@ -395,6 +409,16 @@ async def push_config(_: Event | None) -> None: async def async_service_handler(service: ServiceCall) -> None: """Handle service calls for Hass.io.""" + if service.service == SERVICE_ADDON_UPDATE: + async_create_issue( + hass, + DOMAIN, + "update_service_deprecated", + breaks_in_ha_version="2025.5", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="update_service_deprecated", + ) api_endpoint = MAP_SERVICE_API[service.service] data = service.data.copy() @@ -424,12 +448,13 @@ async def async_service_handler(service: ServiceCall) -> None: async def update_info_data(_: datetime | None = None) -> None: """Update last available supervisor information.""" + supervisor_client = get_supervisor_client(hass) try: ( hass.data[DATA_INFO], hass.data[DATA_HOST_INFO], - hass.data[DATA_STORE], + store_info, hass.data[DATA_CORE_INFO], hass.data[DATA_SUPERVISOR_INFO], hass.data[DATA_OS_INFO], @@ -437,7 +462,7 @@ async def update_info_data(_: datetime | None = None) -> None: ) = await asyncio.gather( create_eager_task(hassio.get_info()), create_eager_task(hassio.get_host_info()), - create_eager_task(hassio.get_store()), + create_eager_task(supervisor_client.store.info()), create_eager_task(hassio.get_core_info()), create_eager_task(hassio.get_supervisor_info()), create_eager_task(hassio.get_os_info()), @@ -446,6 +471,8 @@ async def update_info_data(_: datetime | None = None) -> None: except HassioAPIError as err: _LOGGER.warning("Can't read Supervisor data: %s", err) + else: + hass.data[DATA_STORE] = store_info.to_dict() async_call_later( hass, @@ -459,9 +486,9 @@ async def update_info_data(_: datetime | None = None) -> None: async def _async_stop(hass: HomeAssistant, restart: bool) -> None: """Stop or restart home assistant.""" if restart: - await hassio.restart_homeassistant() + await supervisor_client.homeassistant.restart() else: - await hassio.stop_homeassistant() + await supervisor_client.homeassistant.stop() # Set a custom handler for the homeassistant.restart and homeassistant.stop 
services async_set_stop_handler(hass, _async_stop) @@ -542,3 +569,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data.pop(ADDONS_COORDINATOR, None) return unload_ok + + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) diff --git a/homeassistant/components/hassio/addon_manager.py b/homeassistant/components/hassio/addon_manager.py index 1d51ef30e0f7d8..db81e17e48d7fe 100644 --- a/homeassistant/components/hassio/addon_manager.py +++ b/homeassistant/components/hassio/addon_manager.py @@ -12,23 +12,16 @@ from aiohasupervisor import SupervisorError from aiohasupervisor.models import ( + AddonsOptions, AddonState as SupervisorAddonState, InstalledAddonComplete, + StoreAddonUpdate, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from .handler import ( - HassioAPIError, - async_create_backup, - async_get_addon_discovery_info, - async_get_addon_store_info, - async_install_addon, - async_set_addon_options, - async_update_addon, - get_supervisor_client, -) +from .handler import HassioAPIError, async_create_backup, get_supervisor_client type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Awaitable[_R]] type _ReturnFuncType[_T, **_P, _R] = Callable[ @@ -38,10 +31,13 @@ def api_error[_AddonManagerT: AddonManager, **_P, _R]( error_message: str, + *, + expected_error_type: type[HassioAPIError | SupervisorError] | None = None, ) -> Callable[ [_FuncType[_AddonManagerT, _P, _R]], _ReturnFuncType[_AddonManagerT, _P, _R] ]: """Handle HassioAPIError and raise a specific AddonError.""" + error_type = expected_error_type or (HassioAPIError, SupervisorError) def handle_hassio_api_error( func: _FuncType[_AddonManagerT, _P, _R], @@ -55,7 +51,7 @@ async def wrapper( """Wrap an add-on manager method.""" try: return_value = await func(self, *args, **kwargs) - except (HassioAPIError, SupervisorError) as err: + except error_type as err: raise AddonError( f"{error_message.format(addon_name=self.addon_name)}: {err}" ) from err @@ -113,6 +109,7 @@ def __init__( self._restart_task: asyncio.Task | None = None self._start_task: asyncio.Task | None = None self._update_task: asyncio.Task | None = None + self._supervisor_client = get_supervisor_client(hass) def task_in_progress(self) -> bool: """Return True if any of the add-on tasks are in progress.""" @@ -126,28 +123,39 @@ def task_in_progress(self) -> bool: ) ) - @api_error("Failed to get the {addon_name} add-on discovery info") + @api_error( + "Failed to get the {addon_name} add-on discovery info", + expected_error_type=SupervisorError, + ) async def async_get_addon_discovery_info(self) -> dict: """Return add-on discovery info.""" - discovery_info = await async_get_addon_discovery_info( - self._hass, self.addon_slug + discovery_info = next( + ( + msg + for msg in await self._supervisor_client.discovery.list() + if msg.addon == self.addon_slug + ), + None, ) if not discovery_info: raise AddonError(f"Failed to get {self.addon_name} add-on discovery info") - discovery_info_config: dict = discovery_info["config"] - return discovery_info_config + return discovery_info.config - @api_error("Failed to get the {addon_name} add-on info") + @api_error( + "Failed to get the {addon_name} add-on info", + 
expected_error_type=SupervisorError, + ) async def async_get_addon_info(self) -> AddonInfo: """Return and cache manager add-on info.""" - supervisor_client = get_supervisor_client(self._hass) - addon_store_info = await async_get_addon_store_info(self._hass, self.addon_slug) - self._logger.debug("Add-on store info: %s", addon_store_info) - if not addon_store_info["installed"]: + addon_store_info = await self._supervisor_client.store.addon_info( + self.addon_slug + ) + self._logger.debug("Add-on store info: %s", addon_store_info.to_dict()) + if not addon_store_info.installed: return AddonInfo( - available=addon_store_info["available"], + available=addon_store_info.available, hostname=None, options={}, state=AddonState.NOT_INSTALLED, @@ -155,7 +163,7 @@ async def async_get_addon_info(self) -> AddonInfo: version=None, ) - addon_info = await supervisor_client.addons.addon_info(self.addon_slug) + addon_info = await self._supervisor_client.addons.addon_info(self.addon_slug) addon_state = self.async_get_addon_state(addon_info) return AddonInfo( available=addon_info.available, @@ -180,31 +188,39 @@ def async_get_addon_state(self, addon_info: InstalledAddonComplete) -> AddonStat return addon_state - @api_error("Failed to set the {addon_name} add-on options") + @api_error( + "Failed to set the {addon_name} add-on options", + expected_error_type=SupervisorError, + ) async def async_set_addon_options(self, config: dict) -> None: """Set manager add-on options.""" - options = {"options": config} - await async_set_addon_options(self._hass, self.addon_slug, options) + await self._supervisor_client.addons.set_addon_options( + self.addon_slug, AddonsOptions(config=config) + ) def _check_addon_available(self, addon_info: AddonInfo) -> None: """Check if the managed add-on is available.""" - if not addon_info.available: raise AddonError(f"{self.addon_name} add-on is not available") - @api_error("Failed to install the {addon_name} add-on") + @api_error( + "Failed to install the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_install_addon(self) -> None: """Install the managed add-on.""" addon_info = await self.async_get_addon_info() self._check_addon_available(addon_info) - await async_install_addon(self._hass, self.addon_slug) + await self._supervisor_client.store.install_addon(self.addon_slug) - @api_error("Failed to uninstall the {addon_name} add-on") + @api_error( + "Failed to uninstall the {addon_name} add-on", + expected_error_type=SupervisorError, + ) async def async_uninstall_addon(self) -> None: """Uninstall the managed add-on.""" - await get_supervisor_client(self._hass).addons.uninstall_addon(self.addon_slug) + await self._supervisor_client.addons.uninstall_addon(self.addon_slug) @api_error("Failed to update the {addon_name} add-on") async def async_update_addon(self) -> None: @@ -220,22 +236,30 @@ async def async_update_addon(self) -> None: return await self.async_create_backup() - await async_update_addon(self._hass, self.addon_slug) + await self._supervisor_client.store.update_addon( + self.addon_slug, StoreAddonUpdate(backup=False) + ) - @api_error("Failed to start the {addon_name} add-on") + @api_error( + "Failed to start the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_start_addon(self) -> None: """Start the managed add-on.""" - await get_supervisor_client(self._hass).addons.start_addon(self.addon_slug) + await self._supervisor_client.addons.start_addon(self.addon_slug) - @api_error("Failed to restart the {addon_name} add-on") + 
@api_error( + "Failed to restart the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_restart_addon(self) -> None: """Restart the managed add-on.""" - await get_supervisor_client(self._hass).addons.restart_addon(self.addon_slug) + await self._supervisor_client.addons.restart_addon(self.addon_slug) - @api_error("Failed to stop the {addon_name} add-on") + @api_error( + "Failed to stop the {addon_name} add-on", expected_error_type=SupervisorError + ) async def async_stop_addon(self) -> None: """Stop the managed add-on.""" - await get_supervisor_client(self._hass).addons.stop_addon(self.addon_slug) + await self._supervisor_client.addons.stop_addon(self.addon_slug) @api_error("Failed to create a backup of the {addon_name} add-on") async def async_create_backup(self) -> None: diff --git a/homeassistant/components/hassio/config_flow.py b/homeassistant/components/hassio/config_flow.py index 57be400acc7dbe..e8bed912fd7da9 100644 --- a/homeassistant/components/hassio/config_flow.py +++ b/homeassistant/components/hassio/config_flow.py @@ -18,7 +18,4 @@ async def async_step_system( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - # We only need one Hass.io config entry - await self.async_set_unique_id(DOMAIN) - self._abort_if_unique_id_configured() return self.async_create_entry(title="Supervisor", data={}) diff --git a/homeassistant/components/hassio/const.py b/homeassistant/components/hassio/const.py index 6e6c9006fcae3c..82ce74832c25f2 100644 --- a/homeassistant/components/hassio/const.py +++ b/homeassistant/components/hassio/const.py @@ -103,6 +103,7 @@ PLACEHOLDER_KEY_REFERENCE = "reference" PLACEHOLDER_KEY_COMPONENTS = "components" +ISSUE_KEY_ADDON_BOOT_FAIL = "issue_addon_boot_fail" ISSUE_KEY_SYSTEM_DOCKER_CONFIG = "issue_system_docker_config" ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING = "issue_addon_detached_addon_missing" ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED = "issue_addon_detached_addon_removed" @@ -136,17 +137,3 @@ class SupervisorEntityModel(StrEnum): CORE = "Home Assistant Core" SUPERVIOSR = "Home Assistant Supervisor" HOST = "Home Assistant Host" - - -class SupervisorIssueContext(StrEnum): - """Context for supervisor issues.""" - - ADDON = "addon" - CORE = "core" - DNS_SERVER = "dns_server" - MOUNT = "mount" - OS = "os" - PLUGIN = "plugin" - SUPERVISOR = "supervisor" - STORE = "store" - SYSTEM = "system" diff --git a/homeassistant/components/hassio/coordinator.py b/homeassistant/components/hassio/coordinator.py index dc62f41abb5ec0..cb1dda8aeedc3c 100644 --- a/homeassistant/components/hassio/coordinator.py +++ b/homeassistant/components/hassio/coordinator.py @@ -8,6 +8,7 @@ from typing import TYPE_CHECKING, Any from aiohasupervisor import SupervisorError +from aiohasupervisor.models import StoreInfo from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME @@ -55,7 +56,7 @@ SUPERVISOR_CONTAINER, SupervisorEntityModel, ) -from .handler import HassIO, HassioAPIError +from .handler import HassIO, HassioAPIError, get_supervisor_client if TYPE_CHECKING: from .issues import SupervisorIssues @@ -317,6 +318,7 @@ def __init__( self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict( lambda: defaultdict(set) ) + self.supervisor_client = get_supervisor_client(hass) async def _async_update_data(self) -> dict[str, Any]: """Update data via library.""" @@ -332,12 +334,15 @@ async def _async_update_data(self) -> dict[str, Any]: addons_info = 
get_addons_info(self.hass) or {} addons_stats = get_addons_stats(self.hass) addons_changelogs = get_addons_changelogs(self.hass) - store_data = get_store(self.hass) or {} + store_data = get_store(self.hass) - repositories = { - repo[ATTR_SLUG]: repo[ATTR_NAME] - for repo in store_data.get("repositories", []) - } + if store_data: + repositories = { + repo.slug: repo.name + for repo in StoreInfo.from_dict(store_data).repositories + } + else: + repositories = {} new_data[DATA_KEY_ADDONS] = { addon[ATTR_SLUG]: { @@ -498,17 +503,17 @@ async def force_data_refresh(self, first_update: bool) -> None: async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]: """Update single addon stats.""" try: - stats = await self.hassio.get_addon_stats(slug) - except HassioAPIError as err: + stats = await self.supervisor_client.addons.addon_stats(slug) + except SupervisorError as err: _LOGGER.warning("Could not fetch stats for %s: %s", slug, err) return (slug, None) - return (slug, stats) + return (slug, stats.to_dict()) async def _update_addon_changelog(self, slug: str) -> tuple[str, str | None]: """Return the changelog for an add-on.""" try: - changelog = await self.hassio.get_addon_changelog(slug) - except HassioAPIError as err: + changelog = await self.supervisor_client.store.addon_changelog(slug) + except SupervisorError as err: _LOGGER.warning("Could not fetch changelog for %s: %s", slug, err) return (slug, None) return (slug, changelog) @@ -516,7 +521,7 @@ async def _update_addon_changelog(self, slug: str) -> tuple[str, str | None]: async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]: """Return the info for an add-on.""" try: - info = await self.hassio.client.addons.addon_info(slug) + info = await self.supervisor_client.addons.addon_info(slug) except SupervisorError as err: _LOGGER.warning("Could not fetch info for %s: %s", slug, err) return (slug, None) @@ -558,8 +563,8 @@ async def _async_refresh( # updates if this is not a scheduled refresh and # we are not doing the first refresh. 
try: - await self.hassio.refresh_updates() - except HassioAPIError as err: + await self.supervisor_client.refresh_updates() + except SupervisorError as err: _LOGGER.warning("Error on Supervisor API: %s", err) await super()._async_refresh( diff --git a/homeassistant/components/hassio/discovery.py b/homeassistant/components/hassio/discovery.py index 009f9dfde7e820..b51b8e5a8f2da5 100644 --- a/homeassistant/components/hassio/discovery.py +++ b/homeassistant/components/hassio/discovery.py @@ -3,10 +3,12 @@ from __future__ import annotations import asyncio -from dataclasses import dataclass import logging from typing import Any +from uuid import UUID +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import Discovery from aiohttp import web from aiohttp.web_exceptions import HTTPServiceUnavailable @@ -14,43 +16,35 @@ from homeassistant.components.http import HomeAssistantView from homeassistant.const import ATTR_SERVICE, EVENT_HOMEASSISTANT_START from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.data_entry_flow import BaseServiceInfo from homeassistant.helpers import discovery_flow +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.service_info.hassio import HassioServiceInfo -from .const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_UUID -from .handler import HassIO, HassioAPIError +from .const import ATTR_ADDON, ATTR_UUID, DOMAIN +from .handler import HassIO, get_supervisor_client _LOGGER = logging.getLogger(__name__) -@dataclass(slots=True) -class HassioServiceInfo(BaseServiceInfo): - """Prepared info from hassio entries.""" - - config: dict[str, Any] - name: str - slug: str - uuid: str - - @callback def async_setup_discovery_view(hass: HomeAssistant, hassio: HassIO) -> None: """Discovery setup.""" hassio_discovery = HassIODiscovery(hass, hassio) + supervisor_client = get_supervisor_client(hass) hass.http.register_view(hassio_discovery) # Handle exists discovery messages async def _async_discovery_start_handler(event: Event) -> None: """Process all exists discovery on startup.""" try: - data = await hassio.retrieve_discovery_messages() - except HassioAPIError as err: + data = await supervisor_client.discovery.list() + except SupervisorError as err: _LOGGER.error("Can't read discover info: %s", err) return jobs = [ asyncio.create_task(hassio_discovery.async_process_new(discovery)) - for discovery in data[ATTR_DISCOVERY] + for discovery in data ] if jobs: await asyncio.wait(jobs) @@ -59,6 +53,23 @@ async def _async_discovery_start_handler(event: Event) -> None: EVENT_HOMEASSISTANT_START, _async_discovery_start_handler ) + async def _handle_config_entry_removed( + entry: config_entries.ConfigEntry, + ) -> None: + """Handle config entry changes.""" + for disc_key in entry.discovery_keys[DOMAIN]: + if disc_key.version != 1 or not isinstance(key := disc_key.key, str): + continue + uuid = key + _LOGGER.debug("Rediscover addon %s", uuid) + await hassio_discovery.async_rediscover(uuid) + + async_dispatcher_connect( + hass, + config_entries.signal_discovered_config_entry_removed(DOMAIN), + _handle_config_entry_removed, + ) + class HassIODiscovery(HomeAssistantView): """Hass.io view to handle base part.""" @@ -70,13 +81,14 @@ def __init__(self, hass: HomeAssistant, hassio: HassIO) -> None: """Initialize WebView.""" self.hass = hass self.hassio = hassio + self._supervisor_client = get_supervisor_client(hass) async def post(self, request: web.Request, uuid: str) -> web.Response: """Handle new 
discovery requests.""" # Fetch discovery data and prevent injections try: - data = await self.hassio.get_discovery_message(uuid) - except HassioAPIError as err: + data = await self._supervisor_client.discovery.get(UUID(uuid)) + except SupervisorError as err: _LOGGER.error("Can't read discovery data: %s", err) raise HTTPServiceUnavailable from None @@ -90,41 +102,53 @@ async def delete(self, request: web.Request, uuid: str) -> web.Response: await self.async_process_del(data) return web.Response() - async def async_process_new(self, data: dict[str, Any]) -> None: - """Process add discovery entry.""" - service: str = data[ATTR_SERVICE] - config_data: dict[str, Any] = data[ATTR_CONFIG] - slug: str = data[ATTR_ADDON] - uuid: str = data[ATTR_UUID] + async def async_rediscover(self, uuid: str) -> None: + """Rediscover add-on when config entry is removed.""" + try: + data = await self._supervisor_client.discovery.get(UUID(uuid)) + except SupervisorError as err: + _LOGGER.debug("Can't read discovery data: %s", err) + else: + await self.async_process_new(data) + async def async_process_new(self, data: Discovery) -> None: + """Process add discovery entry.""" # Read additional Add-on info try: - addon_info = await self.hassio.client.addons.addon_info(slug) - except HassioAPIError as err: + addon_info = await self._supervisor_client.addons.addon_info(data.addon) + except SupervisorError as err: _LOGGER.error("Can't read add-on info: %s", err) return - config_data[ATTR_ADDON] = addon_info.name + data.config[ATTR_ADDON] = addon_info.name # Use config flow discovery_flow.async_create_flow( self.hass, - service, + data.service, context={"source": config_entries.SOURCE_HASSIO}, data=HassioServiceInfo( - config=config_data, name=addon_info.name, slug=slug, uuid=uuid + config=data.config, + name=addon_info.name, + slug=data.addon, + uuid=data.uuid.hex, + ), + discovery_key=discovery_flow.DiscoveryKey( + domain=DOMAIN, + key=data.uuid.hex, + version=1, ), ) async def async_process_del(self, data: dict[str, Any]) -> None: """Process remove discovery entry.""" - service = data[ATTR_SERVICE] - uuid = data[ATTR_UUID] + service: str = data[ATTR_SERVICE] + uuid: str = data[ATTR_UUID] # Check if really deletet / prevent injections try: - data = await self.hassio.get_discovery_message(uuid) - except HassioAPIError: + await self._supervisor_client.discovery.get(UUID(uuid)) + except SupervisorError: pass else: _LOGGER.warning("Retrieve wrong unload for %s", service) diff --git a/homeassistant/components/hassio/handler.py b/homeassistant/components/hassio/handler.py index afa5cb31abae18..58f2aa8c1444da 100644 --- a/homeassistant/components/hassio/handler.py +++ b/homeassistant/components/hassio/handler.py @@ -21,12 +21,15 @@ ) from homeassistant.const import SERVER_PORT from homeassistant.core import HomeAssistant +from homeassistant.helpers.singleton import singleton from homeassistant.loader import bind_hass -from .const import ATTR_DISCOVERY, ATTR_MESSAGE, ATTR_RESULT, DOMAIN, X_HASS_SOURCE +from .const import ATTR_MESSAGE, ATTR_RESULT, DOMAIN, X_HASS_SOURCE _LOGGER = logging.getLogger(__name__) +KEY_SUPERVISOR_CLIENT = "supervisor_client" + class HassioAPIError(RuntimeError): """Return if a API trow a error.""" @@ -63,17 +66,6 @@ async def _wrapper(*argv: _P.args, **kwargs: _P.kwargs) -> Any: return _wrapper -@api_data -async def async_get_addon_store_info(hass: HomeAssistant, slug: str) -> dict: - """Return add-on store info. - - The caller of the function should handle HassioAPIError. 
- """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/store/addons/{slug}" - return await hassio.send_command(command, method="get") - - @bind_hass async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> bool: """Update Supervisor diagnostics toggle. @@ -84,61 +76,6 @@ async def async_update_diagnostics(hass: HomeAssistant, diagnostics: bool) -> bo return await hassio.update_diagnostics(diagnostics) -@bind_hass -@api_data -async def async_install_addon(hass: HomeAssistant, slug: str) -> dict: - """Install add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/install" - return await hassio.send_command(command, timeout=None) - - -@bind_hass -@api_data -async def async_update_addon( - hass: HomeAssistant, - slug: str, - backup: bool = False, -) -> dict: - """Update add-on. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/update" - return await hassio.send_command( - command, - payload={"backup": backup}, - timeout=None, - ) - - -@bind_hass -@api_data -async def async_set_addon_options( - hass: HomeAssistant, slug: str, options: dict -) -> dict: - """Set add-on options. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = f"/addons/{slug}/options" - return await hassio.send_command(command, payload=options) - - -@bind_hass -async def async_get_addon_discovery_info(hass: HomeAssistant, slug: str) -> dict | None: - """Return discovery data for an add-on.""" - hassio: HassIO = hass.data[DOMAIN] - data = await hassio.retrieve_discovery_messages() - discovered_addons = data[ATTR_DISCOVERY] - return next((addon for addon in discovered_addons if addon["addon"] == slug), None) - - @bind_hass @api_data async def async_create_backup( @@ -154,61 +91,6 @@ async def async_create_backup( return await hassio.send_command(command, payload=payload, timeout=None) -@bind_hass -@api_data -async def async_update_os(hass: HomeAssistant, version: str | None = None) -> dict: - """Update Home Assistant Operating System. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = "/os/update" - return await hassio.send_command( - command, - payload={"version": version}, - timeout=None, - ) - - -@bind_hass -@api_data -async def async_update_supervisor(hass: HomeAssistant) -> dict: - """Update Home Assistant Supervisor. - - The caller of the function should handle HassioAPIError. - """ - hassio: HassIO = hass.data[DOMAIN] - command = "/supervisor/update" - return await hassio.send_command(command, timeout=None) - - -@bind_hass -@api_data -async def async_update_core( - hass: HomeAssistant, version: str | None = None, backup: bool = False -) -> dict: - """Update Home Assistant Core. - - The caller of the function should handle HassioAPIError. 
- """ - hassio: HassIO = hass.data[DOMAIN] - command = "/core/update" - return await hassio.send_command( - command, - payload={"version": version, "backup": backup}, - timeout=None, - ) - - -@bind_hass -@_api_bool -async def async_apply_suggestion(hass: HomeAssistant, suggestion_uuid: str) -> dict: - """Apply a suggestion from supervisor's resolution center.""" - hassio: HassIO = hass.data[DOMAIN] - command = f"/resolution/suggestion/{suggestion_uuid}" - return await hassio.send_command(command, timeout=None) - - @api_data async def async_get_green_settings(hass: HomeAssistant) -> dict[str, bool]: """Return settings specific to Home Assistant Green.""" @@ -276,22 +158,11 @@ def __init__( self._ip = ip base_url = f"http://{ip}" self._base_url = URL(base_url) - self._client = SupervisorClient( - base_url, os.environ.get("SUPERVISOR_TOKEN", ""), session=websession - ) @property - def client(self) -> SupervisorClient: - """Return aiohasupervisor client.""" - return self._client - - @_api_bool - def is_connected(self) -> Coroutine: - """Return true if it connected to Hass.io supervisor. - - This method returns a coroutine. - """ - return self.send_command("/supervisor/ping", method="get", timeout=15) + def base_url(self) -> URL: + """Return base url for Supervisor.""" + return self._base_url @api_data def get_info(self) -> Coroutine: @@ -349,14 +220,6 @@ def get_core_stats(self) -> Coroutine: """ return self.send_command("/core/stats", method="get") - @api_data - def get_addon_stats(self, addon: str) -> Coroutine: - """Return stats for an Add-on. - - This method returns a coroutine. - """ - return self.send_command(f"/addons/{addon}/stats", method="get") - @api_data def get_supervisor_stats(self) -> Coroutine: """Return stats for the supervisor. @@ -365,23 +228,6 @@ def get_supervisor_stats(self) -> Coroutine: """ return self.send_command("/supervisor/stats", method="get") - def get_addon_changelog(self, addon: str) -> Coroutine: - """Return changelog for an Add-on. - - This method returns a coroutine. - """ - return self.send_command( - f"/addons/{addon}/changelog", method="get", return_text=True - ) - - @api_data - def get_store(self) -> Coroutine: - """Return data from the store. - - This method returns a coroutine. - """ - return self.send_command("/store", method="get") - @api_data def get_ingress_panels(self) -> Coroutine: """Return data for Add-on ingress panels. @@ -390,66 +236,6 @@ def get_ingress_panels(self) -> Coroutine: """ return self.send_command("/ingress/panels", method="get") - @_api_bool - def restart_homeassistant(self) -> Coroutine: - """Restart Home-Assistant container. - - This method returns a coroutine. - """ - return self.send_command("/homeassistant/restart") - - @_api_bool - def stop_homeassistant(self) -> Coroutine: - """Stop Home-Assistant container. - - This method returns a coroutine. - """ - return self.send_command("/homeassistant/stop") - - @_api_bool - def refresh_updates(self) -> Coroutine: - """Refresh available updates. - - This method returns a coroutine. - """ - return self.send_command("/refresh_updates", timeout=300) - - @api_data - def retrieve_discovery_messages(self) -> Coroutine: - """Return all discovery data from Hass.io API. - - This method returns a coroutine. - """ - return self.send_command("/discovery", method="get", timeout=60) - - @api_data - def get_discovery_message(self, uuid: str) -> Coroutine: - """Return a single discovery data message. - - This method returns a coroutine. 
- """ - return self.send_command(f"/discovery/{uuid}", method="get") - - @api_data - def get_resolution_info(self) -> Coroutine: - """Return data for Supervisor resolution center. - - This method returns a coroutine. - """ - return self.send_command("/resolution/info", method="get") - - @api_data - def get_suggestions_for_issue( - self, issue_id: str - ) -> Coroutine[Any, Any, dict[str, Any]]: - """Return suggestions for issue from Supervisor resolution center. - - This method returns a coroutine. - """ - return self.send_command( - f"/resolution/issue/{issue_id}/suggestions", method="get" - ) - @_api_bool async def update_hass_api( self, http_config: dict[str, Any], refresh_token: RefreshToken @@ -489,14 +275,6 @@ def update_diagnostics(self, diagnostics: bool) -> Coroutine: "/supervisor/options", payload={"diagnostics": diagnostics} ) - @_api_bool - def apply_suggestion(self, suggestion_uuid: str) -> Coroutine: - """Apply a suggestion from supervisor's resolution center. - - This method returns a coroutine. - """ - return self.send_command(f"/resolution/suggestion/{suggestion_uuid}") - async def send_command( self, command: str, @@ -562,7 +340,12 @@ async def send_command( raise HassioAPIError +@singleton(KEY_SUPERVISOR_CLIENT) def get_supervisor_client(hass: HomeAssistant) -> SupervisorClient: """Return supervisor client.""" hassio: HassIO = hass.data[DOMAIN] - return hassio.client + return SupervisorClient( + str(hassio.base_url), + os.environ.get("SUPERVISOR_TOKEN", ""), + session=hassio.websession, + ) diff --git a/homeassistant/components/hassio/http.py b/homeassistant/components/hassio/http.py index 8c1fb11973e567..2b34a48149b963 100644 --- a/homeassistant/components/hassio/http.py +++ b/homeassistant/components/hassio/http.py @@ -18,6 +18,7 @@ CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, + RANGE, TRANSFER_ENCODING, ) from aiohttp.web_exceptions import HTTPBadGateway @@ -41,6 +42,15 @@ r"|backups/.+/full" r"|backups/.+/partial" r"|backups/[^/]+/(?:upload|download)" + r"|audio/logs/(follow|boots/-?\d+(/follow)?)" + r"|cli/logs/(follow|boots/-?\d+(/follow)?)" + r"|core/logs/(follow|boots/-?\d+(/follow)?)" + r"|dns/logs/(follow|boots/-?\d+(/follow)?)" + r"|host/logs/(follow|boots/-?\d+(/follow)?)" + r"|multicast/logs/(follow|boots/-?\d+(/follow)?)" + r"|observer/logs/(follow|boots/-?\d+(/follow)?)" + r"|supervisor/logs/(follow|boots/-?\d+(/follow)?)" + r"|addons/[^/]+/logs/(follow|boots/-?\d+(/follow)?)" r")$" ) @@ -58,15 +68,16 @@ r"^(?:" r"|backups/[a-f0-9]{8}(/info|/download|/restore/full|/restore/partial)?" r"|backups/new/upload" - r"|audio/logs" - r"|cli/logs" - r"|core/logs" - r"|dns/logs" - r"|host/logs" - r"|multicast/logs" - r"|observer/logs" - r"|supervisor/logs" - r"|addons/[^/]+/(changelog|documentation|logs)" + r"|audio/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|cli/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|core/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|dns/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|host/logs(/follow|/boots(/-?\d+(/follow)?)?)?" + r"|multicast/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|observer/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|supervisor/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|addons/[^/]+/(changelog|documentation)" + r"|addons/[^/]+/logs(/follow|/boots/-?\d+(/follow)?)?" 
r")$" ) @@ -83,8 +94,38 @@ r"|app/entrypoint.js" r")$" ) + +# Follow logs should not be compressed, to be able to get streamed by frontend +NO_COMPRESS = re.compile( + r"^(?:" + r"|audio/logs/(follow|boots/-?\d+(/follow)?)" + r"|cli/logs/(follow|boots/-?\d+(/follow)?)" + r"|core/logs/(follow|boots/-?\d+(/follow)?)" + r"|dns/logs/(follow|boots/-?\d+(/follow)?)" + r"|host/logs/(follow|boots/-?\d+(/follow)?)" + r"|multicast/logs/(follow|boots/-?\d+(/follow)?)" + r"|observer/logs/(follow|boots/-?\d+(/follow)?)" + r"|supervisor/logs/(follow|boots/-?\d+(/follow)?)" + r"|addons/[^/]+/logs/(follow|boots/-?\d+(/follow)?)" + r")$" +) + +PATHS_LOGS = re.compile( + r"^(?:" + r"|audio/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|cli/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|core/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|dns/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|host/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|multicast/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|observer/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|supervisor/logs(/follow|/boots/-?\d+(/follow)?)?" + r"|addons/[^/]+/logs(/follow|/boots/-?\d+(/follow)?)?" + r")$" +) # fmt: on + RESPONSE_HEADERS_FILTER = { TRANSFER_ENCODING, CONTENT_LENGTH, @@ -161,6 +202,10 @@ async def _handle(self, request: web.Request, path: str) -> web.StreamResponse: assert isinstance(request._stored_content_type, str) # noqa: SLF001 headers[CONTENT_TYPE] = request._stored_content_type # noqa: SLF001 + # forward range headers for logs + if PATHS_LOGS.match(path) and request.headers.get(RANGE): + headers[RANGE] = request.headers[RANGE] + try: client = await self._websession.request( method=request.method, @@ -177,7 +222,7 @@ async def _handle(self, request: web.Request, path: str) -> web.StreamResponse: ) response.content_type = client.content_type - if should_compress(response.content_type): + if should_compress(response.content_type, path): response.enable_compression() await response.prepare(request) # In testing iter_chunked, iter_any, and iter_chunks: @@ -217,8 +262,10 @@ def _get_timeout(path: str) -> ClientTimeout: return ClientTimeout(connect=10, total=300) -def should_compress(content_type: str) -> bool: +def should_compress(content_type: str, path: str | None = None) -> bool: """Return if we should compress a response.""" + if path is not None and NO_COMPRESS.match(path): + return False if content_type.startswith("image/"): return "svg" in content_type if content_type.startswith("application/"): diff --git a/homeassistant/components/hassio/issues.py b/homeassistant/components/hassio/issues.py index 9c2152489d6335..16697659077fdd 100644 --- a/homeassistant/components/hassio/issues.py +++ b/homeassistant/components/hassio/issues.py @@ -7,6 +7,10 @@ from datetime import datetime import logging from typing import Any, NotRequired, TypedDict +from uuid import UUID + +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ContextType, Issue as SupervisorIssue from homeassistant.core import HassJob, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -20,12 +24,8 @@ from .const import ( ATTR_DATA, ATTR_HEALTHY, - ATTR_ISSUES, - ATTR_SUGGESTIONS, ATTR_SUPPORTED, - ATTR_UNHEALTHY, ATTR_UNHEALTHY_REASONS, - ATTR_UNSUPPORTED, ATTR_UNSUPPORTED_REASONS, ATTR_UPDATE_KEY, ATTR_WS_EVENT, @@ -36,6 +36,7 @@ EVENT_SUPERVISOR_EVENT, EVENT_SUPERVISOR_UPDATE, EVENT_SUPPORTED_CHANGED, + ISSUE_KEY_ADDON_BOOT_FAIL, ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING, ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, 
ISSUE_KEY_SYSTEM_DOCKER_CONFIG, @@ -44,10 +45,9 @@ PLACEHOLDER_KEY_REFERENCE, REQUEST_REFRESH_DELAY, UPDATE_KEY_SUPERVISOR, - SupervisorIssueContext, ) from .coordinator import get_addons_info -from .handler import HassIO, HassioAPIError +from .handler import HassIO, get_supervisor_client ISSUE_KEY_UNHEALTHY = "unhealthy" ISSUE_KEY_UNSUPPORTED = "unsupported" @@ -94,6 +94,7 @@ # Keys (type + context) of issues that when found should be made into a repair ISSUE_KEYS_FOR_REPAIRS = { + ISSUE_KEY_ADDON_BOOT_FAIL, "issue_mount_mount_failed", "issue_system_multiple_data_disks", "issue_system_reboot_required", @@ -118,9 +119,9 @@ class SuggestionDataType(TypedDict): class Suggestion: """Suggestion from Supervisor which resolves an issue.""" - uuid: str + uuid: UUID type: str - context: SupervisorIssueContext + context: ContextType reference: str | None = None @property @@ -132,9 +133,9 @@ def key(self) -> str: def from_dict(cls, data: SuggestionDataType) -> Suggestion: """Convert from dictionary representation.""" return cls( - uuid=data["uuid"], + uuid=UUID(data["uuid"]), type=data["type"], - context=SupervisorIssueContext(data["context"]), + context=ContextType(data["context"]), reference=data["reference"], ) @@ -153,9 +154,9 @@ class IssueDataType(TypedDict): class Issue: """Issue from Supervisor.""" - uuid: str + uuid: UUID type: str - context: SupervisorIssueContext + context: ContextType reference: str | None = None suggestions: list[Suggestion] = field(default_factory=list, compare=False) @@ -169,9 +170,9 @@ def from_dict(cls, data: IssueDataType) -> Issue: """Convert from dictionary representation.""" suggestions: list[SuggestionDataType] = data.get("suggestions", []) return cls( - uuid=data["uuid"], + uuid=UUID(data["uuid"]), type=data["type"], - context=SupervisorIssueContext(data["context"]), + context=ContextType(data["context"]), reference=data["reference"], suggestions=[ Suggestion.from_dict(suggestion) for suggestion in suggestions @@ -188,7 +189,8 @@ def __init__(self, hass: HomeAssistant, client: HassIO) -> None: self._client = client self._unsupported_reasons: set[str] = set() self._unhealthy_reasons: set[str] = set() - self._issues: dict[str, Issue] = {} + self._issues: dict[UUID, Issue] = {} + self._supervisor_client = get_supervisor_client(hass) @property def unhealthy_reasons(self) -> set[str]: @@ -281,7 +283,7 @@ def add_issue(self, issue: Issue) -> None: async_create_issue( self._hass, DOMAIN, - issue.uuid, + issue.uuid.hex, is_fixable=bool(issue.suggestions), severity=IssueSeverity.WARNING, translation_key=issue.key, @@ -290,19 +292,37 @@ def add_issue(self, issue: Issue) -> None: self._issues[issue.uuid] = issue - async def add_issue_from_data(self, data: IssueDataType) -> None: + async def add_issue_from_data(self, data: SupervisorIssue) -> None: """Add issue from data to list after getting latest suggestions.""" try: - data["suggestions"] = ( - await self._client.get_suggestions_for_issue(data["uuid"]) - )[ATTR_SUGGESTIONS] - except HassioAPIError: + suggestions = ( + await self._supervisor_client.resolution.suggestions_for_issue( + data.uuid + ) + ) + except SupervisorError: _LOGGER.error( "Could not get suggestions for supervisor issue %s, skipping it", - data["uuid"], + data.uuid.hex, ) return - self.add_issue(Issue.from_dict(data)) + self.add_issue( + Issue( + uuid=data.uuid, + type=str(data.type), + context=data.context, + reference=data.reference, + suggestions=[ + Suggestion( + uuid=suggestion.uuid, + type=str(suggestion.type), + context=suggestion.context, + 
reference=suggestion.reference, + ) + for suggestion in suggestions + ], + ) + ) def remove_issue(self, issue: Issue) -> None: """Remove an issue from the list. Delete a repair if necessary.""" @@ -310,13 +330,13 @@ def remove_issue(self, issue: Issue) -> None: return if issue.key in ISSUE_KEYS_FOR_REPAIRS: - async_delete_issue(self._hass, DOMAIN, issue.uuid) + async_delete_issue(self._hass, DOMAIN, issue.uuid.hex) del self._issues[issue.uuid] def get_issue(self, issue_id: str) -> Issue | None: """Get issue from key.""" - return self._issues.get(issue_id) + return self._issues.get(UUID(issue_id)) async def setup(self) -> None: """Create supervisor events listener.""" @@ -329,8 +349,8 @@ async def setup(self) -> None: async def _update(self, _: datetime | None = None) -> None: """Update issues from Supervisor resolution center.""" try: - data = await self._client.get_resolution_info() - except HassioAPIError as err: + data = await self._supervisor_client.resolution.info() + except SupervisorError as err: _LOGGER.error("Failed to update supervisor issues: %r", err) async_call_later( self._hass, @@ -338,18 +358,16 @@ async def _update(self, _: datetime | None = None) -> None: HassJob(self._update, cancel_on_shutdown=True), ) return - self.unhealthy_reasons = set(data[ATTR_UNHEALTHY]) - self.unsupported_reasons = set(data[ATTR_UNSUPPORTED]) + self.unhealthy_reasons = set(data.unhealthy) + self.unsupported_reasons = set(data.unsupported) # Remove any cached issues that weren't returned - for issue_id in set(self._issues.keys()) - { - issue["uuid"] for issue in data[ATTR_ISSUES] - }: + for issue_id in set(self._issues) - {issue.uuid for issue in data.issues}: self.remove_issue(self._issues[issue_id]) # Add/update any issues that came back await asyncio.gather( - *[self.add_issue_from_data(issue) for issue in data[ATTR_ISSUES]] + *[self.add_issue_from_data(issue) for issue in data.issues] ) @callback diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 14e3f3598f1088..31fa27a92c435f 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.1.0"] + "requirements": ["aiohasupervisor==0.2.1"], + "single_config_entry": true } diff --git a/homeassistant/components/hassio/repairs.py b/homeassistant/components/hassio/repairs.py index 082dbe38beee6e..0e8122c08b995d 100644 --- a/homeassistant/components/hassio/repairs.py +++ b/homeassistant/components/hassio/repairs.py @@ -6,6 +6,8 @@ from types import MethodType from typing import Any +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ContextType import voluptuous as vol from homeassistant.components.repairs import RepairsFlow @@ -14,14 +16,14 @@ from . 
import get_addons_info, get_issues_info from .const import ( + ISSUE_KEY_ADDON_BOOT_FAIL, ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, ISSUE_KEY_SYSTEM_DOCKER_CONFIG, PLACEHOLDER_KEY_ADDON, PLACEHOLDER_KEY_COMPONENTS, PLACEHOLDER_KEY_REFERENCE, - SupervisorIssueContext, ) -from .handler import async_apply_suggestion +from .handler import get_supervisor_client from .issues import Issue, Suggestion HELP_URLS = { @@ -50,9 +52,10 @@ class SupervisorIssueRepairFlow(RepairsFlow): _data: dict[str, Any] | None = None _issue: Issue | None = None - def __init__(self, issue_id: str) -> None: + def __init__(self, hass: HomeAssistant, issue_id: str) -> None: """Initialize repair flow.""" self._issue_id = issue_id + self._supervisor_client = get_supervisor_client(hass) super().__init__() @property @@ -123,9 +126,12 @@ async def _async_step_apply_suggestion( if not confirmed and suggestion.key in SUGGESTION_CONFIRMATION_REQUIRED: return self._async_form_for_suggestion(suggestion) - if await async_apply_suggestion(self.hass, suggestion.uuid): - return self.async_create_entry(data={}) - return self.async_abort(reason="apply_suggestion_fail") + try: + await self._supervisor_client.resolution.apply_suggestion(suggestion.uuid) + except SupervisorError: + return self.async_abort(reason="apply_suggestion_fail") + + return self.async_create_entry(data={}) @staticmethod def _async_step( @@ -162,9 +168,9 @@ def description_placeholders(self) -> dict[str, str] | None: if issue.key == self.issue.key or issue.type != self.issue.type: continue - if issue.context == SupervisorIssueContext.CORE: + if issue.context == ContextType.CORE: components.insert(0, "Home Assistant") - elif issue.context == SupervisorIssueContext.ADDON: + elif issue.context == ContextType.ADDON: components.append( next( ( @@ -181,8 +187,8 @@ def description_placeholders(self) -> dict[str, str] | None: return placeholders -class DetachedAddonIssueRepairFlow(SupervisorIssueRepairFlow): - """Handler for detached addon issue fixing flows.""" +class AddonIssueRepairFlow(SupervisorIssueRepairFlow): + """Handler for addon issue fixing flows.""" @property def description_placeholders(self) -> dict[str, str] | None: @@ -209,8 +215,11 @@ async def async_create_fix_flow( supervisor_issues = get_issues_info(hass) issue = supervisor_issues and supervisor_issues.get_issue(issue_id) if issue and issue.key == ISSUE_KEY_SYSTEM_DOCKER_CONFIG: - return DockerConfigIssueRepairFlow(issue_id) - if issue and issue.key == ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED: - return DetachedAddonIssueRepairFlow(issue_id) - - return SupervisorIssueRepairFlow(issue_id) + return DockerConfigIssueRepairFlow(hass, issue_id) + if issue and issue.key in { + ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, + ISSUE_KEY_ADDON_BOOT_FAIL, + }: + return AddonIssueRepairFlow(hass, issue_id) + + return SupervisorIssueRepairFlow(hass, issue_id) diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index c304373b27b833..09ed45bd5bc2c1 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -17,6 +17,23 @@ } }, "issues": { + "issue_addon_boot_fail": { + "title": "Add-on failed to start at boot", + "fix_flow": { + "step": { + "fix_menu": { + "description": "Add-on {addon} is set to start at boot but failed to start. Usually this occurs when the configuration is incorrect or the same port is used in multiple add-ons. 
Check the configuration as well as logs for {addon} and Supervisor.\n\nUse Start to try again or Disable to turn off the start at boot option.", + "menu_options": { + "addon_execute_start": "Start", + "addon_disable_boot": "Disable" + } + } + }, + "abort": { + "apply_suggestion_fail": "Could not apply the fix. Check the Supervisor logs for more details." + } + } + }, "issue_addon_detached_addon_missing": { "title": "Missing repository for an installed add-on", "description": "Repository for add-on {addon} is missing. This means it will not get updates, and backups may not be restored correctly as the supervisor may not be able to build/download the resources required.\n\nPlease check the [add-on's documentation]({addon_url}) for installation instructions and add the repository to the store." @@ -208,6 +225,10 @@ "unsupported_virtualization_image": { "title": "Unsupported system - Incorrect OS image for virtualization", "description": "System is unsupported because the Home Assistant OS image in use is not intended for use in a virtualized environment. Use the link to learn more and how to fix this." + }, + "update_service_deprecated": { + "title": "Deprecated update add-on action", + "description": "The update add-on action has been deprecated and will be removed in 2025.5. Please use the update entity and the respective action to update the add-on instead." } }, "entity": { diff --git a/homeassistant/components/hassio/update.py b/homeassistant/components/hassio/update.py index a7974850e19bcc..fbb3e191f81892 100644 --- a/homeassistant/components/hassio/update.py +++ b/homeassistant/components/hassio/update.py @@ -4,6 +4,12 @@ from typing import Any +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ( + HomeAssistantUpdateOptions, + OSUpdate, + StoreAddonUpdate, +) from awesomeversion import AwesomeVersion, AwesomeVersionStrategy from homeassistant.components.update import ( @@ -34,13 +40,6 @@ HassioOSEntity, HassioSupervisorEntity, ) -from .handler import ( - HassioAPIError, - async_update_addon, - async_update_core, - async_update_os, - async_update_supervisor, -) ENTITY_DESCRIPTION = UpdateEntityDescription( name="Update", @@ -165,8 +164,10 @@ async def async_install( ) -> None: """Install an update.""" try: - await async_update_addon(self.hass, slug=self._addon_slug, backup=backup) - except HassioAPIError as err: + await self.coordinator.supervisor_client.store.update_addon( + self._addon_slug, StoreAddonUpdate(backup=backup) + ) + except SupervisorError as err: raise HomeAssistantError(f"Error updating {self.title}: {err}") from err await self.coordinator.force_info_update_supervisor() @@ -210,8 +211,10 @@ async def async_install( ) -> None: """Install an update.""" try: - await async_update_os(self.hass, version) - except HassioAPIError as err: + await self.coordinator.supervisor_client.os.update( + OSUpdate(version=version) + ) + except SupervisorError as err: raise HomeAssistantError( f"Error updating Home Assistant Operating System: {err}" ) from err @@ -256,8 +259,8 @@ async def async_install( ) -> None: """Install an update.""" try: - await async_update_supervisor(self.hass) - except HassioAPIError as err: + await self.coordinator.supervisor_client.supervisor.update() + except SupervisorError as err: raise HomeAssistantError( f"Error updating Home Assistant Supervisor: {err}" ) from err @@ -301,8 +304,10 @@ async def async_install( ) -> None: """Install an update.""" try: - await async_update_core(self.hass, version=version, backup=backup) - except 
HassioAPIError as err: + await self.coordinator.supervisor_client.homeassistant.update( + HomeAssistantUpdateOptions(version=version, backup=backup) + ) + except SupervisorError as err: raise HomeAssistantError( f"Error updating Home Assistant Core: {err}" ) from err diff --git a/homeassistant/components/heatmiser/climate.py b/homeassistant/components/heatmiser/climate.py index f9f0cfacf60974..1102dbc0c748ab 100644 --- a/homeassistant/components/heatmiser/climate.py +++ b/homeassistant/components/heatmiser/climate.py @@ -1,11 +1,11 @@ -"""Support for the PRT Heatmiser themostats using the V3 protocol.""" +"""Support for the PRT Heatmiser thermostats using the V3 protocol.""" from __future__ import annotations import logging from typing import Any -from heatmiserV3 import connection, heatmiser +from heatmiserv3 import connection, heatmiser import voluptuous as vol from homeassistant.components.climate import ( diff --git a/homeassistant/components/heatmiser/manifest.json b/homeassistant/components/heatmiser/manifest.json index 7ae9cac129717a..f3f33f79b04ced 100644 --- a/homeassistant/components/heatmiser/manifest.json +++ b/homeassistant/components/heatmiser/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/heatmiser", "iot_class": "local_polling", "loggers": ["heatmiserV3"], - "requirements": ["heatmiserV3==1.1.18"] + "requirements": ["heatmiserV3==2.0.3"] } diff --git a/homeassistant/components/here_travel_time/config_flow.py b/homeassistant/components/here_travel_time/config_flow.py index d5a577aff9de2f..c2b70de148c43a 100644 --- a/homeassistant/components/here_travel_time/config_flow.py +++ b/homeassistant/components/here_travel_time/config_flow.py @@ -103,8 +103,6 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _entry: ConfigEntry - def __init__(self) -> None: """Init Config Flow.""" self._config: dict[str, Any] = {} @@ -115,7 +113,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> HERETravelTimeOptionsFlow: """Get the options flow.""" - return HERETravelTimeOptionsFlow(config_entry) + return HERETravelTimeOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -144,9 +142,9 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration.""" - self._entry = self._get_reconfigure_entry() return self.async_show_form( - step_id="user", data_schema=get_user_step_schema(self._entry.data.copy()) + step_id="user", + data_schema=get_user_step_schema(self._get_reconfigure_entry().data), ) async def async_step_origin_menu(self, _: None = None) -> ConfigFlowResult: @@ -232,10 +230,9 @@ async def async_step_destination_coordinates( self._config.pop(CONF_DESTINATION_ENTITY_ID, None) if self.source == SOURCE_RECONFIGURE: return self.async_update_reload_and_abort( - self._entry, + self._get_reconfigure_entry(), title=self._config[CONF_NAME], data=self._config, - reason="reconfigure_successful", ) return self.async_create_entry( title=self._config[CONF_NAME], @@ -277,7 +274,7 @@ async def async_step_destination_entity( self._config.pop(CONF_DESTINATION_LONGITUDE, None) if self.source == SOURCE_RECONFIGURE: return self.async_update_reload_and_abort( - self._entry, data=self._config, reason="reconfigure_successful" + self._get_reconfigure_entry(), data=self._config ) return self.async_create_entry( title=self._config[CONF_NAME], @@ -300,9 +297,8 @@ async def async_step_destination_entity( class 
HERETravelTimeOptionsFlow(OptionsFlow):
     """Handle HERE Travel Time options."""
 
-    def __init__(self, config_entry: ConfigEntry) -> None:
+    def __init__(self) -> None:
         """Initialize HERE Travel Time options flow."""
-        self.config_entry = config_entry
         self._config: dict[str, Any] = {}
 
     async def async_step_init(
diff --git a/homeassistant/components/history_stats/__init__.py b/homeassistant/components/history_stats/__init__.py
index dcca10d73e9b96..63f32138dba351 100644
--- a/homeassistant/components/history_stats/__init__.py
+++ b/homeassistant/components/history_stats/__init__.py
@@ -41,7 +41,7 @@ async def async_setup_entry(
         Template(end, hass) if end else None,
         duration,
     )
-    coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, entry.title)
+    coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, entry, entry.title)
     await coordinator.async_config_entry_first_refresh()
 
     entry.runtime_data = coordinator
diff --git a/homeassistant/components/history_stats/coordinator.py b/homeassistant/components/history_stats/coordinator.py
index 0d613d2bbc07c7..fafbb5d3ce01a2 100644
--- a/homeassistant/components/history_stats/coordinator.py
+++ b/homeassistant/components/history_stats/coordinator.py
@@ -6,6 +6,7 @@
 import logging
 from typing import Any
 
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import (
     CALLBACK_TYPE,
     Event,
@@ -33,6 +34,7 @@ def __init__(
         self,
         hass: HomeAssistant,
         history_stats: HistoryStats,
+        config_entry: ConfigEntry | None,
         name: str,
     ) -> None:
         """Initialize DataUpdateCoordinator."""
@@ -43,6 +45,7 @@ def __init__(
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=name,
             update_interval=UPDATE_INTERVAL,
         )
diff --git a/homeassistant/components/history_stats/sensor.py b/homeassistant/components/history_stats/sensor.py
index 4558da8722c24b..e1241034aebc7e 100644
--- a/homeassistant/components/history_stats/sensor.py
+++ b/homeassistant/components/history_stats/sensor.py
@@ -104,7 +104,7 @@ async def async_setup_platform(
     unique_id: str | None = config.get(CONF_UNIQUE_ID)
 
     history_stats = HistoryStats(hass, entity_id, entity_states, start, end, duration)
-    coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, name)
+    coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, None, name)
     await coordinator.async_refresh()
     if not coordinator.last_update_success:
         raise PlatformNotReady from coordinator.last_exception
diff --git a/homeassistant/components/history_stats/strings.json b/homeassistant/components/history_stats/strings.json
index 603a6b8c4dced1..8961d66118d287 100644
--- a/homeassistant/components/history_stats/strings.json
+++ b/homeassistant/components/history_stats/strings.json
@@ -1,4 +1,5 @@
 {
+  "title": "History Stats",
   "config": {
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
diff --git a/homeassistant/components/hive/alarm_control_panel.py b/homeassistant/components/hive/alarm_control_panel.py
index 34d5d3d10c6924..2b196ce820b7ac 100644
--- a/homeassistant/components/hive/alarm_control_panel.py
+++ b/homeassistant/components/hive/alarm_control_panel.py
@@ -7,14 +7,9 @@
 from homeassistant.components.alarm_control_panel import (
     AlarmControlPanelEntity,
     AlarmControlPanelEntityFeature,
+    AlarmControlPanelState,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-    STATE_ALARM_ARMED_AWAY,
-    STATE_ALARM_ARMED_NIGHT,
-    STATE_ALARM_DISARMED,
-    STATE_ALARM_TRIGGERED,
-)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
@@ -24,10 +19,10 @@
 PARALLEL_UPDATES = 0
 SCAN_INTERVAL = timedelta(seconds=15)
 HIVETOHA = {
-    "home": STATE_ALARM_DISARMED,
-    "asleep": STATE_ALARM_ARMED_NIGHT,
-    "away": STATE_ALARM_ARMED_AWAY,
-    "sos": STATE_ALARM_TRIGGERED,
+    "home": AlarmControlPanelState.DISARMED,
+    "asleep": AlarmControlPanelState.ARMED_NIGHT,
+    "away": AlarmControlPanelState.ARMED_AWAY,
+    "sos": AlarmControlPanelState.TRIGGERED,
 }
 
 
@@ -76,6 +71,6 @@ async def async_update(self) -> None:
         self._attr_available = self.device["deviceData"].get("online")
         if self._attr_available:
             if self.device["status"]["state"]:
-                self._attr_state = STATE_ALARM_TRIGGERED
+                self._attr_alarm_state = AlarmControlPanelState.TRIGGERED
             else:
-                self._attr_state = HIVETOHA[self.device["status"]["mode"]]
+                self._attr_alarm_state = HIVETOHA[self.device["status"]["mode"]]
diff --git a/homeassistant/components/hive/config_flow.py b/homeassistant/components/hive/config_flow.py
index d6be2d1efabd58..a997954f4ccd42 100644
--- a/homeassistant/components/hive/config_flow.py
+++ b/homeassistant/components/hive/config_flow.py
@@ -182,7 +182,6 @@ class HiveOptionsFlowHandler(OptionsFlow):
     def __init__(self, config_entry: ConfigEntry) -> None:
         """Initialize Hive options flow."""
         self.hive = None
-        self.config_entry = config_entry
         self.interval = config_entry.options.get(CONF_SCAN_INTERVAL, 120)
 
     async def async_step_init(
diff --git a/homeassistant/components/holiday/config_flow.py b/homeassistant/components/holiday/config_flow.py
index 32b85b5a41d5fc..27b13e34851bc3 100644
--- a/homeassistant/components/holiday/config_flow.py
+++ b/homeassistant/components/holiday/config_flow.py
@@ -8,7 +8,7 @@
 from holidays import list_supported_countries
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_COUNTRY
 from homeassistant.helpers.selector import (
     CountrySelector,
@@ -27,7 +27,6 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Holiday."""
 
     VERSION = 1
-    config_entry: ConfigEntry
 
     def __init__(self) -> None:
         """Initialize the config flow."""
@@ -115,15 +114,9 @@ async def async_step_reconfigure(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle the re-configuration of a province."""
-        self.config_entry = self._get_reconfigure_entry()
-        return await self.async_step_reconfigure_confirm()
-
-    async def async_step_reconfigure_confirm(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle the re-configuration of a province."""
+        reconfigure_entry = self._get_reconfigure_entry()
         if user_input is not None:
-            combined_input: dict[str, Any] = {**self.config_entry.data, **user_input}
+            combined_input: dict[str, Any] = {**reconfigure_entry.data, **user_input}
             country = combined_input[CONF_COUNTRY]
             province = combined_input.get(CONF_PROVINCE)
 
@@ -145,10 +138,7 @@ async def async_step_reconfigure_confirm(
             name = f"{locale.territories[country]}{province_str}"
 
             return self.async_update_reload_and_abort(
-                self.config_entry,
-                title=name,
-                data=combined_input,
-                reason="reconfigure_successful",
+                reconfigure_entry, title=name, data=combined_input
             )
 
         province_schema = vol.Schema(
             {
                 vol.Optional(CONF_PROVINCE): SelectSelector(
                     SelectSelectorConfig(
                         options=SUPPORTED_COUNTRIES[
-                            self.config_entry.data[CONF_COUNTRY]
+                            reconfigure_entry.data[CONF_COUNTRY]
                         ],
                         mode=SelectSelectorMode.DROPDOWN,
                     )
@@ -164,6 +154,4 @@ async def async_step_reconfigure_confirm(
             }
         )
 
-        return self.async_show_form(
-            step_id="reconfigure_confirm", data_schema=province_schema
-        )
+        return self.async_show_form(step_id="reconfigure", data_schema=province_schema)
diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json
index 30cfd34e0fb50a..8c64f492d42b24 100644
--- a/homeassistant/components/holiday/manifest.json
+++ b/homeassistant/components/holiday/manifest.json
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.57", "babel==2.15.0"]
+  "requirements": ["holidays==0.60", "babel==2.15.0"]
 }
diff --git a/homeassistant/components/holiday/strings.json b/homeassistant/components/holiday/strings.json
index de013f44d6083a..ae4930ecdb44be 100644
--- a/homeassistant/components/holiday/strings.json
+++ b/homeassistant/components/holiday/strings.json
@@ -16,7 +16,7 @@
           "province": "Province"
         }
       },
-      "reconfigure_confirm": {
+      "reconfigure": {
         "data": {
           "province": "[%key:component::holiday::config::step::province::data::province%]"
         }
diff --git a/homeassistant/components/home_connect/__init__.py b/homeassistant/components/home_connect/__init__.py
index 87f4bfa77993d0..c60515eb57f39c 100644
--- a/homeassistant/components/home_connect/__init__.py
+++ b/homeassistant/components/home_connect/__init__.py
@@ -10,7 +10,7 @@
 import voluptuous as vol
 
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import ATTR_DEVICE_ID, CONF_DEVICE, Platform
+from homeassistant.const import ATTR_DEVICE_ID, Platform
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import (
     config_entry_oauth2_flow,
@@ -79,7 +79,14 @@
 SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str})
 
 
-PLATFORMS = [Platform.BINARY_SENSOR, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH]
+PLATFORMS = [
+    Platform.BINARY_SENSOR,
+    Platform.LIGHT,
+    Platform.NUMBER,
+    Platform.SENSOR,
+    Platform.SWITCH,
+    Platform.TIME,
+]
 
 
 def _get_appliance_by_device_id(
@@ -87,8 +94,7 @@
 ) -> api.HomeConnectDevice:
     """Return a Home Connect appliance instance given an device_id."""
     for hc_api in hass.data[DOMAIN].values():
-        for dev_dict in hc_api.devices:
-            device = dev_dict[CONF_DEVICE]
+        for device in hc_api.devices:
             if device.device_id == device_id:
                 return device.appliance
     raise ValueError(f"Appliance for device id {device_id} not found")
@@ -255,9 +261,7 @@ async def update_all_devices(hass: HomeAssistant, entry: ConfigEntry) -> None:
     device_registry = dr.async_get(hass)
     try:
         await hass.async_add_executor_job(hc_api.get_devices)
-        for device_dict in hc_api.devices:
-            device = device_dict["device"]
-
+        for device in hc_api.devices:
             device_entry = device_registry.async_get_or_create(
                 config_entry_id=entry.entry_id,
                 identifiers={(DOMAIN, device.appliance.haId)},
@@ -299,3 +303,14 @@ def update_unique_id(
 
     _LOGGER.debug("Migration to version %s successful", config_entry.version)
     return True
+
+
+def get_dict_from_home_connect_error(err: api.HomeConnectError) -> dict[str, Any]:
+    """Return a dict from a Home Connect error."""
+    return (
+        err.args[0]
+        if len(err.args) > 0 and isinstance(err.args[0], dict)
+        else {"description": err.args[0]}
+        if len(err.args) > 0 and isinstance(err.args[0], str)
else {} + ) diff --git a/homeassistant/components/home_connect/api.py b/homeassistant/components/home_connect/api.py index 4324edc8c1ef69..453f926c402f8f 100644 --- a/homeassistant/components/home_connect/api.py +++ b/homeassistant/components/home_connect/api.py @@ -1,50 +1,17 @@ """API for Home Connect bound to HASS OAuth.""" -from abc import abstractmethod from asyncio import run_coroutine_threadsafe import logging -from typing import Any import homeconnect from homeconnect.api import HomeConnectAppliance, HomeConnectError -from homeassistant.components.sensor import SensorDeviceClass from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_ICON, - CONF_DEVICE, - CONF_ENTITIES, - PERCENTAGE, - UnitOfTime, -) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.dispatcher import dispatcher_send -from .const import ( - ATTR_AMBIENT, - ATTR_BSH_KEY, - ATTR_DESC, - ATTR_DEVICE, - ATTR_KEY, - ATTR_SENSOR_TYPE, - ATTR_SIGN, - ATTR_UNIT, - ATTR_VALUE, - BSH_ACTIVE_PROGRAM, - BSH_AMBIENT_LIGHT_ENABLED, - BSH_COMMON_OPTION_DURATION, - BSH_COMMON_OPTION_PROGRAM_PROGRESS, - BSH_OPERATION_STATE, - BSH_POWER_OFF, - BSH_POWER_STANDBY, - BSH_REMAINING_PROGRAM_TIME, - BSH_REMOTE_CONTROL_ACTIVATION_STATE, - BSH_REMOTE_START_ALLOWANCE_STATE, - COOKING_LIGHTING, - SIGNAL_UPDATE_ENTITIES, -) +from .const import ATTR_KEY, ATTR_VALUE, BSH_ACTIVE_PROGRAM, SIGNAL_UPDATE_ENTITIES _LOGGER = logging.getLogger(__name__) @@ -65,7 +32,7 @@ def __init__( hass, config_entry, implementation ) super().__init__(self.session.token) - self.devices: list[dict[str, Any]] = [] + self.devices: list[HomeConnectDevice] = [] def refresh_tokens(self) -> dict: """Refresh and return new Home Connect tokens using Home Assistant OAuth2 session.""" @@ -75,55 +42,16 @@ def refresh_tokens(self) -> dict: return self.session.token - def get_devices(self) -> list[dict[str, Any]]: + def get_devices(self) -> list[HomeConnectAppliance]: """Get a dictionary of devices.""" - appl = self.get_appliances() - devices = [] - for app in appl: - device: HomeConnectDevice - if app.type == "Dryer": - device = Dryer(self.hass, app) - elif app.type == "Washer": - device = Washer(self.hass, app) - elif app.type == "WasherDryer": - device = WasherDryer(self.hass, app) - elif app.type == "Dishwasher": - device = Dishwasher(self.hass, app) - elif app.type == "FridgeFreezer": - device = FridgeFreezer(self.hass, app) - elif app.type == "Refrigerator": - device = Refrigerator(self.hass, app) - elif app.type == "Freezer": - device = Freezer(self.hass, app) - elif app.type == "Oven": - device = Oven(self.hass, app) - elif app.type == "CoffeeMaker": - device = CoffeeMaker(self.hass, app) - elif app.type == "Hood": - device = Hood(self.hass, app) - elif app.type == "Hob": - device = Hob(self.hass, app) - elif app.type == "CookProcessor": - device = CookProcessor(self.hass, app) - else: - _LOGGER.warning("Appliance type %s not implemented", app.type) - continue - devices.append( - {CONF_DEVICE: device, CONF_ENTITIES: device.get_entity_info()} - ) - self.devices = devices - return devices + appl: list[HomeConnectAppliance] = self.get_appliances() + self.devices = [HomeConnectDevice(self.hass, app) for app in appl] + return self.devices class HomeConnectDevice: """Generic Home Connect device.""" - # for some devices, this is instead BSH_POWER_STANDBY - # see https://developer.home-connect.com/docs/settings/power_state - power_off_state 
= BSH_POWER_OFF - hass: HomeAssistant - appliance: HomeConnectAppliance - def __init__(self, hass: HomeAssistant, appliance: HomeConnectAppliance) -> None: """Initialize the device class.""" self.hass = hass @@ -155,378 +83,3 @@ def event_callback(self, appliance: HomeConnectAppliance) -> None: _LOGGER.debug("Update triggered on %s", appliance.name) _LOGGER.debug(self.appliance.status) dispatcher_send(self.hass, SIGNAL_UPDATE_ENTITIES, appliance.haId) - - @abstractmethod - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with info about the associated entities.""" - raise NotImplementedError - - -class DeviceWithPrograms(HomeConnectDevice): - """Device with programs.""" - - def get_programs_available(self) -> list: - """Get the available programs.""" - try: - programs_available = self.appliance.get_programs_available() - except (HomeConnectError, ValueError): - _LOGGER.debug("Unable to fetch available programs. Probably offline") - programs_available = [] - return programs_available - - def get_program_switches(self) -> list[dict[str, Any]]: - """Get a dictionary with info about program switches. - - There will be one switch for each program. - """ - programs = self.get_programs_available() - return [{ATTR_DEVICE: self, "program_name": p} for p in programs] - - def get_program_sensors(self) -> list[dict[str, Any]]: - """Get a dictionary with info about program sensors. - - There will be one of the four types of sensors for each - device. - """ - sensors = { - BSH_REMAINING_PROGRAM_TIME: ( - "Remaining Program Time", - None, - None, - SensorDeviceClass.TIMESTAMP, - 1, - ), - BSH_COMMON_OPTION_DURATION: ( - "Duration", - UnitOfTime.SECONDS, - "mdi:update", - None, - 1, - ), - BSH_COMMON_OPTION_PROGRAM_PROGRESS: ( - "Program Progress", - PERCENTAGE, - "mdi:progress-clock", - None, - 1, - ), - } - return [ - { - ATTR_DEVICE: self, - ATTR_BSH_KEY: k, - ATTR_DESC: desc, - ATTR_UNIT: unit, - ATTR_ICON: icon, - ATTR_DEVICE_CLASS: device_class, - ATTR_SIGN: sign, - } - for k, (desc, unit, icon, device_class, sign) in sensors.items() - ] - - -class DeviceWithOpState(HomeConnectDevice): - """Device that has an operation state sensor.""" - - def get_opstate_sensor(self) -> list[dict[str, Any]]: - """Get a list with info about operation state sensors.""" - - return [ - { - ATTR_DEVICE: self, - ATTR_BSH_KEY: BSH_OPERATION_STATE, - ATTR_DESC: "Operation State", - ATTR_UNIT: None, - ATTR_ICON: "mdi:state-machine", - ATTR_DEVICE_CLASS: None, - ATTR_SIGN: 1, - } - ] - - -class DeviceWithDoor(HomeConnectDevice): - """Device that has a door sensor.""" - - def get_door_entity(self) -> dict[str, Any]: - """Get a dictionary with info about the door binary sensor.""" - return { - ATTR_DEVICE: self, - ATTR_BSH_KEY: "Door", - ATTR_DESC: "Door", - ATTR_SENSOR_TYPE: "door", - ATTR_DEVICE_CLASS: "door", - } - - -class DeviceWithLight(HomeConnectDevice): - """Device that has lighting.""" - - def get_light_entity(self) -> dict[str, Any]: - """Get a dictionary with info about the lighting.""" - return { - ATTR_DEVICE: self, - ATTR_BSH_KEY: COOKING_LIGHTING, - ATTR_DESC: "Light", - ATTR_AMBIENT: None, - } - - -class DeviceWithAmbientLight(HomeConnectDevice): - """Device that has ambient lighting.""" - - def get_ambientlight_entity(self) -> dict[str, Any]: - """Get a dictionary with info about the ambient lighting.""" - return { - ATTR_DEVICE: self, - ATTR_BSH_KEY: BSH_AMBIENT_LIGHT_ENABLED, - ATTR_DESC: "AmbientLight", - ATTR_AMBIENT: True, - } - - -class 
DeviceWithRemoteControl(HomeConnectDevice): - """Device that has Remote Control binary sensor.""" - - def get_remote_control(self) -> dict[str, Any]: - """Get a dictionary with info about the remote control sensor.""" - return { - ATTR_DEVICE: self, - ATTR_BSH_KEY: BSH_REMOTE_CONTROL_ACTIVATION_STATE, - ATTR_DESC: "Remote Control", - ATTR_SENSOR_TYPE: "remote_control", - } - - -class DeviceWithRemoteStart(HomeConnectDevice): - """Device that has a Remote Start binary sensor.""" - - def get_remote_start(self) -> dict[str, Any]: - """Get a dictionary with info about the remote start sensor.""" - return { - ATTR_DEVICE: self, - ATTR_BSH_KEY: BSH_REMOTE_START_ALLOWANCE_STATE, - ATTR_DESC: "Remote Start", - ATTR_SENSOR_TYPE: "remote_start", - } - - -class Dryer( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Dryer class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Dishwasher( - DeviceWithDoor, - DeviceWithAmbientLight, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Dishwasher class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Oven( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Oven class.""" - - power_off_state = BSH_POWER_STANDBY - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Washer( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Washer class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": 
[door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class WasherDryer( - DeviceWithDoor, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """WasherDryer class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [door_entity, remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class CoffeeMaker(DeviceWithOpState, DeviceWithPrograms, DeviceWithRemoteStart): - """Coffee maker class.""" - - power_off_state = BSH_POWER_STANDBY - - def get_entity_info(self): - """Get a dictionary with infos about the associated entities.""" - remote_start = self.get_remote_start() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class Hood( - DeviceWithLight, - DeviceWithAmbientLight, - DeviceWithOpState, - DeviceWithPrograms, - DeviceWithRemoteControl, - DeviceWithRemoteStart, -): - """Hood class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - remote_control = self.get_remote_control() - remote_start = self.get_remote_start() - light_entity = self.get_light_entity() - ambientlight_entity = self.get_ambientlight_entity() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [remote_control, remote_start], - "switch": program_switches, - "sensor": program_sensors + op_state_sensor, - "light": [light_entity, ambientlight_entity], - } - - -class FridgeFreezer(DeviceWithDoor): - """Fridge/Freezer class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - return {"binary_sensor": [door_entity]} - - -class Refrigerator(DeviceWithDoor): - """Refrigerator class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - return {"binary_sensor": [door_entity]} - - -class Freezer(DeviceWithDoor): - """Freezer class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - door_entity = self.get_door_entity() - return {"binary_sensor": [door_entity]} - - -class Hob(DeviceWithOpState, DeviceWithPrograms, DeviceWithRemoteControl): - """Hob class.""" - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - remote_control = self.get_remote_control() - op_state_sensor = self.get_opstate_sensor() - program_sensors = self.get_program_sensors() - program_switches = self.get_program_switches() - return { - "binary_sensor": [remote_control], - 
"switch": program_switches, - "sensor": program_sensors + op_state_sensor, - } - - -class CookProcessor(DeviceWithOpState): - """CookProcessor class.""" - - power_off_state = BSH_POWER_STANDBY - - def get_entity_info(self) -> dict[str, list[dict[str, Any]]]: - """Get a dictionary with infos about the associated entities.""" - op_state_sensor = self.get_opstate_sensor() - return {"sensor": op_state_sensor} diff --git a/homeassistant/components/home_connect/binary_sensor.py b/homeassistant/components/home_connect/binary_sensor.py index 7c99ee5421f13d..232b581d58bfb2 100644 --- a/homeassistant/components/home_connect/binary_sensor.py +++ b/homeassistant/components/home_connect/binary_sensor.py @@ -1,21 +1,27 @@ """Provides a binary sensor for Home Connect.""" -from dataclasses import dataclass, field +from dataclasses import dataclass import logging +from homeassistant.components.automation import automations_with_entity from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ENTITIES from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from .api import HomeConnectDevice from .const import ( - ATTR_DEVICE, ATTR_VALUE, BSH_DOOR_STATE, BSH_DOOR_STATE_CLOSED, @@ -33,34 +39,79 @@ from .entity import HomeConnectEntity _LOGGER = logging.getLogger(__name__) +REFRIGERATION_DOOR_BOOLEAN_MAP = { + REFRIGERATION_STATUS_DOOR_CLOSED: False, + REFRIGERATION_STATUS_DOOR_OPEN: True, +} @dataclass(frozen=True, kw_only=True) class HomeConnectBinarySensorEntityDescription(BinarySensorEntityDescription): """Entity Description class for binary sensors.""" - desc: str - device_class: BinarySensorDeviceClass | None = BinarySensorDeviceClass.DOOR - boolean_map: dict[str, bool] = field( - default_factory=lambda: { - REFRIGERATION_STATUS_DOOR_CLOSED: False, - REFRIGERATION_STATUS_DOOR_OPEN: True, - } - ) + boolean_map: dict[str, bool] | None = None -BINARY_SENSORS: tuple[HomeConnectBinarySensorEntityDescription, ...] 
= ( +BINARY_SENSORS = ( + HomeConnectBinarySensorEntityDescription( + key=BSH_REMOTE_CONTROL_ACTIVATION_STATE, + translation_key="remote_control", + ), + HomeConnectBinarySensorEntityDescription( + key=BSH_REMOTE_START_ALLOWANCE_STATE, + translation_key="remote_start", + ), + HomeConnectBinarySensorEntityDescription( + key="BSH.Common.Status.LocalControlActive", + translation_key="local_control", + ), + HomeConnectBinarySensorEntityDescription( + key="BSH.Common.Status.BatteryChargingState", + device_class=BinarySensorDeviceClass.BATTERY_CHARGING, + boolean_map={ + "BSH.Common.EnumType.BatteryChargingState.Charging": True, + "BSH.Common.EnumType.BatteryChargingState.Discharging": False, + }, + translation_key="battery_charging_state", + ), + HomeConnectBinarySensorEntityDescription( + key="BSH.Common.Status.ChargingConnection", + device_class=BinarySensorDeviceClass.PLUG, + boolean_map={ + "BSH.Common.EnumType.ChargingConnection.Connected": True, + "BSH.Common.EnumType.ChargingConnection.Disconnected": False, + }, + translation_key="charging_connection", + ), + HomeConnectBinarySensorEntityDescription( + key="ConsumerProducts.CleaningRobot.Status.DustBoxInserted", + translation_key="dust_box_inserted", + ), + HomeConnectBinarySensorEntityDescription( + key="ConsumerProducts.CleaningRobot.Status.Lifted", + translation_key="lifted", + ), + HomeConnectBinarySensorEntityDescription( + key="ConsumerProducts.CleaningRobot.Status.Lost", + translation_key="lost", + ), HomeConnectBinarySensorEntityDescription( key=REFRIGERATION_STATUS_DOOR_CHILLER, - desc="Chiller Door", + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="chiller_door", ), HomeConnectBinarySensorEntityDescription( key=REFRIGERATION_STATUS_DOOR_FREEZER, - desc="Freezer Door", + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="freezer_door", ), HomeConnectBinarySensorEntityDescription( key=REFRIGERATION_STATUS_DOOR_REFRIGERATOR, - desc="Refrigerator Door", + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="refrigerator_door", ), ) @@ -75,18 +126,14 @@ async def async_setup_entry( def get_entities() -> list[BinarySensorEntity]: entities: list[BinarySensorEntity] = [] hc_api = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("binary_sensor", []) - entities += [HomeConnectBinarySensor(**d) for d in entity_dicts] - device: HomeConnectDevice = device_dict[ATTR_DEVICE] - # Auto-discover entities + for device in hc_api.devices: entities.extend( - HomeConnectFridgeDoorBinarySensor( - device=device, entity_description=description - ) + HomeConnectBinarySensor(device, description) for description in BINARY_SENSORS if description.key in device.appliance.status ) + if BSH_DOOR_STATE in device.appliance.status: + entities.append(HomeConnectDoorBinarySensor(device)) return entities async_add_entities(await hass.async_add_executor_job(get_entities), True) @@ -95,28 +142,7 @@ def get_entities() -> list[BinarySensorEntity]: class HomeConnectBinarySensor(HomeConnectEntity, BinarySensorEntity): """Binary sensor for Home Connect.""" - def __init__( - self, - device: HomeConnectDevice, - bsh_key: str, - desc: str, - sensor_type: str, - device_class: BinarySensorDeviceClass | None = None, - ) -> None: - """Initialize the entity.""" - super().__init__(device, bsh_key, desc) - self._attr_device_class 
= device_class - self._type = sensor_type - self._false_value_list = None - self._true_value_list = None - if self._type == "door": - self._update_key = BSH_DOOR_STATE - self._false_value_list = [BSH_DOOR_STATE_CLOSED, BSH_DOOR_STATE_LOCKED] - self._true_value_list = [BSH_DOOR_STATE_OPEN] - elif self._type == "remote_control": - self._update_key = BSH_REMOTE_CONTROL_ACTIVATION_STATE - elif self._type == "remote_start": - self._update_key = BSH_REMOTE_START_ALLOWANCE_STATE + entity_description: HomeConnectBinarySensorEntityDescription @property def available(self) -> bool: @@ -125,59 +151,90 @@ def available(self) -> bool: async def async_update(self) -> None: """Update the binary sensor's status.""" - state = self.device.appliance.status.get(self._update_key, {}) - if not state: + if not self.device.appliance.status or not ( + status := self.device.appliance.status.get(self.bsh_key, {}).get(ATTR_VALUE) + ): self._attr_is_on = None return - - value = state.get(ATTR_VALUE) - if self._false_value_list and self._true_value_list: - if value in self._false_value_list: - self._attr_is_on = False - elif value in self._true_value_list: - self._attr_is_on = True - else: - _LOGGER.warning( - "Unexpected value for HomeConnect %s state: %s", self._type, state - ) - self._attr_is_on = None - elif isinstance(value, bool): - self._attr_is_on = value - else: - _LOGGER.warning( - "Unexpected value for HomeConnect %s state: %s", self._type, state - ) + if self.entity_description.boolean_map: + self._attr_is_on = self.entity_description.boolean_map.get(status) + elif status not in [True, False]: self._attr_is_on = None + else: + self._attr_is_on = status _LOGGER.debug("Updated, new state: %s", self._attr_is_on) -class HomeConnectFridgeDoorBinarySensor(HomeConnectEntity, BinarySensorEntity): - """Binary sensor for Home Connect Fridge Doors.""" +class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): + """Binary sensor for Home Connect Generic Door.""" - entity_description: HomeConnectBinarySensorEntityDescription + _attr_has_entity_name = False def __init__( self, device: HomeConnectDevice, - entity_description: HomeConnectBinarySensorEntityDescription, ) -> None: """Initialize the entity.""" - self.entity_description = entity_description - super().__init__(device, entity_description.key, entity_description.desc) - - async def async_update(self) -> None: - """Update the binary sensor's status.""" - _LOGGER.debug( - "Updating: %s, cur state: %s", - self._attr_unique_id, - self.state, + super().__init__( + device, + HomeConnectBinarySensorEntityDescription( + key=BSH_DOOR_STATE, + device_class=BinarySensorDeviceClass.DOOR, + boolean_map={ + BSH_DOOR_STATE_CLOSED: False, + BSH_DOOR_STATE_LOCKED: False, + BSH_DOOR_STATE_OPEN: True, + }, + ), ) - self._attr_is_on = self.entity_description.boolean_map.get( - self.device.appliance.status.get(self.bsh_key, {}).get(ATTR_VALUE) + self._attr_unique_id = f"{device.appliance.haId}-Door" + self._attr_name = f"{device.appliance.name} Door" + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ 
+ script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_binary_common_door_sensor_{self.entity_id}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_binary_common_door_sensor", + translation_placeholders={ + "entity": self.entity_id, + "items": "\n".join(items_list), + }, ) - self._attr_available = self._attr_is_on is not None - _LOGGER.debug( - "Updated: %s, new state: %s", - self._attr_unique_id, - self.state, + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + async_delete_issue( + self.hass, DOMAIN, f"deprecated_binary_common_door_sensor_{self.entity_id}" ) diff --git a/homeassistant/components/home_connect/const.py b/homeassistant/components/home_connect/const.py index 1da9e517ad5eae..e49a56b9b97f86 100644 --- a/homeassistant/components/home_connect/const.py +++ b/homeassistant/components/home_connect/const.py @@ -36,6 +36,11 @@ COFFEE_EVENT_WATER_TANK_EMPTY = "ConsumerProducts.CoffeeMaker.Event.WaterTankEmpty" COFFEE_EVENT_DRIP_TRAY_FULL = "ConsumerProducts.CoffeeMaker.Event.DripTrayFull" +DISHWASHER_EVENT_SALT_NEARLY_EMPTY = "Dishcare.Dishwasher.Event.SaltNearlyEmpty" +DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY = ( + "Dishcare.Dishwasher.Event.RinseAidNearlyEmpty" +) + REFRIGERATION_INTERNAL_LIGHT_POWER = "Refrigeration.Common.Setting.Light.Internal.Power" REFRIGERATION_INTERNAL_LIGHT_BRIGHTNESS = ( "Refrigeration.Common.Setting.Light.Internal.Brightness" @@ -95,17 +100,25 @@ SERVICE_SETTING = "change_setting" SERVICE_START_PROGRAM = "start_program" +ATTR_ALLOWED_VALUES = "allowedvalues" ATTR_AMBIENT = "ambient" ATTR_BSH_KEY = "bsh_key" +ATTR_CONSTRAINTS = "constraints" ATTR_DESC = "desc" ATTR_DEVICE = "device" ATTR_KEY = "key" ATTR_PROGRAM = "program" ATTR_SENSOR_TYPE = "sensor_type" ATTR_SIGN = "sign" +ATTR_STEPSIZE = "stepsize" ATTR_UNIT = "unit" ATTR_VALUE = "value" +SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME = "appliance_name" +SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID = "entity_id" +SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY = "setting_key" +SVE_TRANSLATION_PLACEHOLDER_VALUE = "value" + OLD_NEW_UNIQUE_ID_SUFFIX_MAP = { "ChildLock": BSH_CHILD_LOCK_STATE, "Operation State": BSH_OPERATION_STATE, diff --git a/homeassistant/components/home_connect/entity.py b/homeassistant/components/home_connect/entity.py index 6cad310f76ac56..0ae4a28b8d46a9 100644 --- a/homeassistant/components/home_connect/entity.py +++ b/homeassistant/components/home_connect/entity.py @@ -5,7 +5,7 @@ from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.entity import Entity, EntityDescription from .api import HomeConnectDevice from .const import DOMAIN, SIGNAL_UPDATE_ENTITIES @@ -17,13 +17,13 @@ class HomeConnectEntity(Entity): """Generic Home Connect entity (base class).""" _attr_should_poll = False + _attr_has_entity_name = True - def __init__(self, device: HomeConnectDevice, bsh_key: str, desc: str) -> None: + def __init__(self, device: HomeConnectDevice, desc: 
EntityDescription) -> None: """Initialize the entity.""" self.device = device - self.bsh_key = bsh_key - self._attr_name = f"{device.appliance.name} {desc}" - self._attr_unique_id = f"{device.appliance.haId}-{bsh_key}" + self.entity_description = desc + self._attr_unique_id = f"{device.appliance.haId}-{self.bsh_key}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, device.appliance.haId)}, manufacturer=device.appliance.brand, @@ -50,3 +50,8 @@ def async_entity_update(self) -> None: """Update the entity.""" _LOGGER.debug("Entity update triggered on %s", self) self.async_schedule_update_ha_state(True) + + @property + def bsh_key(self) -> str: + """Return the BSH key.""" + return self.entity_description.key diff --git a/homeassistant/components/home_connect/icons.json b/homeassistant/components/home_connect/icons.json index 949b30919b53fc..166b2fe2c342a4 100644 --- a/homeassistant/components/home_connect/icons.json +++ b/homeassistant/components/home_connect/icons.json @@ -23,43 +23,135 @@ } }, "entity": { + "binary_sensor": { + "remote_control": { + "default": "mdi:remote", + "state": { + "off": "mdi:remote-off" + } + }, + "remote_start": { + "default": "mdi:remote", + "state": { + "off": "mdi:remote-off" + } + }, + "dust_box_inserted": { + "default": "mdi:download" + }, + "lifted": { + "default": "mdi:arrow-up-right-bold" + }, + "lost": { + "default": "mdi:map-marker-remove-variant" + } + }, "sensor": { - "alarm_sensor_fridge": { + "operation_state": { + "default": "mdi:state-machine", + "state": { + "inactive": "mdi:stop", + "ready": "mdi:check-circle", + "delayedstart": "mdi:progress-clock", + "run": "mdi:play", + "pause": "mdi:pause", + "actionrequired": "mdi:gesture-tap", + "finished": "mdi:flag-checkered", + "error": "mdi:alert-circle", + "aborting": "mdi:close-circle" + } + }, + "door": { + "default": "mdi:door", + "state": { + "closed": "mdi:door-closed", + "locked": "mdi:door-closed-lock", + "open": "mdi:door-open" + } + }, + "program_progress": { + "default": "mdi:progress-clock" + }, + "coffee_counter": { + "default": "mdi:coffee" + }, + "powder_coffee_counter": { + "default": "mdi:coffee" + }, + "hot_water_counter": { + "default": "mdi:cup-water" + }, + "hot_water_cups_counter": { + "default": "mdi:cup" + }, + "hot_milk_counter": { + "default": "mdi:cup" + }, + "frothy_milk_counter": { + "default": "mdi:cup" + }, + "milk_counter": { + "default": "mdi:cup" + }, + "coffee_and_milk": { + "default": "mdi:coffee" + }, + "ristretto_espresso_counter": { + "default": "mdi:coffee" + }, + "camera_state": { + "default": "mdi:camera", + "state": { + "disabled": "mdi:camera-off", + "sleeping": "mdi:sleep", + "error": "mdi:alert-circle-outline" + } + }, + "last_selected_map": { + "default": "mdi:map", + "state": { + "tempmap": "mdi:map-clock-outline", + "map1": "mdi:numeric-1", + "map2": "mdi:numeric-2", + "map3": "mdi:numeric-3" + } + }, + "refrigerator_door_alarm": { "default": "mdi:fridge", "state": { "confirmed": "mdi:fridge-alert-outline", "present": "mdi:fridge-alert" } }, - "alarm_sensor_freezer": { + "freezer_door_alarm": { "default": "mdi:snowflake", "state": { "confirmed": "mdi:snowflake-check", "present": "mdi:snowflake-alert" } }, - "alarm_sensor_temp": { + "freezer_temperature_alarm": { "default": "mdi:thermometer", "state": { "confirmed": "mdi:thermometer-check", "present": "mdi:thermometer-alert" } }, - "alarm_sensor_coffee_bean_container": { + "bean_container_empty": { "default": "mdi:coffee-maker", "state": { "confirmed": "mdi:coffee-maker-check", "present": 
"mdi:coffee-maker-outline" } }, - "alarm_sensor_coffee_water_tank": { + "water_tank_empty": { "default": "mdi:water", "state": { "confirmed": "mdi:water-check", "present": "mdi:water-alert" } }, - "alarm_sensor_coffee_drip_tray": { + "drip_tray_full": { "default": "mdi:tray", "state": { "confirmed": "mdi:tray-full", @@ -68,11 +160,51 @@ } }, "switch": { - "refrigeration_dispenser": { + "power": { + "default": "mdi:power" + }, + "child_lock": { + "default": "mdi:lock", + "state": { + "on": "mdi:lock", + "off": "mdi:lock-off" + } + }, + "cup_warmer": { + "default": "mdi:heat-wave" + }, + "refrigerator_super_mode": { + "default": "mdi:speedometer" + }, + "freezer_super_mode": { + "default": "mdi:speedometer" + }, + "eco_mode": { + "default": "mdi:sprout" + }, + "cooking-oven-setting-sabbath_mode": { + "default": "mdi:volume-mute" + }, + "sabbath_mode": { + "default": "mdi:volume-mute" + }, + "vacation_mode": { + "default": "mdi:beach" + }, + "fresh_mode": { + "default": "mdi:leaf" + }, + "dispenser_enabled": { "default": "mdi:snowflake", "state": { "off": "mdi:snowflake-off" } + }, + "door-assistant_fridge": { + "default": "mdi:door" + }, + "door-assistant_freezer": { + "default": "mdi:door" } } } diff --git a/homeassistant/components/home_connect/light.py b/homeassistant/components/home_connect/light.py index 7f6ea1bb4be570..873e7d24f9360e 100644 --- a/homeassistant/components/home_connect/light.py +++ b/homeassistant/components/home_connect/light.py @@ -10,29 +10,34 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, + ATTR_RGB_COLOR, ColorMode, LightEntity, LightEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DEVICE, CONF_ENTITIES from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util -from .api import HomeConnectDevice +from . import get_dict_from_home_connect_error +from .api import ConfigEntryAuth, HomeConnectDevice from .const import ( ATTR_VALUE, BSH_AMBIENT_LIGHT_BRIGHTNESS, BSH_AMBIENT_LIGHT_COLOR, BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, BSH_AMBIENT_LIGHT_CUSTOM_COLOR, + BSH_AMBIENT_LIGHT_ENABLED, + COOKING_LIGHTING, COOKING_LIGHTING_BRIGHTNESS, DOMAIN, REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS, REFRIGERATION_EXTERNAL_LIGHT_POWER, REFRIGERATION_INTERNAL_LIGHT_BRIGHTNESS, REFRIGERATION_INTERNAL_LIGHT_POWER, + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, ) from .entity import HomeConnectEntity @@ -43,20 +48,40 @@ class HomeConnectLightEntityDescription(LightEntityDescription): """Light entity description.""" - desc: str - brightness_key: str | None + brightness_key: str | None = None + color_key: str | None = None + enable_custom_color_value_key: str | None = None + custom_color_key: str | None = None + brightness_scale: tuple[float, float] = (0.0, 100.0) LIGHTS: tuple[HomeConnectLightEntityDescription, ...] 
= ( HomeConnectLightEntityDescription( key=REFRIGERATION_INTERNAL_LIGHT_POWER, - desc="Internal Light", brightness_key=REFRIGERATION_INTERNAL_LIGHT_BRIGHTNESS, + brightness_scale=(1.0, 100.0), + translation_key="internal_light", ), HomeConnectLightEntityDescription( key=REFRIGERATION_EXTERNAL_LIGHT_POWER, - desc="External Light", brightness_key=REFRIGERATION_EXTERNAL_LIGHT_BRIGHTNESS, + brightness_scale=(1.0, 100.0), + translation_key="external_light", + ), + HomeConnectLightEntityDescription( + key=COOKING_LIGHTING, + brightness_key=COOKING_LIGHTING_BRIGHTNESS, + brightness_scale=(10.0, 100.0), + translation_key="cooking_lighting", + ), + HomeConnectLightEntityDescription( + key=BSH_AMBIENT_LIGHT_ENABLED, + brightness_key=BSH_AMBIENT_LIGHT_BRIGHTNESS, + color_key=BSH_AMBIENT_LIGHT_COLOR, + enable_custom_color_value_key=BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, + custom_color_key=BSH_AMBIENT_LIGHT_CUSTOM_COLOR, + brightness_scale=(10.0, 100.0), + translation_key="ambient_light", ), ) @@ -70,24 +95,13 @@ async def async_setup_entry( def get_entities() -> list[LightEntity]: """Get a list of entities.""" - entities: list[LightEntity] = [] - hc_api = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("light", []) - entity_list = [HomeConnectLight(**d) for d in entity_dicts] - device: HomeConnectDevice = device_dict[CONF_DEVICE] - # Auto-discover entities - entities.extend( - HomeConnectCoolingLight( - device=device, - ambient=False, - entity_description=description, - ) - for description in LIGHTS - if description.key in device.appliance.status - ) - entities.extend(entity_list) - return entities + hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] + return [ + HomeConnectLight(device, description) + for description in LIGHTS + for device in hc_api.devices + if description.key in device.appliance.status + ] async_add_entities(await hass.async_add_executor_job(get_entities), True) @@ -95,78 +109,128 @@ def get_entities() -> list[LightEntity]: class HomeConnectLight(HomeConnectEntity, LightEntity): """Light for Home Connect.""" + entity_description: LightEntityDescription + def __init__( - self, device: HomeConnectDevice, bsh_key: str, desc: str, ambient: bool + self, device: HomeConnectDevice, desc: HomeConnectLightEntityDescription ) -> None: """Initialize the entity.""" - super().__init__(device, bsh_key, desc) - self._ambient = ambient - self._percentage_scale = (10, 100) - self._brightness_key: str | None - self._custom_color_key: str | None - self._color_key: str | None - if ambient: - self._brightness_key = BSH_AMBIENT_LIGHT_BRIGHTNESS - self._custom_color_key = BSH_AMBIENT_LIGHT_CUSTOM_COLOR - self._color_key = BSH_AMBIENT_LIGHT_COLOR - self._attr_color_mode = ColorMode.HS - self._attr_supported_color_modes = {ColorMode.HS} - else: - self._brightness_key = COOKING_LIGHTING_BRIGHTNESS - self._custom_color_key = None - self._color_key = None - self._attr_color_mode = ColorMode.BRIGHTNESS - self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + super().__init__(device, desc) + + def get_setting_key_if_setting_exists(setting_key: str | None) -> str | None: + if setting_key and setting_key in device.appliance.status: + return setting_key + return None + + self._brightness_key = get_setting_key_if_setting_exists(desc.brightness_key) + self._custom_color_key = get_setting_key_if_setting_exists( + desc.custom_color_key + ) + self._color_key = get_setting_key_if_setting_exists(desc.color_key) + 
self._enable_custom_color_value_key = desc.enable_custom_color_value_key + self._custom_color_key = get_setting_key_if_setting_exists( + desc.custom_color_key + ) + self._brightness_scale = desc.brightness_scale + + match (self._brightness_key, self._custom_color_key): + case (None, None): + self._attr_color_mode = ColorMode.ONOFF + self._attr_supported_color_modes = {ColorMode.ONOFF} + case (_, None): + self._attr_color_mode = ColorMode.BRIGHTNESS + self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + case (_, _): + self._attr_color_mode = ColorMode.HS + self._attr_supported_color_modes = {ColorMode.HS, ColorMode.RGB} async def async_turn_on(self, **kwargs: Any) -> None: """Switch the light on, change brightness, change color.""" - if self._ambient: - _LOGGER.debug("Switching ambient light on for: %s", self.name) - try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, self.bsh_key, True - ) - except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on ambient light: %s", err) - return - if ATTR_BRIGHTNESS in kwargs or ATTR_HS_COLOR in kwargs: + _LOGGER.debug("Switching light on for: %s", self.name) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, self.bsh_key, True + ) + except HomeConnectError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="turn_on_light", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err + if self._custom_color_key: + if ( + ATTR_RGB_COLOR in kwargs or ATTR_HS_COLOR in kwargs + ) and self._enable_custom_color_value_key: try: await self.hass.async_add_executor_job( self.device.appliance.set_setting, self._color_key, - BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, + self._enable_custom_color_value_key, ) except HomeConnectError as err: - _LOGGER.error("Error while trying selecting customcolor: %s", err) - if self._attr_brightness is not None: - brightness_arg = self._attr_brightness - if ATTR_BRIGHTNESS in kwargs: - brightness_arg = kwargs[ATTR_BRIGHTNESS] - - brightness = ceil( - color_util.brightness_to_value( - self._percentage_scale, brightness_arg - ) + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="select_light_custom_color", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err + + if ATTR_RGB_COLOR in kwargs: + hex_val = color_util.color_rgb_to_hex(*kwargs[ATTR_RGB_COLOR]) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, + self._custom_color_key, + f"#{hex_val}", ) - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + except HomeConnectError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="set_light_color", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err + elif (ATTR_BRIGHTNESS in kwargs or ATTR_HS_COLOR in kwargs) and ( + self._attr_brightness is not None or ATTR_BRIGHTNESS in kwargs + ): + brightness = 10 + ceil( + color_util.brightness_to_value( + self._brightness_scale, + kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness), + ) + ) - if hs_color is not None: - rgb = color_util.color_hsv_to_RGB( - hs_color[0], hs_color[1], brightness + hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + + if hs_color is not None: + rgb = 
color_util.color_hsv_to_RGB( + hs_color[0], hs_color[1], brightness + ) + hex_val = color_util.color_rgb_to_hex(*rgb) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, + self._custom_color_key, + f"#{hex_val}", ) - hex_val = color_util.color_rgb_to_hex(rgb[0], rgb[1], rgb[2]) - try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, - self._custom_color_key, - f"#{hex_val}", - ) - except HomeConnectError as err: - _LOGGER.error( - "Error while trying setting the color: %s", err - ) - - elif ATTR_BRIGHTNESS in kwargs: + except HomeConnectError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="set_light_color", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err + + elif self._brightness_key and ATTR_BRIGHTNESS in kwargs: _LOGGER.debug( "Changing brightness for: %s, to: %s", self.name, @@ -174,7 +238,7 @@ async def async_turn_on(self, **kwargs: Any) -> None: ) brightness = ceil( color_util.brightness_to_value( - self._percentage_scale, kwargs[ATTR_BRIGHTNESS] + self._brightness_scale, kwargs[ATTR_BRIGHTNESS] ) ) try: @@ -182,15 +246,14 @@ async def async_turn_on(self, **kwargs: Any) -> None: self.device.appliance.set_setting, self._brightness_key, brightness ) except HomeConnectError as err: - _LOGGER.error("Error while trying set the brightness: %s", err) - else: - _LOGGER.debug("Switching light on for: %s", self.name) - try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, self.bsh_key, True - ) - except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on light: %s", err) + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="set_light_brightness", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err self.async_entity_update() @@ -202,7 +265,14 @@ async def async_turn_off(self, **kwargs: Any) -> None: self.device.appliance.set_setting, self.bsh_key, False ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn off light: %s", err) + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="turn_off_light", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + }, + ) from err self.async_entity_update() async def async_update(self) -> None: @@ -218,46 +288,33 @@ async def async_update(self) -> None: _LOGGER.debug("Updated, new light state: %s", self._attr_is_on) - if self._ambient: + if self._custom_color_key: color = self.device.appliance.status.get(self._custom_color_key, {}) if not color: + self._attr_rgb_color = None self._attr_hs_color = None self._attr_brightness = None else: - colorvalue = color.get(ATTR_VALUE)[1:] - rgb = color_util.rgb_hex_to_rgb_list(colorvalue) - hsv = color_util.color_RGB_to_hsv(rgb[0], rgb[1], rgb[2]) + color_value = color.get(ATTR_VALUE)[1:] + rgb = color_util.rgb_hex_to_rgb_list(color_value) + self._attr_rgb_color = (rgb[0], rgb[1], rgb[2]) + hsv = color_util.color_RGB_to_hsv(*rgb) self._attr_hs_color = (hsv[0], hsv[1]) self._attr_brightness = color_util.value_to_brightness( - self._percentage_scale, hsv[2] + self._brightness_scale, hsv[2] ) - _LOGGER.debug("Updated, new brightness: %s", self._attr_brightness) - - else: + _LOGGER.debug( + "Updated, new color (%s) and new brightness 
(%s) ", + color_value, + self._attr_brightness, + ) + elif self._brightness_key: brightness = self.device.appliance.status.get(self._brightness_key, {}) if brightness is None: self._attr_brightness = None else: self._attr_brightness = color_util.value_to_brightness( - self._percentage_scale, brightness[ATTR_VALUE] + self._brightness_scale, brightness[ATTR_VALUE] ) _LOGGER.debug("Updated, new brightness: %s", self._attr_brightness) - - -class HomeConnectCoolingLight(HomeConnectLight): - """Light entity for Cooling Appliances.""" - - def __init__( - self, - device: HomeConnectDevice, - ambient: bool, - entity_description: HomeConnectLightEntityDescription, - ) -> None: - """Initialize Cooling Light Entity.""" - super().__init__( - device, entity_description.key, entity_description.desc, ambient - ) - self.entity_description = entity_description - self._brightness_key = entity_description.brightness_key - self._percentage_scale = (1, 100) diff --git a/homeassistant/components/home_connect/manifest.json b/homeassistant/components/home_connect/manifest.json index 389386e42afb3b..e041e13d36bf59 100644 --- a/homeassistant/components/home_connect/manifest.json +++ b/homeassistant/components/home_connect/manifest.json @@ -1,7 +1,7 @@ { "domain": "home_connect", "name": "Home Connect", - "codeowners": ["@DavidMStraub"], + "codeowners": ["@DavidMStraub", "@Diegorro98"], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/home_connect", diff --git a/homeassistant/components/home_connect/number.py b/homeassistant/components/home_connect/number.py new file mode 100644 index 00000000000000..ad853df77d0847 --- /dev/null +++ b/homeassistant/components/home_connect/number.py @@ -0,0 +1,167 @@ +"""Provides number enties for Home Connect.""" + +import logging + +from homeconnect.api import HomeConnectError + +from homeassistant.components.number import ( + ATTR_MAX, + ATTR_MIN, + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import get_dict_from_home_connect_error +from .api import ConfigEntryAuth +from .const import ( + ATTR_CONSTRAINTS, + ATTR_STEPSIZE, + ATTR_UNIT, + ATTR_VALUE, + DOMAIN, + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, + SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY, + SVE_TRANSLATION_PLACEHOLDER_VALUE, +) +from .entity import HomeConnectEntity + +_LOGGER = logging.getLogger(__name__) + + +NUMBERS = ( + NumberEntityDescription( + key="Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="refrigerator_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.FridgeFreezer.Setting.SetpointTemperatureFreezer", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="freezer_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.BottleCooler.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="bottle_cooler_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.ChillerLeft.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="chiller_left_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.ChillerCommon.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="chiller_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.ChillerRight.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="chiller_right_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.WineCompartment.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="wine_compartment_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.WineCompartment2.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="wine_compartment_2_setpoint_temperature", + ), + NumberEntityDescription( + key="Refrigeration.Common.Setting.WineCompartment3.SetpointTemperature", + device_class=NumberDeviceClass.TEMPERATURE, + translation_key="wine_compartment_3_setpoint_temperature", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Home Connect number.""" + + def get_entities() -> list[HomeConnectNumberEntity]: + """Get a list of entities.""" + hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] + return [ + HomeConnectNumberEntity(device, description) + for description in NUMBERS + for device in hc_api.devices + if description.key in device.appliance.status + ] + + async_add_entities(await hass.async_add_executor_job(get_entities), True) + + +class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity): + """Number setting class for Home Connect.""" + + async def async_set_native_value(self, value: float) -> None: + """Set the native value of the entity.""" + _LOGGER.debug( + "Tried to set value %s to %s for %s", + value, + self.bsh_key, + self.entity_id, + ) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, + self.bsh_key, + value, + ) + except HomeConnectError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="set_setting", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + 
SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + }, + ) from err + + async def async_fetch_constraints(self) -> None: + """Fetch the max and min values and step for the number entity.""" + try: + data = await self.hass.async_add_executor_job( + self.device.appliance.get, f"/settings/{self.bsh_key}" + ) + except HomeConnectError as err: + _LOGGER.error("An error occurred: %s", err) + return + if not data or not (constraints := data.get(ATTR_CONSTRAINTS)): + return + self._attr_native_max_value = constraints.get(ATTR_MAX) + self._attr_native_min_value = constraints.get(ATTR_MIN) + self._attr_native_step = constraints.get(ATTR_STEPSIZE) + self._attr_native_unit_of_measurement = data.get(ATTR_UNIT) + + async def async_update(self) -> None: + """Update the number setting status.""" + if not (data := self.device.appliance.status.get(self.bsh_key)): + _LOGGER.error("No value for %s", self.bsh_key) + self._attr_native_value = None + return + self._attr_native_value = data.get(ATTR_VALUE, None) + _LOGGER.debug("Updated, new value: %s", self._attr_native_value) + + if ( + not hasattr(self, "_attr_native_min_value") + or self._attr_native_min_value is None + or not hasattr(self, "_attr_native_max_value") + or self._attr_native_max_value is None + or not hasattr(self, "_attr_native_step") + or self._attr_native_step is None + ): + await self.async_fetch_constraints() diff --git a/homeassistant/components/home_connect/sensor.py b/homeassistant/components/home_connect/sensor.py index 599156a6b3a28e..70096313d86b28 100644 --- a/homeassistant/components/home_connect/sensor.py +++ b/homeassistant/components/home_connect/sensor.py @@ -1,26 +1,30 @@ """Provides a sensor for Home Connect.""" -from dataclasses import dataclass, field +import contextlib +from dataclasses import dataclass from datetime import datetime, timedelta import logging from typing import cast +from homeconnect.api import HomeConnectError + from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ENTITIES +from homeassistant.const import PERCENTAGE, UnitOfTime, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import slugify import homeassistant.util.dt as dt_util -from .api import ConfigEntryAuth, HomeConnectDevice +from .api import ConfigEntryAuth from .const import ( - ATTR_DEVICE, ATTR_VALUE, - BSH_EVENT_PRESENT_STATE_OFF, + BSH_DOOR_STATE, BSH_OPERATION_STATE, BSH_OPERATION_STATE_FINISHED, BSH_OPERATION_STATE_PAUSE, @@ -28,6 +32,8 @@ COFFEE_EVENT_BEAN_CONTAINER_EMPTY, COFFEE_EVENT_DRIP_TRAY_FULL, COFFEE_EVENT_WATER_TANK_EMPTY, + DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + DISHWASHER_EVENT_SALT_NEARLY_EMPTY, DOMAIN, REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, REFRIGERATION_EVENT_DOOR_ALARM_REFRIGERATOR, @@ -38,49 +44,210 @@ _LOGGER = logging.getLogger(__name__) +EVENT_OPTIONS = ["confirmed", "off", "present"] + + @dataclass(frozen=True, kw_only=True) class HomeConnectSensorEntityDescription(SensorEntityDescription): """Entity Description class for sensors.""" - device_class: SensorDeviceClass | None = SensorDeviceClass.ENUM - options: list[str] | None = field( - default_factory=lambda: ["confirmed", "off", "present"] - ) - desc: str - appliance_types: tuple[str, ...] 
+ default_value: str | None = None + appliance_types: tuple[str, ...] | None = None + sign: int = 1 + + +BSH_PROGRAM_SENSORS = ( + HomeConnectSensorEntityDescription( + key="BSH.Common.Option.RemainingProgramTime", + device_class=SensorDeviceClass.TIMESTAMP, + sign=1, + translation_key="program_finish_time", + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Option.Duration", + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + sign=1, + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Option.ProgramProgress", + native_unit_of_measurement=PERCENTAGE, + sign=1, + translation_key="program_progress", + ), +) +SENSORS = ( + HomeConnectSensorEntityDescription( + key=BSH_OPERATION_STATE, + device_class=SensorDeviceClass.ENUM, + options=[ + "inactive", + "ready", + "delayedstart", + "run", + "pause", + "actionrequired", + "finished", + "error", + "aborting", + ], + translation_key="operation_state", + ), + HomeConnectSensorEntityDescription( + key=BSH_DOOR_STATE, + device_class=SensorDeviceClass.ENUM, + options=[ + "closed", + "locked", + "open", + ], + translation_key="door", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterCoffee", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="coffee_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterPowderCoffee", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="powder_coffee_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterHotWater", + native_unit_of_measurement=UnitOfVolume.MILLILITERS, + device_class=SensorDeviceClass.VOLUME, + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="hot_water_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterHotWaterCups", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="hot_water_cups_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterHotMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="hot_milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterFrothyMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="frothy_milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterCoffeeAndMilk", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="coffee_and_milk_counter", + ), + HomeConnectSensorEntityDescription( + key="ConsumerProducts.CoffeeMaker.Status.BeverageCounterRistrettoEspresso", + state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="ristretto_espresso_counter", + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Status.BatteryLevel", + device_class=SensorDeviceClass.BATTERY, + translation_key="battery_level", + ), + HomeConnectSensorEntityDescription( + key="BSH.Common.Status.Video.CameraState", + device_class=SensorDeviceClass.ENUM, + options=[ + "disabled", + "sleeping", + "ready", + "streaminglocal", + "streamingcloud", + "streaminglocalancloud", + "error", + ], + translation_key="camera_state", + ), + 
HomeConnectSensorEntityDescription( + key="ConsumerProducts.CleaningRobot.Status.LastSelectedMap", + device_class=SensorDeviceClass.ENUM, + options=[ + "tempmap", + "map1", + "map2", + "map3", + ], + translation_key="last_selected_map", + ), +) -SENSORS: tuple[HomeConnectSensorEntityDescription, ...] = ( +EVENT_SENSORS = ( HomeConnectSensorEntityDescription( key=REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, - desc="Door Alarm Freezer", + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_door_alarm", appliance_types=("FridgeFreezer", "Freezer"), ), HomeConnectSensorEntityDescription( key=REFRIGERATION_EVENT_DOOR_ALARM_REFRIGERATOR, - desc="Door Alarm Refrigerator", + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="refrigerator_door_alarm", appliance_types=("FridgeFreezer", "Refrigerator"), ), HomeConnectSensorEntityDescription( key=REFRIGERATION_EVENT_TEMP_ALARM_FREEZER, - desc="Temperature Alarm Freezer", + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_temperature_alarm", appliance_types=("FridgeFreezer", "Freezer"), ), HomeConnectSensorEntityDescription( key=COFFEE_EVENT_BEAN_CONTAINER_EMPTY, - desc="Bean Container Empty", + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="bean_container_empty", appliance_types=("CoffeeMaker",), ), HomeConnectSensorEntityDescription( key=COFFEE_EVENT_WATER_TANK_EMPTY, - desc="Water Tank Empty", + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="water_tank_empty", appliance_types=("CoffeeMaker",), ), HomeConnectSensorEntityDescription( key=COFFEE_EVENT_DRIP_TRAY_FULL, - desc="Drip Tray Full", + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="drip_tray_full", appliance_types=("CoffeeMaker",), ), + HomeConnectSensorEntityDescription( + key=DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="salt_nearly_empty", + appliance_types=("Dishwasher",), + ), + HomeConnectSensorEntityDescription( + key=DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="rinse_aid_nearly_empty", + appliance_types=("Dishwasher",), + ), ) @@ -95,18 +262,25 @@ def get_entities() -> list[SensorEntity]: """Get a list of entities.""" entities: list[SensorEntity] = [] hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("sensor", []) - entities += [HomeConnectSensor(**d) for d in entity_dicts] - device: HomeConnectDevice = device_dict[ATTR_DEVICE] - # Auto-discover entities + for device in hc_api.devices: entities.extend( - HomeConnectAlarmSensor( + HomeConnectSensor( device, - entity_description=description, + description, ) + for description in EVENT_SENSORS + if description.appliance_types + and device.appliance.type in description.appliance_types + ) + with contextlib.suppress(HomeConnectError): + if device.appliance.get_programs_available(): + entities.extend( + HomeConnectSensor(device, desc) for desc in BSH_PROGRAM_SENSORS + ) + entities.extend( + HomeConnectSensor(device, description) for description in SENSORS - if device.appliance.type in 
description.appliance_types + if description.key in device.appliance.status ) return entities @@ -116,25 +290,7 @@ def get_entities() -> list[SensorEntity]: class HomeConnectSensor(HomeConnectEntity, SensorEntity): """Sensor class for Home Connect.""" - _key: str - _sign: int - - def __init__( - self, - device: HomeConnectDevice, - bsh_key: str, - desc: str, - unit: str, - icon: str, - device_class: SensorDeviceClass, - sign: int = 1, - ) -> None: - """Initialize the entity.""" - super().__init__(device, bsh_key, desc) - self._sign = sign - self._attr_native_unit_of_measurement = unit - self._attr_icon = icon - self._attr_device_class = device_class + entity_description: HomeConnectSensorEntityDescription @property def available(self) -> bool: @@ -143,78 +299,52 @@ def available(self) -> bool: async def async_update(self) -> None: """Update the sensor's status.""" - status = self.device.appliance.status - if self.bsh_key not in status: - self._attr_native_value = None - elif self.device_class == SensorDeviceClass.TIMESTAMP: - if ATTR_VALUE not in status[self.bsh_key]: - self._attr_native_value = None - elif ( - self._attr_native_value is not None - and self._sign == 1 - and isinstance(self._attr_native_value, datetime) - and self._attr_native_value < dt_util.utcnow() - ): - # if the date is supposed to be in the future but we're - # already past it, set state to None. - self._attr_native_value = None - elif ( - BSH_OPERATION_STATE in status - and ATTR_VALUE in status[BSH_OPERATION_STATE] - and status[BSH_OPERATION_STATE][ATTR_VALUE] - in [ - BSH_OPERATION_STATE_RUN, - BSH_OPERATION_STATE_PAUSE, - BSH_OPERATION_STATE_FINISHED, - ] - ): - seconds = self._sign * float(status[self.bsh_key][ATTR_VALUE]) - self._attr_native_value = dt_util.utcnow() + timedelta(seconds=seconds) - else: - self._attr_native_value = None - else: - self._attr_native_value = status[self.bsh_key].get(ATTR_VALUE) - if self.bsh_key == BSH_OPERATION_STATE: + appliance_status = self.device.appliance.status + if ( + self.bsh_key not in appliance_status + or ATTR_VALUE not in appliance_status[self.bsh_key] + ): + self._attr_native_value = self.entity_description.default_value + _LOGGER.debug("Updated, new state: %s", self._attr_native_value) + return + status = appliance_status[self.bsh_key] + match self.device_class: + case SensorDeviceClass.TIMESTAMP: + if ATTR_VALUE not in status: + self._attr_native_value = None + elif ( + self._attr_native_value is not None + and self.entity_description.sign == 1 + and isinstance(self._attr_native_value, datetime) + and self._attr_native_value < dt_util.utcnow() + ): + # if the date is supposed to be in the future but we're + # already past it, set state to None. 
+ self._attr_native_value = None + elif ( + BSH_OPERATION_STATE + in (appliance_status := self.device.appliance.status) + and ATTR_VALUE in appliance_status[BSH_OPERATION_STATE] + and appliance_status[BSH_OPERATION_STATE][ATTR_VALUE] + in [ + BSH_OPERATION_STATE_RUN, + BSH_OPERATION_STATE_PAUSE, + BSH_OPERATION_STATE_FINISHED, + ] + ): + seconds = self.entity_description.sign * float(status[ATTR_VALUE]) + self._attr_native_value = dt_util.utcnow() + timedelta( + seconds=seconds + ) + else: + self._attr_native_value = None + case SensorDeviceClass.ENUM: # Value comes back as an enum, we only really care about the # last part, so split it off # https://developer.home-connect.com/docs/status/operation_state - self._attr_native_value = cast(str, self._attr_native_value).split(".")[ - -1 - ] + self._attr_native_value = slugify( + cast(str, status.get(ATTR_VALUE)).split(".")[-1] + ) + case _: + self._attr_native_value = status.get(ATTR_VALUE) _LOGGER.debug("Updated, new state: %s", self._attr_native_value) - - -class HomeConnectAlarmSensor(HomeConnectEntity, SensorEntity): - """Sensor entity setup using SensorEntityDescription.""" - - entity_description: HomeConnectSensorEntityDescription - - def __init__( - self, - device: HomeConnectDevice, - entity_description: HomeConnectSensorEntityDescription, - ) -> None: - """Initialize the entity.""" - self.entity_description = entity_description - super().__init__( - device, self.entity_description.key, self.entity_description.desc - ) - - @property - def available(self) -> bool: - """Return true if the sensor is available.""" - return self._attr_native_value is not None - - async def async_update(self) -> None: - """Update the sensor's status.""" - self._attr_native_value = ( - self.device.appliance.status.get(self.bsh_key, {}) - .get(ATTR_VALUE, BSH_EVENT_PRESENT_STATE_OFF) - .rsplit(".", maxsplit=1)[-1] - .lower() - ) - _LOGGER.debug( - "Updated: %s, new state: %s", - self._attr_unique_id, - self._attr_native_value, - ) diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index 1fcd95e9cb2717..eb57d822b155a0 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -21,6 +21,56 @@ "default": "[%key:common::config_flow::create_entry::authenticated%]" } }, + "exceptions": { + "turn_on_light": { + "message": "Error while trying to turn on {entity_id}: {description}" + }, + "turn_off_light": { + "message": "Error while trying to turn off {entity_id}: {description}" + }, + "set_light_brightness": { + "message": "Error while trying to set brightness of {entity_id}: {description}" + }, + "select_light_custom_color": { + "message": "Error while trying to select custom color of {entity_id}: {description}" + }, + "set_light_color": { + "message": "Error while trying to set color of {entity_id}: {description}" + }, + "set_setting": { + "message": "Error while trying to assign the value \"{value}\" to the setting \"{key}\" for {entity_id}: {description}" + }, + "turn_on": { + "message": "Error while trying to turn on {entity_id} ({key}): {description}" + }, + "turn_off": { + "message": "Error while trying to turn off {entity_id} ({key}): {description}" + }, + "start_program": { + "message": "Error while trying to start program {program}: {description}" + }, + "stop_program": { + "message": "Error while trying to stop program {program}: {description}" + }, + "power_on": { + "message": "Error while trying to turn on {appliance_name}: 
{description}" + }, + "power_off": { + "message": "Error while trying to turn off {appliance_name} with value \"{value}\": {description}" + }, + "turn_off_not_supported": { + "message": "{appliance_name} does not support turning off or entering standby mode." + }, + "unable_to_retrieve_turn_off": { + "message": "Unable to turn off {appliance_name} because its support for turning off or entering standby mode could not be determined." + } + }, + "issues": { + "deprecated_binary_common_door_sensor": { + "title": "Deprecated binary door sensor detected in some automations or scripts", + "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + } + }, "services": { "start_program": { "name": "Start program", @@ -135,42 +185,277 @@ } }, "entity": { + "binary_sensor": { + "remote_control": { + "name": "Remote control" + }, + "remote_start": { + "name": "Remote start" + }, + "local_control": { + "name": "Local control" + }, + "battery_charging_state": { + "name": "Battery charging state" + }, + "charging_connection": { + "name": "Charging connection" + }, + "dust_box_inserted": { + "name": "Dust box", + "state": { + "on": "Inserted", + "off": "Not inserted" + } + }, + "lifted": { + "name": "Lifted" + }, + "lost": { + "name": "Lost" + }, + "chiller_door": { + "name": "Chiller door" + }, + "freezer_door": { + "name": "Freezer door" + }, + "refrigerator_door": { + "name": "Refrigerator door" + } + }, + "light": { + "cooking_lighting": { + "name": "Functional light" + }, + "ambient_light": { + "name": "Ambient light" + }, + "external_light": { + "name": "External light" + }, + "internal_light": { + "name": "Internal light" + } + }, + "number": { + "refrigerator_setpoint_temperature": { + "name": "Refrigerator temperature" + }, + "freezer_setpoint_temperature": { + "name": "Freezer temperature" + }, + "bottle_cooler_setpoint_temperature": { + "name": "Bottle cooler temperature" + }, + "chiller_left_setpoint_temperature": { + "name": "Chiller left temperature" + }, + "chiller_setpoint_temperature": { + "name": "Chiller temperature" + }, + "chiller_right_setpoint_temperature": { + "name": "Chiller right temperature" + }, + "wine_compartment_setpoint_temperature": { + "name": "Wine compartment temperature" + }, + "wine_compartment_2_setpoint_temperature": { + "name": "Wine compartment 2 temperature" + }, + "wine_compartment_3_setpoint_temperature": { + "name": "Wine compartment 3 temperature" + } + }, "sensor": { - "alarm_sensor_fridge": { + "program_progress": { + "name": "Program progress" + }, + "program_finish_time": { + "name": "Program finish time" + }, + "operation_state": { + "name": "Operation state", + "state": { + "inactive": "Inactive", + "ready": "Ready", + "delayedstart": "Delayed start", + "run": "Run", + "pause": "[%key:common::state::paused%]", + "actionrequired": "Action required", + "finished": "Finished", + "error": "Error", + "aborting": "Aborting" + } + }, + "door": { + "name": "Door", + "state": { + "closed": "[%key:common::state::closed%]", + "locked": "[%key:common::state::locked%]", + "open": "[%key:common::state::open%]" + } + }, + "coffee_counter": { + "name": "Coffees" + }, + "powder_coffee_counter": { + "name": "Powder coffees" + }, + "hot_water_counter": { + "name": "Hot water" + }, + "hot_water_cups_counter": { + "name": "Hot water 
cups" + }, + "hot_milk_counter": { + "name": "Hot milk cups" + }, + "frothy_milk_counter": { + "name": "Frothy milk cups" + }, + "milk_counter": { + "name": "Milk cups" + }, + "coffee_and_milk_counter": { + "name": "Coffee and milk cups" + }, + "ristretto_espresso_counter": { + "name": "Ristretto espresso cups" + }, + "battery_level": { + "name": "Battery level" + }, + "camera_state": { + "name": "Camera state", + "state": { + "disabled": "[%key:common::state::disabled%]", + "sleeping": "Sleeping", + "ready": "Ready", + "streaminglocal": "Streaming local", + "streamingcloud": "Streaming cloud", + "streaminglocal_and_cloud": "Streaming local and cloud", + "error": "Error" + } + }, + "last_selected_map": { + "name": "Last selected map", + "state": { + "tempmap": "Temporary map", + "map1": "Map 1", + "map2": "Map 2", + "map3": "Map 3" + } + }, + "freezer_door_alarm": { + "name": "Freezer door alarm", "state": { "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "alarm_sensor_freezer": { + "refrigerator_door_alarm": { + "name": "Refrigerator door alarm", "state": { + "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "alarm_sensor_temp": { + "freezer_temperature_alarm": { + "name": "Freezer temperature alarm", "state": { + "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "alarm_sensor_coffee_bean_container": { + "bean_container_empty": { + "name": "Bean container empty", "state": { + "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "alarm_sensor_coffee_water_tank": { + "water_tank_empty": { + "name": "Water tank empty", "state": { + "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "alarm_sensor_coffee_drip_tray": { + "drip_tray_full": { + "name": "Drip tray full", "state": { + "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } + }, + "salt_nearly_empty": { + "name": "Salt nearly empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "rinse_aid_nearly_empty": { + "name": "Rinse aid nearly empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + } + }, + "switch": { + "power": { + "name": "Power" + }, + "child_lock": { + "name": "Child lock" + }, + "cup_warmer": { + "name": "Cup warmer" + }, + "refrigerator_super_mode": { + "name": "Refrigerator super mode" + }, + "freezer_super_mode": { + "name": "Freezer super mode" + }, + "eco_mode": { + "name": "Eco mode" + }, + "sabbath_mode": { + "name": "Sabbath mode" + }, + "vacation_mode": { + "name": "Vacation mode" + }, + "fresh_mode": { + "name": "Fresh mode" + }, + "dispenser_enabled": { + "name": "Dispenser", + "state": { + "off": "[%key:common::state::disabled%]", + "on": 
"[%key:common::state::enabled%]" + } + }, + "door_assistant_fridge": { + "name": "Fridge door assistant" + }, + "door_assistant_freezer": { + "name": "Freezer door assistant" + } + }, + "time": { + "alarm_clock": { + "name": "Alarm clock" } } } diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 6e96b371b82abf..25bbb85278af90 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -1,6 +1,6 @@ """Provides a switch for Home Connect.""" -from dataclasses import dataclass +import contextlib import logging from typing import Any @@ -8,47 +8,97 @@ from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DEVICE, CONF_ENTITIES from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import get_dict_from_home_connect_error from .api import ConfigEntryAuth from .const import ( + ATTR_ALLOWED_VALUES, + ATTR_CONSTRAINTS, ATTR_VALUE, BSH_ACTIVE_PROGRAM, BSH_CHILD_LOCK_STATE, BSH_OPERATION_STATE, + BSH_POWER_OFF, BSH_POWER_ON, + BSH_POWER_STANDBY, BSH_POWER_STATE, DOMAIN, REFRIGERATION_DISPENSER, REFRIGERATION_SUPERMODEFREEZER, REFRIGERATION_SUPERMODEREFRIGERATOR, + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME, + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, + SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY, + SVE_TRANSLATION_PLACEHOLDER_VALUE, ) from .entity import HomeConnectDevice, HomeConnectEntity _LOGGER = logging.getLogger(__name__) - -@dataclass(frozen=True, kw_only=True) -class HomeConnectSwitchEntityDescription(SwitchEntityDescription): - """Switch entity description.""" - - desc: str +APPLIANCES_WITH_PROGRAMS = ( + "CleaningRobot", + "CoffeeMaker", + "Dishwasher", + "Dryer", + "Hood", + "Oven", + "WarmingDrawer", + "Washer", + "WasherDryer", +) -SWITCHES: tuple[HomeConnectSwitchEntityDescription, ...] 
= ( - HomeConnectSwitchEntityDescription( +SWITCHES = ( + SwitchEntityDescription( + key=BSH_CHILD_LOCK_STATE, + translation_key="child_lock", + ), + SwitchEntityDescription( + key="ConsumerProducts.CoffeeMaker.Setting.CupWarmer", + translation_key="cup_warmer", + ), + SwitchEntityDescription( key=REFRIGERATION_SUPERMODEFREEZER, - desc="Supermode Freezer", + translation_key="freezer_super_mode", ), - HomeConnectSwitchEntityDescription( + SwitchEntityDescription( key=REFRIGERATION_SUPERMODEREFRIGERATOR, - desc="Supermode Refrigerator", + translation_key="refrigerator_super_mode", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.EcoMode", + translation_key="eco_mode", + ), + SwitchEntityDescription( + key="Cooking.Oven.Setting.SabbathMode", + translation_key="sabbath_mode", ), - HomeConnectSwitchEntityDescription( + SwitchEntityDescription( + key="Refrigeration.Common.Setting.SabbathMode", + translation_key="sabbath_mode", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.VacationMode", + translation_key="vacation_mode", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.FreshMode", + translation_key="fresh_mode", + ), + SwitchEntityDescription( key=REFRIGERATION_DISPENSER, - desc="Dispenser Enabled", + translation_key="dispenser_enabled", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.Door.AssistantFridge", + translation_key="door_assistant_fridge", + ), + SwitchEntityDescription( + key="Refrigeration.Common.Setting.Door.AssistantFreezer", + translation_key="door_assistant_freezer", ), ) @@ -64,17 +114,20 @@ def get_entities() -> list[SwitchEntity]: """Get a list of entities.""" entities: list[SwitchEntity] = [] hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] - for device_dict in hc_api.devices: - entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("switch", []) - entities.extend(HomeConnectProgramSwitch(**d) for d in entity_dicts) - entities.append(HomeConnectPowerSwitch(device_dict[CONF_DEVICE])) - entities.append(HomeConnectChildLockSwitch(device_dict[CONF_DEVICE])) - # Auto-discover entities - hc_device: HomeConnectDevice = device_dict[CONF_DEVICE] + for device in hc_api.devices: + if device.appliance.type in APPLIANCES_WITH_PROGRAMS: + with contextlib.suppress(HomeConnectError): + programs = device.appliance.get_programs_available() + if programs: + entities.extend( + HomeConnectProgramSwitch(device, program) + for program in programs + ) + entities.append(HomeConnectPowerSwitch(device)) entities.extend( - HomeConnectSwitch(device=hc_device, entity_description=description) + HomeConnectSwitch(device, description) for description in SWITCHES - if description.key in hc_device.appliance.status + if description.key in device.appliance.status ) return entities @@ -85,18 +138,6 @@ def get_entities() -> list[SwitchEntity]: class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): """Generic switch class for Home Connect Binary Settings.""" - entity_description: HomeConnectSwitchEntityDescription - - def __init__( - self, - device: HomeConnectDevice, - entity_description: HomeConnectSwitchEntityDescription, - ) -> None: - """Initialize the entity.""" - self.entity_description = entity_description - self._attr_available = False - super().__init__(device, entity_description.key, entity_description.desc) - async def async_turn_on(self, **kwargs: Any) -> None: """Turn on setting.""" @@ -106,9 +147,16 @@ async def async_turn_on(self, **kwargs: Any) -> None: self.device.appliance.set_setting, 
self.entity_description.key, True ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on: %s", err) self._attr_available = False - return + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="turn_on", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + }, + ) from err self._attr_available = True self.async_entity_update() @@ -124,7 +172,15 @@ async def async_turn_off(self, **kwargs: Any) -> None: except HomeConnectError as err: _LOGGER.error("Error while trying to turn off: %s", err) self._attr_available = False - return + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="turn_off", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + }, + ) from err self._attr_available = True self.async_entity_update() @@ -153,7 +209,10 @@ def __init__(self, device: HomeConnectDevice, program_name: str) -> None: desc = " ".join( ["Program", program_name.split(".")[-3], program_name.split(".")[-1]] ) - super().__init__(device, desc, desc) + super().__init__(device, SwitchEntityDescription(key=program_name)) + self._attr_name = f"{device.appliance.name} {desc}" + self._attr_unique_id = f"{device.appliance.haId}-{desc}" + self._attr_has_entity_name = False self.program_name = program_name async def async_turn_on(self, **kwargs: Any) -> None: @@ -164,7 +223,14 @@ async def async_turn_on(self, **kwargs: Any) -> None: self.device.appliance.start_program, self.program_name ) except HomeConnectError as err: - _LOGGER.error("Error while trying to start program: %s", err) + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="start_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "program": self.program_name, + }, + ) from err self.async_entity_update() async def async_turn_off(self, **kwargs: Any) -> None: @@ -173,7 +239,14 @@ async def async_turn_off(self, **kwargs: Any) -> None: try: await self.hass.async_add_executor_job(self.device.appliance.stop_program) except HomeConnectError as err: - _LOGGER.error("Error while trying to stop program: %s", err) + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="stop_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "program": self.program_name, + }, + ) from err self.async_entity_update() async def async_update(self) -> None: @@ -189,9 +262,26 @@ async def async_update(self) -> None: class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): """Power switch class for Home Connect.""" + power_off_state: str | None + def __init__(self, device: HomeConnectDevice) -> None: """Initialize the entity.""" - super().__init__(device, BSH_POWER_STATE, "Power") + super().__init__( + device, + SwitchEntityDescription(key=BSH_POWER_STATE, translation_key="power"), + ) + if ( + power_state := device.appliance.status.get(BSH_POWER_STATE, {}).get( + ATTR_VALUE + ) + ) and power_state in [BSH_POWER_OFF, BSH_POWER_STANDBY]: + self.power_off_state = power_state + + async def async_added_to_hass(self) -> None: + """Add the entity to the hass instance.""" + await super().async_added_to_hass() + if not hasattr(self, "power_off_state"): + await self.async_fetch_power_off_state() async def async_turn_on(self, **kwargs: Any) -> 
None: """Switch the device on.""" @@ -201,22 +291,54 @@ async def async_turn_on(self, **kwargs: Any) -> None: self.device.appliance.set_setting, BSH_POWER_STATE, BSH_POWER_ON ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on device: %s", err) self._attr_is_on = False + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="power_on", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name, + }, + ) from err self.async_entity_update() async def async_turn_off(self, **kwargs: Any) -> None: """Switch the device off.""" + if not hasattr(self, "power_off_state"): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="unable_to_retrieve_turn_off", + translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name + }, + ) + + if self.power_off_state is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="turn_off_not_supported", + translation_placeholders={ + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name + }, + ) _LOGGER.debug("tried to switch off %s", self.name) try: await self.hass.async_add_executor_job( self.device.appliance.set_setting, BSH_POWER_STATE, - self.device.power_off_state, + self.power_off_state, ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn off device: %s", err) self._attr_is_on = True + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="power_off", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.device.appliance.name, + SVE_TRANSLATION_PLACEHOLDER_VALUE: self.power_off_state, + }, + ) from err self.async_entity_update() async def async_update(self) -> None: @@ -227,8 +349,9 @@ async def async_update(self) -> None: ): self._attr_is_on = True elif ( - self.device.appliance.status.get(BSH_POWER_STATE, {}).get(ATTR_VALUE) - == self.device.power_off_state + hasattr(self, "power_off_state") + and self.device.appliance.status.get(BSH_POWER_STATE, {}).get(ATTR_VALUE) + == self.power_off_state ): self._attr_is_on = False elif self.device.appliance.status.get(BSH_OPERATION_STATE, {}).get( @@ -252,43 +375,23 @@ async def async_update(self) -> None: self._attr_is_on = None _LOGGER.debug("Updated, new state: %s", self._attr_is_on) - -class HomeConnectChildLockSwitch(HomeConnectEntity, SwitchEntity): - """Child lock switch class for Home Connect.""" - - def __init__(self, device: HomeConnectDevice) -> None: - """Initialize the entity.""" - super().__init__(device, BSH_CHILD_LOCK_STATE, "ChildLock") - - async def async_turn_on(self, **kwargs: Any) -> None: - """Switch child lock on.""" - _LOGGER.debug("Tried to switch child lock on device: %s", self.name) + async def async_fetch_power_off_state(self) -> None: + """Fetch the power off state.""" try: - await self.hass.async_add_executor_job( - self.device.appliance.set_setting, BSH_CHILD_LOCK_STATE, True + data = await self.hass.async_add_executor_job( + self.device.appliance.get, f"/settings/{self.bsh_key}" ) except HomeConnectError as err: - _LOGGER.error("Error while trying to turn on child lock on device: %s", err) - self._attr_is_on = False - self.async_entity_update() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Switch child lock off.""" - _LOGGER.debug("Tried to switch off child lock on device: %s", self.name) - try: - await 
self.hass.async_add_executor_job(
-                self.device.appliance.set_setting, BSH_CHILD_LOCK_STATE, False
-            )
-        except HomeConnectError as err:
-            _LOGGER.error(
-                "Error while trying to turn off child lock on device: %s", err
-            )
-            self._attr_is_on = True
-        self.async_entity_update()
+            _LOGGER.error("An error occurred: %s", err)
+            return
+        if not data or not (
+            allowed_values := data.get(ATTR_CONSTRAINTS, {}).get(ATTR_ALLOWED_VALUES)
+        ):
+            return
 
-    async def async_update(self) -> None:
-        """Update the switch's status."""
-        self._attr_is_on = False
-        if self.device.appliance.status.get(BSH_CHILD_LOCK_STATE, {}).get(ATTR_VALUE):
-            self._attr_is_on = True
-        _LOGGER.debug("Updated child lock, new state: %s", self._attr_is_on)
+        if BSH_POWER_OFF in allowed_values:
+            self.power_off_state = BSH_POWER_OFF
+        elif BSH_POWER_STANDBY in allowed_values:
+            self.power_off_state = BSH_POWER_STANDBY
+        else:
+            self.power_off_state = None
diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py
new file mode 100644
index 00000000000000..946a23549384d2
--- /dev/null
+++ b/homeassistant/components/home_connect/time.py
@@ -0,0 +1,109 @@
+"""Provides time entities for Home Connect."""
+
+from datetime import time
+import logging
+
+from homeconnect.api import HomeConnectError
+
+from homeassistant.components.time import TimeEntity, TimeEntityDescription
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ServiceValidationError
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import get_dict_from_home_connect_error
+from .api import ConfigEntryAuth
+from .const import (
+    ATTR_VALUE,
+    DOMAIN,
+    SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID,
+    SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY,
+    SVE_TRANSLATION_PLACEHOLDER_VALUE,
+)
+from .entity import HomeConnectEntity
+
+_LOGGER = logging.getLogger(__name__)
+
+
+TIME_ENTITIES = (
+    TimeEntityDescription(
+        key="BSH.Common.Setting.AlarmClock",
+        translation_key="alarm_clock",
+    ),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: ConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up the Home Connect time entity."""
+
+    def get_entities() -> list[HomeConnectTimeEntity]:
+        """Get a list of entities."""
+        hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id]
+        return [
+            HomeConnectTimeEntity(device, description)
+            for description in TIME_ENTITIES
+            for device in hc_api.devices
+            if description.key in device.appliance.status
+        ]
+
+    async_add_entities(await hass.async_add_executor_job(get_entities), True)
+
+
+def seconds_to_time(seconds: int) -> time:
+    """Convert seconds to a time object."""
+    minutes, sec = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    return time(hour=hours, minute=minutes, second=sec)
+
+
+def time_to_seconds(t: time) -> int:
+    """Convert a time object to seconds."""
+    return t.hour * 3600 + t.minute * 60 + t.second
+
+
+class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity):
+    """Time setting class for Home Connect."""
+
+    async def async_set_value(self, value: time) -> None:
+        """Set the native value of the entity."""
+        _LOGGER.debug(
+            "Tried to set value %s to %s for %s",
+            value,
+            self.bsh_key,
+            self.entity_id,
+        )
+        try:
+            await self.hass.async_add_executor_job(
+                self.device.appliance.set_setting,
+                self.bsh_key,
+                time_to_seconds(value),
+            )
+        except HomeConnectError as err:
+            raise ServiceValidationError(
translation_domain=DOMAIN, + translation_key="set_setting", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + SVE_TRANSLATION_PLACEHOLDER_SETTING_KEY: self.bsh_key, + SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + }, + ) from err + + async def async_update(self) -> None: + """Update the Time setting status.""" + data = self.device.appliance.status.get(self.bsh_key) + if data is None: + _LOGGER.error("No value for %s", self.bsh_key) + self._attr_native_value = None + return + seconds = data.get(ATTR_VALUE, None) + if seconds is not None: + self._attr_native_value = seconds_to_time(seconds) + else: + self._attr_native_value = None + _LOGGER.debug("Updated, new value: %s", self._attr_native_value) diff --git a/homeassistant/components/homeassistant/__init__.py b/homeassistant/components/homeassistant/__init__.py index 6cec47152e5ce7..dc33b0c63e336d 100644 --- a/homeassistant/components/homeassistant/__init__.py +++ b/homeassistant/components/homeassistant/__init__.py @@ -8,9 +8,9 @@ import voluptuous as vol +from homeassistant import config as conf_util, core_config from homeassistant.auth.permissions.const import CAT_ENTITIES, POLICY_CONTROL from homeassistant.components import persistent_notification -import homeassistant.config as conf_util from homeassistant.const import ( ATTR_ELEVATION, ATTR_ENTITY_ID, @@ -269,7 +269,7 @@ async def async_handle_reload_config(call: ServiceCall) -> None: return # auth only processed during startup - await conf_util.async_process_ha_core_config(hass, conf.get(DOMAIN) or {}) + await core_config.async_process_ha_core_config(hass, conf.get(DOMAIN) or {}) async_register_admin_service( hass, DOMAIN, SERVICE_RELOAD_CORE_CONFIG, async_handle_reload_config @@ -282,7 +282,7 @@ async def async_set_location(call: ServiceCall) -> None: "longitude": call.data[ATTR_LONGITUDE], } - if elevation := call.data.get(ATTR_ELEVATION): + if (elevation := call.data.get(ATTR_ELEVATION)) is not None: service_data["elevation"] = elevation await hass.config.async_update(**service_data) diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 29612bd61ed1c5..0dd4eff507d745 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -57,6 +57,14 @@ "title": "[%key:common::config_flow::title::reauth%]", "description": "Reauthentication is needed" }, + "config_entry_unique_id_collision": { + "title": "Multiple {domain} config entries with same unique ID", + "description": "There are multiple {domain} config entries with the same unique ID.\nThe config entries are named {titles}.\n\nTo fix this error, [configure the integration]({configure_url}) and remove all except one of the duplicates.\n\nNote: Another group of duplicates may be revealed after removing these duplicates." + }, + "config_entry_unique_id_collision_many": { + "title": "[%key:component::homeassistant::issues::config_entry_unique_id_collision::title%]", + "description": "There are multiple ({number_of_entries}) {domain} config entries with the same unique ID.\nThe first {title_limit} config entries are named {titles}.\n\nTo fix this error, [configure the integration]({configure_url}) and remove all except one of the duplicates.\n\nNote: Another group of duplicates may be revealed after removing these duplicates." 
+    },
     "integration_not_found": {
       "title": "Integration {domain} not found",
       "fix_flow": {
diff --git a/homeassistant/components/homeassistant/triggers/time.py b/homeassistant/components/homeassistant/triggers/time.py
index 443d9c65d95718..bea6e8a66a7e86 100644
--- a/homeassistant/components/homeassistant/triggers/time.py
+++ b/homeassistant/components/homeassistant/triggers/time.py
@@ -3,7 +3,7 @@
 from collections.abc import Callable
 from datetime import datetime, timedelta
 from functools import partial
-from typing import NamedTuple
+from typing import Any, NamedTuple
 
 import voluptuous as vol
 
@@ -26,7 +26,8 @@
     State,
     callback,
 )
-from homeassistant.helpers import config_validation as cv
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers import config_validation as cv, template
 from homeassistant.helpers.event import (
     async_track_point_in_time,
     async_track_state_change_event,
@@ -37,6 +38,7 @@
 import homeassistant.util.dt as dt_util
 
 _TIME_TRIGGER_ENTITY = vol.All(str, cv.entity_domain(["input_datetime", "sensor"]))
+_TIME_AT_SCHEMA = vol.Any(cv.time, _TIME_TRIGGER_ENTITY)
 
 _TIME_TRIGGER_ENTITY_WITH_OFFSET = vol.Schema(
     {
@@ -45,16 +47,29 @@
     }
 )
 
+
+def valid_at_template(value: Any) -> template.Template:
+    """Validate either a jinja2 template, valid time, or valid trigger entity."""
+    tpl = cv.template(value)
+
+    if tpl.is_static:
+        _TIME_AT_SCHEMA(value)
+
+    return tpl
+
+
 _TIME_TRIGGER_SCHEMA = vol.Any(
     cv.time,
     _TIME_TRIGGER_ENTITY,
     _TIME_TRIGGER_ENTITY_WITH_OFFSET,
+    valid_at_template,
     msg=(
         "Expected HH:MM, HH:MM:SS, an Entity ID with domain 'input_datetime' or "
-        "'sensor', or a combination of a timestamp sensor entity and an offset."
+        "'sensor', a combination of a timestamp sensor entity and an offset, or Limited Template"
     ),
 )
 
+
 TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend(
     {
         vol.Required(CONF_PLATFORM): "time",
@@ -78,6 +93,7 @@ async def async_attach_trigger(
 ) -> CALLBACK_TYPE:
     """Listen for state changes based on configuration."""
     trigger_data = trigger_info["trigger_data"]
+    variables = trigger_info["variables"] or {}
     entities: dict[tuple[str, timedelta], CALLBACK_TYPE] = {}
     removes: list[CALLBACK_TYPE] = []
     job = HassJob(action, f"time trigger {trigger_info}")
@@ -202,6 +218,16 @@ def update_entity_trigger(
     to_track: list[TrackEntity] = []
 
     for at_time in config[CONF_AT]:
+        if isinstance(at_time, template.Template):
+            render = template.render_complex(at_time, variables, limited=True)
+            try:
+                at_time = _TIME_AT_SCHEMA(render)
+            except vol.Invalid as exc:
+                raise HomeAssistantError(
+                    f"Limited Template for 'at' rendered an unexpected value '{render}', expected HH:MM, "
+                    f"HH:MM:SS or Entity ID with domain 'input_datetime' or 'sensor'"
+                ) from exc
+
         if isinstance(at_time, str):
             # entity
             update_entity_trigger(at_time, new_state=hass.states.get(at_time))
diff --git a/homeassistant/components/homeassistant_alerts/coordinator.py b/homeassistant/components/homeassistant_alerts/coordinator.py
index 5d99e1c980fc31..a81824d2376e5c 100644
--- a/homeassistant/components/homeassistant_alerts/coordinator.py
+++ b/homeassistant/components/homeassistant_alerts/coordinator.py
@@ -5,10 +5,11 @@
 
 from awesomeversion import AwesomeVersion, AwesomeVersionStrategy
 
-from homeassistant.components.hassio import get_supervisor_info, is_hassio
+from homeassistant.components.hassio import get_supervisor_info
 from homeassistant.const import __version__
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN, REQUEST_TIMEOUT, UPDATE_INTERVAL diff --git a/homeassistant/components/homeassistant_alerts/manifest.json b/homeassistant/components/homeassistant_alerts/manifest.json index 96e419ad9a2501..0412f43da69c7e 100644 --- a/homeassistant/components/homeassistant_alerts/manifest.json +++ b/homeassistant/components/homeassistant_alerts/manifest.json @@ -1,6 +1,7 @@ { "domain": "homeassistant_alerts", "name": "Home Assistant Alerts", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/core"], "config_flow": false, "documentation": "https://www.home-assistant.io/integrations/homeassistant_alerts", diff --git a/homeassistant/components/homeassistant_green/__init__.py b/homeassistant/components/homeassistant_green/__init__.py index 2d35b5bbed3daa..79688f9d16acc2 100644 --- a/homeassistant/components/homeassistant_green/__init__.py +++ b/homeassistant/components/homeassistant_green/__init__.py @@ -2,10 +2,11 @@ from __future__ import annotations -from homeassistant.components.hassio import get_os_info, is_hassio +from homeassistant.components.hassio import get_os_info from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.hassio import is_hassio async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/homeassistant_green/config_flow.py b/homeassistant/components/homeassistant_green/config_flow.py index 4b71c7f105691a..c9aed577365b74 100644 --- a/homeassistant/components/homeassistant_green/config_flow.py +++ b/homeassistant/components/homeassistant_green/config_flow.py @@ -13,7 +13,6 @@ HassioAPIError, async_get_green_settings, async_set_green_settings, - is_hassio, ) from homeassistant.config_entries import ( ConfigEntry, @@ -23,6 +22,7 @@ ) from homeassistant.core import callback from homeassistant.helpers import selector +from homeassistant.helpers.hassio import is_hassio from .const import DOMAIN @@ -55,9 +55,6 @@ async def async_step_system( self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Home Assistant Green", data={}) diff --git a/homeassistant/components/homeassistant_green/manifest.json b/homeassistant/components/homeassistant_green/manifest.json index d543d562ee3611..78da50603df5c4 100644 --- a/homeassistant/components/homeassistant_green/manifest.json +++ b/homeassistant/components/homeassistant_green/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware", "homeassistant_hardware"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_green", - "integration_type": "hardware" + "integration_type": "hardware", + "single_config_entry": true } diff --git a/homeassistant/components/homeassistant_green/strings.json b/homeassistant/components/homeassistant_green/strings.json index 9066ca64e5c95b..13507439e4b447 100644 --- a/homeassistant/components/homeassistant_green/strings.json +++ b/homeassistant/components/homeassistant_green/strings.json @@ -21,7 +21,6 @@ "abort": { "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", "read_hw_settings_error": "Failed 
to read hardware settings", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "write_hw_settings_error": "Failed to write hardware settings" } } diff --git a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py index b8dc4227ece564..a91fb00c142d07 100644 --- a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py +++ b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py @@ -14,7 +14,6 @@ AddonInfo, AddonManager, AddonState, - is_hassio, ) from homeassistant.components.zha.repairs.wrong_silabs_firmware import ( probe_silabs_firmware_type, @@ -25,10 +24,10 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import callback from homeassistant.data_entry_flow import AbortFlow +from homeassistant.helpers.hassio import is_hassio from . import silabs_multiprotocol_addon from .const import ZHA_DOMAIN @@ -496,13 +495,15 @@ async def async_step_confirm( return await self.async_step_pick_firmware() -class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlowWithConfigEntry): +class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow): """Zigbee and Thread options flow handlers.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, config_entry: ConfigEntry, *args: Any, **kwargs: Any) -> None: """Instantiate options flow.""" super().__init__(*args, **kwargs) + self._config_entry = config_entry + self._probed_firmware_type = ApplicationType(self.config_entry.data["firmware"]) # Make `context` a regular dictionary diff --git a/homeassistant/components/homeassistant_hardware/manifest.json b/homeassistant/components/homeassistant_hardware/manifest.json index 8898cece75ae4f..f692094bc67c2e 100644 --- a/homeassistant/components/homeassistant_hardware/manifest.json +++ b/homeassistant/components/homeassistant_hardware/manifest.json @@ -1,7 +1,7 @@ { "domain": "homeassistant_hardware", "name": "Home Assistant Hardware", - "after_dependencies": ["zha"], + "after_dependencies": ["hassio", "zha"], "codeowners": ["@home-assistant/core"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware", "integration_type": "system" diff --git a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py index 31032ff6a8c935..2b08031405fc9f 100644 --- a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py +++ b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py @@ -17,7 +17,6 @@ AddonManager, AddonState, hostname_from_addon_slug, - is_hassio, ) from homeassistant.config_entries import ( ConfigEntry, @@ -28,6 +27,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -318,7 +318,6 @@ def __init__(self, config_entry: ConfigEntry) -> None: self.start_task: asyncio.Task | None = None self.stop_task: asyncio.Task | None = None self._zha_migration_mgr: ZhaMultiPANMigrationHelper | None = None - self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git 
a/homeassistant/components/homeassistant_hardware/util.py b/homeassistant/components/homeassistant_hardware/util.py index 90cfee076e3f66..0c06ff05e5cf83 100644 --- a/homeassistant/components/homeassistant_hardware/util.py +++ b/homeassistant/components/homeassistant_hardware/util.py @@ -9,9 +9,10 @@ from universal_silabs_flasher.const import ApplicationType -from homeassistant.components.hassio import AddonError, AddonState, is_hassio +from homeassistant.components.hassio import AddonError, AddonState from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.singleton import singleton from .const import ( diff --git a/homeassistant/components/homeassistant_sky_connect/config_flow.py b/homeassistant/components/homeassistant_sky_connect/config_flow.py index b1776624736aea..5c35732312be29 100644 --- a/homeassistant/components/homeassistant_sky_connect/config_flow.py +++ b/homeassistant/components/homeassistant_sky_connect/config_flow.py @@ -12,7 +12,13 @@ firmware_config_flow, silabs_multiprotocol_addon, ) -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigEntryBaseFlow, + ConfigFlowContext, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback from .const import DOCS_WEB_FLASHER_URL, DOMAIN, HardwareVariant @@ -33,10 +39,10 @@ def _get_translation_placeholders(self) -> dict[str, str]: TranslationPlaceholderProtocol = object -class SkyConnectTranslationMixin(TranslationPlaceholderProtocol): +class SkyConnectTranslationMixin(ConfigEntryBaseFlow, TranslationPlaceholderProtocol): """Translation placeholder mixin for Home Assistant SkyConnect.""" - context: dict[str, Any] + context: ConfigFlowContext def _get_translation_placeholders(self) -> dict[str, str]: """Shared translation placeholders.""" diff --git a/homeassistant/components/homeassistant_sky_connect/manifest.json b/homeassistant/components/homeassistant_sky_connect/manifest.json index f56fd24de611c4..27280c6aac321c 100644 --- a/homeassistant/components/homeassistant_sky_connect/manifest.json +++ b/homeassistant/components/homeassistant_sky_connect/manifest.json @@ -1,6 +1,6 @@ { "domain": "homeassistant_sky_connect", - "name": "Home Assistant SkyConnect", + "name": "Home Assistant Connect ZBT-1", "codeowners": ["@home-assistant/core"], "config_flow": true, "dependencies": ["hardware", "usb", "homeassistant_hardware"], diff --git a/homeassistant/components/homeassistant_yellow/__init__.py b/homeassistant/components/homeassistant_yellow/__init__.py index 04abe5a1dcae2d..dc34cc4cdc9592 100644 --- a/homeassistant/components/homeassistant_yellow/__init__.py +++ b/homeassistant/components/homeassistant_yellow/__init__.py @@ -4,7 +4,7 @@ import logging -from homeassistant.components.hassio import get_os_info, is_hassio +from homeassistant.components.hassio import get_os_info from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( check_multi_pan_addon, ) @@ -16,6 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import discovery_flow +from homeassistant.helpers.hassio import is_hassio from .const import FIRMWARE, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py 
b/homeassistant/components/homeassistant_yellow/config_flow.py index 1f4d150e49b556..9edc500917130a 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -77,9 +77,6 @@ async def async_step_system( self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - # We do not actually use any portion of `BaseFirmwareConfigFlow` beyond this await self._probe_firmware_type() diff --git a/homeassistant/components/homeassistant_yellow/manifest.json b/homeassistant/components/homeassistant_yellow/manifest.json index a97150031721a0..caf4d32c746ed4 100644 --- a/homeassistant/components/homeassistant_yellow/manifest.json +++ b/homeassistant/components/homeassistant_yellow/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware", "homeassistant_hardware"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_yellow", - "integration_type": "hardware" + "integration_type": "hardware", + "single_config_entry": true } diff --git a/homeassistant/components/homekit/config_flow.py b/homeassistant/components/homekit/config_flow.py index a63e365ead7c36..53db777482161f 100644 --- a/homeassistant/components/homekit/config_flow.py +++ b/homeassistant/components/homekit/config_flow.py @@ -362,15 +362,14 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for homekit.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.hk_options: dict[str, Any] = {} self.included_cameras: list[str] = [] diff --git a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index eebdc0026fd738..cf74bcc7d67e88 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["pyhap"], "requirements": [ - "HAP-python==4.9.1", + "HAP-python==4.9.2", "fnv-hash-fast==1.0.2", "PyQRCode==1.2.1", "base36==0.1.1" diff --git a/homeassistant/components/homekit/type_covers.py b/homeassistant/components/homekit/type_covers.py index 855c3b71cc4a0d..6752633f3d268e 100644 --- a/homeassistant/components/homekit/type_covers.py +++ b/homeassistant/components/homekit/type_covers.py @@ -19,6 +19,7 @@ ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, CoverEntityFeature, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -28,11 +29,7 @@ SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, STATE_ON, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import ( Event, @@ -72,10 +69,10 @@ ) DOOR_CURRENT_HASS_TO_HK = { - STATE_OPEN: HK_DOOR_OPEN, - STATE_CLOSED: HK_DOOR_CLOSED, - STATE_OPENING: HK_DOOR_OPENING, - STATE_CLOSING: HK_DOOR_CLOSING, + CoverState.OPEN: HK_DOOR_OPEN, + CoverState.CLOSED: HK_DOOR_CLOSED, + CoverState.OPENING: HK_DOOR_OPENING, + CoverState.CLOSING: HK_DOOR_CLOSING, } # HomeKit only has two states for @@ -85,13 +82,13 @@ # Opening is mapped to 0 since the target is Open # Closing is mapped to 1 since the target is Closed DOOR_TARGET_HASS_TO_HK = { 
- STATE_OPEN: HK_DOOR_OPEN, - STATE_CLOSED: HK_DOOR_CLOSED, - STATE_OPENING: HK_DOOR_OPEN, - STATE_CLOSING: HK_DOOR_CLOSED, + CoverState.OPEN: HK_DOOR_OPEN, + CoverState.CLOSED: HK_DOOR_CLOSED, + CoverState.OPENING: HK_DOOR_OPEN, + CoverState.CLOSING: HK_DOOR_CLOSED, } -MOVING_STATES = {STATE_OPENING, STATE_CLOSING} +MOVING_STATES = {CoverState.OPENING, CoverState.CLOSING} _LOGGER = logging.getLogger(__name__) @@ -190,7 +187,7 @@ def set_state(self, value: int) -> None: @callback def async_update_state(self, new_state: State) -> None: """Update cover state after state changed.""" - hass_state = new_state.state + hass_state: CoverState = new_state.state # type: ignore[assignment] target_door_state = DOOR_TARGET_HASS_TO_HK.get(hass_state) current_door_state = DOOR_CURRENT_HASS_TO_HK.get(hass_state) @@ -434,10 +431,11 @@ def move_cover(self, value: int) -> None: @callback def async_update_state(self, new_state: State) -> None: """Update cover position after state changed.""" - position_mapping = {STATE_OPEN: 100, STATE_CLOSED: 0} - hk_position = position_mapping.get(new_state.state) + position_mapping = {CoverState.OPEN: 100, CoverState.CLOSED: 0} + _state: CoverState = new_state.state # type: ignore[assignment] + hk_position = position_mapping.get(_state) if hk_position is not None: - is_moving = new_state.state in MOVING_STATES + is_moving = _state in MOVING_STATES if self.char_current_position.value != hk_position: self.char_current_position.set_value(hk_position) @@ -452,8 +450,8 @@ def async_update_state(self, new_state: State) -> None: def _hass_state_to_position_start(state: str) -> int: """Convert hass state to homekit position state.""" - if state == STATE_OPENING: + if state == CoverState.OPENING: return HK_POSITION_GOING_TO_MAX - if state == STATE_CLOSING: + if state == CoverState.CLOSING: return HK_POSITION_GOING_TO_MIN return HK_POSITION_STOPPED diff --git a/homeassistant/components/homekit/type_lights.py b/homeassistant/components/homekit/type_lights.py index 6b57a03153c081..cde80178c5ee7c 100644 --- a/homeassistant/components/homekit/type_lights.py +++ b/homeassistant/components/homekit/type_lights.py @@ -171,8 +171,9 @@ def _async_send_events(self, _now: datetime) -> None: events = [] service = SERVICE_TURN_ON params: dict[str, Any] = {ATTR_ENTITY_ID: self.entity_id} + has_on = CHAR_ON in char_values - if CHAR_ON in char_values: + if has_on: if not char_values[CHAR_ON]: service = SERVICE_TURN_OFF events.append(f"Set state to {char_values[CHAR_ON]}") @@ -180,7 +181,10 @@ def _async_send_events(self, _now: datetime) -> None: brightness_pct = None if CHAR_BRIGHTNESS in char_values: if char_values[CHAR_BRIGHTNESS] == 0: - events[-1] = "Set state to 0" + if has_on: + events[-1] = "Set state to 0" + else: + events.append("Set state to 0") service = SERVICE_TURN_OFF else: brightness_pct = char_values[CHAR_BRIGHTNESS] diff --git a/homeassistant/components/homekit/type_security_systems.py b/homeassistant/components/homekit/type_security_systems.py index 6ab521b6727036..8634589cb5f576 100644 --- a/homeassistant/components/homekit/type_security_systems.py +++ b/homeassistant/components/homekit/type_security_systems.py @@ -8,6 +8,7 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -17,13 +18,8 @@ SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - 
STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + STATE_UNAVAILABLE, + STATE_UNKNOWN, ) from homeassistant.core import State, callback @@ -43,22 +39,22 @@ HK_ALARM_TRIGGERED = 4 HASS_TO_HOMEKIT_CURRENT = { - STATE_ALARM_ARMED_HOME: HK_ALARM_STAY_ARMED, - STATE_ALARM_ARMED_VACATION: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_AWAY: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, - STATE_ALARM_ARMING: HK_ALARM_DISARMED, - STATE_ALARM_DISARMED: HK_ALARM_DISARMED, - STATE_ALARM_TRIGGERED: HK_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_HOME: HK_ALARM_STAY_ARMED, + AlarmControlPanelState.ARMED_VACATION: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_AWAY: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, + AlarmControlPanelState.ARMING: HK_ALARM_DISARMED, + AlarmControlPanelState.DISARMED: HK_ALARM_DISARMED, + AlarmControlPanelState.TRIGGERED: HK_ALARM_TRIGGERED, } HASS_TO_HOMEKIT_TARGET = { - STATE_ALARM_ARMED_HOME: HK_ALARM_STAY_ARMED, - STATE_ALARM_ARMED_VACATION: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_AWAY: HK_ALARM_AWAY_ARMED, - STATE_ALARM_ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, - STATE_ALARM_ARMING: HK_ALARM_AWAY_ARMED, - STATE_ALARM_DISARMED: HK_ALARM_DISARMED, + AlarmControlPanelState.ARMED_HOME: HK_ALARM_STAY_ARMED, + AlarmControlPanelState.ARMED_VACATION: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_AWAY: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.ARMED_NIGHT: HK_ALARM_NIGHT_ARMED, + AlarmControlPanelState.ARMING: HK_ALARM_AWAY_ARMED, + AlarmControlPanelState.DISARMED: HK_ALARM_DISARMED, } HASS_TO_HOMEKIT_SERVICES = { @@ -124,7 +120,7 @@ def __init__(self, *args: Any) -> None: self.char_current_state = serv_alarm.configure_char( CHAR_CURRENT_SECURITY_STATE, - value=HASS_TO_HOMEKIT_CURRENT[STATE_ALARM_DISARMED], + value=HASS_TO_HOMEKIT_CURRENT[AlarmControlPanelState.DISARMED], valid_values={ key: val for key, val in default_current_states.items() @@ -158,8 +154,16 @@ def set_security_state(self, value: int) -> None: @callback def async_update_state(self, new_state: State) -> None: """Update security state after state changed.""" - hass_state = new_state.state - if (current_state := HASS_TO_HOMEKIT_CURRENT.get(hass_state)) is not None: + hass_state: str | AlarmControlPanelState = new_state.state + if hass_state in {"None", STATE_UNKNOWN, STATE_UNAVAILABLE}: + # Bail out early for no state, unknown or unavailable + return + if hass_state is not None: + hass_state = AlarmControlPanelState(hass_state) + if ( + hass_state + and (current_state := HASS_TO_HOMEKIT_CURRENT.get(hass_state)) is not None + ): self.char_current_state.set_value(current_state) _LOGGER.debug( "%s: Updated current state to %s (%d)", @@ -167,5 +171,8 @@ def async_update_state(self, new_state: State) -> None: hass_state, current_state, ) - if (target_state := HASS_TO_HOMEKIT_TARGET.get(hass_state)) is not None: + if ( + hass_state + and (target_state := HASS_TO_HOMEKIT_TARGET.get(hass_state)) is not None + ): self.char_target_state.set_value(target_state) diff --git a/homeassistant/components/homekit_controller/alarm_control_panel.py b/homeassistant/components/homekit_controller/alarm_control_panel.py index 1cb94926e8bc05..3cb80f2c817ceb 100644 --- a/homeassistant/components/homekit_controller/alarm_control_panel.py +++ b/homeassistant/components/homekit_controller/alarm_control_panel.py @@ -10,17 +10,10 @@ from homeassistant.components.alarm_control_panel import ( 
AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_LEVEL, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - Platform, -) +from homeassistant.const import ATTR_BATTERY_LEVEL, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -29,18 +22,18 @@ from .entity import HomeKitEntity CURRENT_STATE_MAP = { - 0: STATE_ALARM_ARMED_HOME, - 1: STATE_ALARM_ARMED_AWAY, - 2: STATE_ALARM_ARMED_NIGHT, - 3: STATE_ALARM_DISARMED, - 4: STATE_ALARM_TRIGGERED, + 0: AlarmControlPanelState.ARMED_HOME, + 1: AlarmControlPanelState.ARMED_AWAY, + 2: AlarmControlPanelState.ARMED_NIGHT, + 3: AlarmControlPanelState.DISARMED, + 4: AlarmControlPanelState.TRIGGERED, } TARGET_STATE_MAP = { - STATE_ALARM_ARMED_HOME: 0, - STATE_ALARM_ARMED_AWAY: 1, - STATE_ALARM_ARMED_NIGHT: 2, - STATE_ALARM_DISARMED: 3, + AlarmControlPanelState.ARMED_HOME: 0, + AlarmControlPanelState.ARMED_AWAY: 1, + AlarmControlPanelState.ARMED_NIGHT: 2, + AlarmControlPanelState.DISARMED: 3, } @@ -86,7 +79,7 @@ def get_characteristic_types(self) -> list[str]: ] @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" return CURRENT_STATE_MAP[ self.service.value(CharacteristicsTypes.SECURITY_SYSTEM_STATE_CURRENT) @@ -94,21 +87,23 @@ def state(self) -> str: async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - await self.set_alarm_state(STATE_ALARM_DISARMED, code) + await self.set_alarm_state(AlarmControlPanelState.DISARMED, code) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm command.""" - await self.set_alarm_state(STATE_ALARM_ARMED_AWAY, code) + await self.set_alarm_state(AlarmControlPanelState.ARMED_AWAY, code) async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send stay command.""" - await self.set_alarm_state(STATE_ALARM_ARMED_HOME, code) + await self.set_alarm_state(AlarmControlPanelState.ARMED_HOME, code) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send night command.""" - await self.set_alarm_state(STATE_ALARM_ARMED_NIGHT, code) + await self.set_alarm_state(AlarmControlPanelState.ARMED_NIGHT, code) - async def set_alarm_state(self, state: str, code: str | None = None) -> None: + async def set_alarm_state( + self, state: AlarmControlPanelState, code: str | None = None + ) -> None: """Send state command.""" await self.async_put_characteristics( {CharacteristicsTypes.SECURITY_SYSTEM_STATE_TARGET: TARGET_STATE_MAP[state]} diff --git a/homeassistant/components/homekit_controller/climate.py b/homeassistant/components/homekit_controller/climate.py index 3be0af17dbd7dd..4e55c8212be340 100644 --- a/homeassistant/components/homekit_controller/climate.py +++ b/homeassistant/components/homekit_controller/climate.py @@ -8,6 +8,7 @@ from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, + CurrentFanStateValues, CurrentHeaterCoolerStateValues, HeatingCoolingCurrentValues, HeatingCoolingTargetValues, @@ -484,6 +485,7 @@ def get_characteristic_types(self) -> list[str]: CharacteristicsTypes.TEMPERATURE_TARGET, CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT, CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET, + 
CharacteristicsTypes.FAN_STATE_CURRENT, ] async def async_set_temperature(self, **kwargs: Any) -> None: @@ -666,7 +668,19 @@ def hvac_action(self) -> HVACAction | None: return HVACAction.IDLE value = self.service.value(CharacteristicsTypes.HEATING_COOLING_CURRENT) - return CURRENT_MODE_HOMEKIT_TO_HASS.get(value) + current_hass_value = CURRENT_MODE_HOMEKIT_TO_HASS.get(value) + + # If a device has a fan state (such as an Ecobee thermostat) + # show the Fan state when the device is otherwise idle. + if ( + current_hass_value == HVACAction.IDLE + and self.service.has(CharacteristicsTypes.FAN_STATE_CURRENT) + and self.service.value(CharacteristicsTypes.FAN_STATE_CURRENT) + == CurrentFanStateValues.ACTIVE + ): + return HVACAction.FAN + + return current_hass_value @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/homekit_controller/const.py b/homeassistant/components/homekit_controller/const.py index aea5a6661eea48..77deb07b3ddd47 100644 --- a/homeassistant/components/homekit_controller/const.py +++ b/homeassistant/components/homekit_controller/const.py @@ -50,6 +50,7 @@ ServicesTypes.FAN_V2: "fan", ServicesTypes.OCCUPANCY_SENSOR: "binary_sensor", ServicesTypes.TELEVISION: "media_player", + ServicesTypes.FAUCET: "switch", ServicesTypes.VALVE: "switch", ServicesTypes.CAMERA_RTP_STREAM_MANAGEMENT: "camera", ServicesTypes.DOORBELL: "event", diff --git a/homeassistant/components/homekit_controller/cover.py b/homeassistant/components/homekit_controller/cover.py index 33336d5a5ba2e9..d7480a40a93893 100644 --- a/homeassistant/components/homekit_controller/cover.py +++ b/homeassistant/components/homekit_controller/cover.py @@ -14,15 +14,10 @@ CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -33,16 +28,24 @@ STATE_STOPPED = "stopped" CURRENT_GARAGE_STATE_MAP = { - 0: STATE_OPEN, - 1: STATE_CLOSED, - 2: STATE_OPENING, - 3: STATE_CLOSING, + 0: CoverState.OPEN, + 1: CoverState.CLOSED, + 2: CoverState.OPENING, + 3: CoverState.CLOSING, 4: STATE_STOPPED, } -TARGET_GARAGE_STATE_MAP = {STATE_OPEN: 0, STATE_CLOSED: 1, STATE_STOPPED: 2} +TARGET_GARAGE_STATE_MAP = { + CoverState.OPEN: 0, + CoverState.CLOSED: 1, + STATE_STOPPED: 2, +} -CURRENT_WINDOW_STATE_MAP = {0: STATE_CLOSING, 1: STATE_OPENING, 2: STATE_STOPPED} +CURRENT_WINDOW_STATE_MAP = { + 0: CoverState.CLOSING, + 1: CoverState.OPENING, + 2: STATE_STOPPED, +} async def async_setup_entry( @@ -92,25 +95,25 @@ def _state(self) -> str: @property def is_closed(self) -> bool: """Return true if cover is closed, else False.""" - return self._state == STATE_CLOSED + return self._state == CoverState.CLOSED @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state == STATE_CLOSING + return self._state == CoverState.CLOSING @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state == STATE_OPENING + return self._state == CoverState.OPENING async def async_open_cover(self, **kwargs: Any) -> None: """Send open command.""" - await self.set_door_state(STATE_OPEN) + await self.set_door_state(CoverState.OPEN) async def async_close_cover(self, **kwargs: Any) -> None: """Send close command.""" - await 
self.set_door_state(STATE_CLOSED) + await self.set_door_state(CoverState.CLOSED) async def set_door_state(self, state: str) -> None: """Send state command.""" @@ -188,14 +191,14 @@ def is_closing(self) -> bool: """Return if the cover is closing or not.""" value = self.service.value(CharacteristicsTypes.POSITION_STATE) state = CURRENT_WINDOW_STATE_MAP[value] - return state == STATE_CLOSING + return state == CoverState.CLOSING @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" value = self.service.value(CharacteristicsTypes.POSITION_STATE) state = CURRENT_WINDOW_STATE_MAP[value] - return state == STATE_OPENING + return state == CoverState.OPENING @property def is_horizontal_tilt(self) -> bool: diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json index b2b215a98b9ab4..cddd61a12c1142 100644 --- a/homeassistant/components/homekit_controller/manifest.json +++ b/homeassistant/components/homekit_controller/manifest.json @@ -14,6 +14,6 @@ "documentation": "https://www.home-assistant.io/integrations/homekit_controller", "iot_class": "local_push", "loggers": ["aiohomekit", "commentjson"], - "requirements": ["aiohomekit==3.2.3"], + "requirements": ["aiohomekit==3.2.6"], "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."] } diff --git a/homeassistant/components/homekit_controller/switch.py b/homeassistant/components/homekit_controller/switch.py index 9fa4782e06180f..5abed2a5c79a2d 100644 --- a/homeassistant/components/homekit_controller/switch.py +++ b/homeassistant/components/homekit_controller/switch.py @@ -102,6 +102,27 @@ def extra_state_attributes(self) -> dict[str, Any] | None: return None +class HomeKitFaucet(HomeKitEntity, SwitchEntity): + """Representation of a Homekit faucet.""" + + def get_characteristic_types(self) -> list[str]: + """Define the homekit characteristics the entity cares about.""" + return [CharacteristicsTypes.ACTIVE] + + @property + def is_on(self) -> bool: + """Return true if device is on.""" + return self.service.value(CharacteristicsTypes.ACTIVE) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the specified faucet on.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: True}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the specified faucet off.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: False}) + + class HomeKitValve(HomeKitEntity, SwitchEntity): """Represents a valve in an irrigation system.""" @@ -192,9 +213,10 @@ async def async_turn_off(self, **kwargs: Any) -> None: ) -ENTITY_TYPES: dict[str, type[HomeKitSwitch | HomeKitValve]] = { +ENTITY_TYPES: dict[str, type[HomeKitSwitch | HomeKitFaucet | HomeKitValve]] = { ServicesTypes.SWITCH: HomeKitSwitch, ServicesTypes.OUTLET: HomeKitSwitch, + ServicesTypes.FAUCET: HomeKitFaucet, ServicesTypes.VALVE: HomeKitValve, } @@ -213,7 +235,7 @@ def async_add_service(service: Service) -> bool: if not (entity_class := ENTITY_TYPES.get(service.type)): return False info = {"aid": service.accessory.aid, "iid": service.iid} - entity: HomeKitSwitch | HomeKitValve = entity_class(conn, info) + entity: HomeKitSwitch | HomeKitFaucet | HomeKitValve = entity_class(conn, info) conn.async_migrate_unique_id( entity.old_unique_id, entity.unique_id, Platform.SWITCH ) diff --git a/homeassistant/components/homematicip_cloud/alarm_control_panel.py b/homeassistant/components/homematicip_cloud/alarm_control_panel.py index 35aa321f2a86c9..4241316c2a4d9e 
100644 --- a/homeassistant/components/homematicip_cloud/alarm_control_panel.py +++ b/homeassistant/components/homematicip_cloud/alarm_control_panel.py @@ -9,14 +9,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -65,21 +60,21 @@ def device_info(self) -> DeviceInfo: ) @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the alarm control panel.""" # check for triggered alarm if self._security_and_alarm.alarmActive: - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED activation_state = self._home.get_security_zones_activation() # check arm_away if activation_state == (True, True): - return STATE_ALARM_ARMED_AWAY + return AlarmControlPanelState.ARMED_AWAY # check arm_home if activation_state == (False, True): - return STATE_ALARM_ARMED_HOME + return AlarmControlPanelState.ARMED_HOME - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED @property def _security_and_alarm(self) -> SecurityAndAlarmHome: diff --git a/homeassistant/components/homematicip_cloud/light.py b/homeassistant/components/homematicip_cloud/light.py index 5a56ae6937710d..cf051103a100d7 100644 --- a/homeassistant/components/homematicip_cloud/light.py +++ b/homeassistant/components/homematicip_cloud/light.py @@ -14,12 +14,14 @@ AsyncPluggableDimmer, AsyncWiredDimmer3, ) -from homematicip.base.enums import RGBColorState +from homematicip.base.enums import OpticalSignalBehaviour, RGBColorState from homematicip.base.functionalChannels import NotificationLightChannel +from packaging.version import Version from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_NAME, + ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, ColorMode, @@ -47,15 +49,22 @@ async def async_setup_entry( if isinstance(device, AsyncBrandSwitchMeasuring): entities.append(HomematicipLightMeasuring(hap, device)) elif isinstance(device, AsyncBrandSwitchNotificationLight): + device_version = Version(device.firmwareVersion) entities.append(HomematicipLight(hap, device)) + + entity_class = ( + HomematicipNotificationLightV2 + if device_version > Version("2.0.0") + else HomematicipNotificationLight + ) + entities.append( - HomematicipNotificationLight(hap, device, device.topLightChannelIndex) + entity_class(hap, device, device.topLightChannelIndex, "Top") ) entities.append( - HomematicipNotificationLight( - hap, device, device.bottomLightChannelIndex - ) + entity_class(hap, device, device.bottomLightChannelIndex, "Bottom") ) + elif isinstance(device, (AsyncWiredDimmer3, AsyncDinRailDimmer3)): entities.extend( HomematicipMultiDimmer(hap, device, channel=channel) @@ -158,16 +167,9 @@ class HomematicipNotificationLight(HomematicipGenericEntity, LightEntity): _attr_supported_color_modes = {ColorMode.HS} _attr_supported_features = LightEntityFeature.TRANSITION - def __init__(self, hap: HomematicipHAP, device, channel: int) -> None: + def __init__(self, hap: HomematicipHAP, device, channel: int, post: str) -> None: """Initialize the notification light entity.""" - if channel == 2: - 
super().__init__( - hap, device, post="Top", channel=channel, is_multi_channel=True - ) - else: - super().__init__( - hap, device, post="Bottom", channel=channel, is_multi_channel=True - ) + super().__init__(hap, device, post=post, channel=channel, is_multi_channel=True) self._color_switcher: dict[str, tuple[float, float]] = { RGBColorState.WHITE: (0.0, 0.0), @@ -259,6 +261,66 @@ async def async_turn_off(self, **kwargs: Any) -> None: ) +class HomematicipNotificationLightV2(HomematicipNotificationLight, LightEntity): + """Representation of HomematicIP Cloud notification light.""" + + _effect_list = [ + OpticalSignalBehaviour.BILLOW_MIDDLE, + OpticalSignalBehaviour.BLINKING_MIDDLE, + OpticalSignalBehaviour.FLASH_MIDDLE, + OpticalSignalBehaviour.OFF, + OpticalSignalBehaviour.ON, + ] + + def __init__(self, hap: HomematicipHAP, device, channel: int, post: str) -> None: + """Initialize the notification light entity.""" + super().__init__(hap, device, post=post, channel=channel) + self._attr_supported_features |= LightEntityFeature.EFFECT + + @property + def effect_list(self) -> list[str] | None: + """Return the list of supported effects.""" + return self._effect_list + + @property + def effect(self) -> str | None: + """Return the current effect.""" + return self._func_channel.opticalSignalBehaviour + + @property + def is_on(self) -> bool: + """Return true if light is on.""" + return self._func_channel.on + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the light on.""" + # Use hs_color from kwargs, + # if not applicable use current hs_color. + hs_color = kwargs.get(ATTR_HS_COLOR, self.hs_color) + simple_rgb_color = _convert_color(hs_color) + + # If no kwargs, use default value. + brightness = 255 + if ATTR_BRIGHTNESS in kwargs: + brightness = kwargs[ATTR_BRIGHTNESS] + + # Minimum brightness is 10, otherwise the led is disabled + brightness = max(10, brightness) + dim_level = round(brightness / 255.0, 2) + + effect = self.effect + if ATTR_EFFECT in kwargs: + effect = kwargs[ATTR_EFFECT] + + await self._func_channel.async_set_optical_signal( + opticalSignalBehaviour=effect, rgb=simple_rgb_color, dimLevel=dim_level + ) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + await self._func_channel.async_turn_off() + + def _convert_color(color: tuple) -> RGBColorState: """Convert the given color to the reduced RGBColorState color. 
diff --git a/homeassistant/components/homematicip_cloud/sensor.py b/homeassistant/components/homematicip_cloud/sensor.py index a9c046e25bfb66..c44d280c190836 100644 --- a/homeassistant/components/homematicip_cloud/sensor.py +++ b/homeassistant/components/homematicip_cloud/sensor.py @@ -8,6 +8,9 @@ from homematicip.aio.device import ( AsyncBrandSwitchMeasuring, AsyncEnergySensorsInterface, + AsyncFloorTerminalBlock6, + AsyncFloorTerminalBlock10, + AsyncFloorTerminalBlock12, AsyncFullFlushSwitchMeasuring, AsyncHeatingThermostat, AsyncHeatingThermostatCompact, @@ -28,9 +31,13 @@ AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro, + AsyncWiredFloorTerminalBlock12, ) from homematicip.base.enums import FunctionalChannelType, ValveState -from homematicip.base.functionalChannels import FunctionalChannel +from homematicip.base.functionalChannels import ( + FloorTerminalBlockMechanicChannel, + FunctionalChannel, +) from homeassistant.components.sensor import ( SensorDeviceClass, @@ -86,7 +93,7 @@ } -async def async_setup_entry( +async def async_setup_entry( # noqa: C901 hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, @@ -184,10 +191,74 @@ async def async_setup_entry( if ch.currentPowerConsumption is not None: entities.append(HmipEsiLedCurrentPowerConsumption(hap, device)) entities.append(HmipEsiLedEnergyCounterHighTariff(hap, device)) + if isinstance( + device, + ( + AsyncFloorTerminalBlock6, + AsyncFloorTerminalBlock10, + AsyncFloorTerminalBlock12, + AsyncWiredFloorTerminalBlock12, + ), + ): + entities.extend( + HomematicipFloorTerminalBlockMechanicChannelValve( + hap, device, channel=channel.index + ) + for channel in device.functionalChannels + if isinstance(channel, FloorTerminalBlockMechanicChannel) + and getattr(channel, "valvePosition", None) is not None + ) async_add_entities(entities) +class HomematicipFloorTerminalBlockMechanicChannelValve( + HomematicipGenericEntity, SensorEntity +): + """Representation of the HomematicIP floor terminal block.""" + + _attr_native_unit_of_measurement = PERCENTAGE + _attr_state_class = SensorStateClass.MEASUREMENT + + def __init__( + self, hap: HomematicipHAP, device, channel, is_multi_channel=True + ) -> None: + """Initialize floor terminal block 12 device.""" + super().__init__( + hap, + device, + channel=channel, + is_multi_channel=is_multi_channel, + post="Valve Position", + ) + + @property + def icon(self) -> str | None: + """Return the icon.""" + if super().icon: + return super().icon + channel = next( + channel + for channel in self._device.functionalChannels + if channel.index == self._channel + ) + if channel.valveState != ValveState.ADAPTION_DONE: + return "mdi:alert" + return "mdi:heating-coil" + + @property + def native_value(self) -> int | None: + """Return the state of the floor terminal block mechanical channel valve position.""" + channel = next( + channel + for channel in self._device.functionalChannels + if channel.index == self._channel + ) + if channel.valveState != ValveState.ADAPTION_DONE: + return None + return round(channel.valvePosition * 100) + + class HomematicipAccesspointDutyCycle(HomematicipGenericEntity, SensorEntity): """Representation of then HomeMaticIP access point.""" @@ -349,6 +420,7 @@ class HomematicipWindspeedSensor(HomematicipGenericEntity, SensorEntity): _attr_device_class = SensorDeviceClass.WIND_SPEED _attr_native_unit_of_measurement = UnitOfSpeed.KILOMETERS_PER_HOUR + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, hap: HomematicipHAP, 
device) -> None: """Initialize the windspeed sensor.""" @@ -380,6 +452,7 @@ class HomematicipTodayRainSensor(HomematicipGenericEntity, SensorEntity): _attr_device_class = SensorDeviceClass.PRECIPITATION _attr_native_unit_of_measurement = UnitOfPrecipitationDepth.MILLIMETERS + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize the device.""" diff --git a/homeassistant/components/homewizard/config_flow.py b/homeassistant/components/homewizard/config_flow.py index 06dbb9c8333a94..d52e53cf39be7c 100644 --- a/homeassistant/components/homewizard/config_flow.py +++ b/homeassistant/components/homewizard/config_flow.py @@ -12,7 +12,7 @@ from voluptuous import Required, Schema from homeassistant.components import onboarding, zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_IP_ADDRESS, CONF_PATH from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError @@ -43,7 +43,6 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 discovery: DiscoveryData - entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -151,7 +150,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth if API was disabled.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -160,20 +158,17 @@ async def async_step_reauth_confirm( """Confirm reauth dialog.""" errors: dict[str, str] | None = None if user_input is not None: - assert self.entry is not None + reauth_entry = self._get_reauth_entry() try: - await self._async_try_connect(self.entry.data[CONF_IP_ADDRESS]) + await self._async_try_connect(reauth_entry.data[CONF_IP_ADDRESS]) except RecoverableError as ex: _LOGGER.error(ex) errors = {"base": ex.error_code} else: - await self.hass.config_entries.async_reload(self.entry.entry_id) + await self.hass.config_entries.async_reload(reauth_entry.entry_id) return self.async_abort(reason="reauth_successful") - return self.async_show_form( - step_id="reauth_confirm", - errors=errors, - ) + return self.async_show_form(step_id="reauth_confirm", errors=errors) @staticmethod async def _async_try_connect(ip_address: str) -> Device: diff --git a/homeassistant/components/homewizard/coordinator.py b/homeassistant/components/homewizard/coordinator.py index db41d1dd12886f..61b304eb39c42a 100644 --- a/homeassistant/components/homewizard/coordinator.py +++ b/homeassistant/components/homewizard/coordinator.py @@ -74,7 +74,8 @@ async def _async_update_data(self) -> DeviceResponseEntry: # Do not reload when performing first refresh if self.data is not None: - await self.hass.config_entries.async_reload( + # Reload config entry to let init flow handle retrying and trigger repair flow + self.hass.config_entries.async_schedule_reload( self.config_entry.entry_id ) diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index 9bb61a467cb207..57071875edbab8 100644 --- a/homeassistant/components/homewizard/sensor.py +++ b/homeassistant/components/homewizard/sensor.py @@ -19,7 +19,6 @@ ATTR_VIA_DEVICE, PERCENTAGE, EntityCategory, - Platform, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, @@ -30,7 
+29,6 @@ UnitOfVolume, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -625,7 +623,6 @@ async def async_setup_entry( ) -> None: """Initialize sensors.""" - ent_reg = er.async_get(hass) data = entry.runtime_data.data.data # Initialize default sensors @@ -639,17 +636,6 @@ async def async_setup_entry( if data.external_devices is not None: for unique_id, device in data.external_devices.items(): if description := EXTERNAL_SENSORS.get(device.meter_type): - # Migrate external devices to new unique_id - # This is to ensure that devices with same id but different type are unique - # Migration can be removed after 2024.11.0 - if entity_id := ent_reg.async_get_entity_id( - Platform.SENSOR, DOMAIN, f"{DOMAIN}_{device.unique_id}" - ): - ent_reg.async_update_entity( - entity_id, - new_unique_id=f"{DOMAIN}_{unique_id}", - ) - # Add external device entities.append( HomeWizardExternalSensorEntity( diff --git a/homeassistant/components/homewizard/strings.json b/homeassistant/components/homewizard/strings.json index ca903330a4447e..751c1ec450d24f 100644 --- a/homeassistant/components/homewizard/strings.json +++ b/homeassistant/components/homewizard/strings.json @@ -22,9 +22,10 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "invalid_discovery_parameters": "Detected unsupported API version", + "invalid_discovery_parameters": "Invalid discovery parameters", "device_not_supported": "This device is not supported", "unknown_error": "[%key:common::config_flow::error::unknown%]", + "unsupported_api_version": "Detected unsupported API version", "reauth_successful": "Enabling API was successful" } }, diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index 5d6b95815c657b..d1fa7774ef677b 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -557,47 +557,35 @@ async def validate_remove_keypad_light( class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for Lutron Homeworks.""" - _context_entry: ConfigEntry - async def _validate_edit_controller( - self, user_input: dict[str, Any] + self, user_input: dict[str, Any], reconfigure_entry: ConfigEntry ) -> dict[str, Any]: """Validate controller setup.""" _validate_credentials(user_input) user_input[CONF_PORT] = int(user_input[CONF_PORT]) - our_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert our_entry - other_entries = self._async_current_entries() - for entry in other_entries: - if entry.entry_id == our_entry.entry_id: - continue - if ( - user_input[CONF_HOST] == entry.options[CONF_HOST] - and user_input[CONF_PORT] == entry.options[CONF_PORT] - ): - raise SchemaFlowError("duplicated_host_port") + if any( + entry.entry_id != reconfigure_entry.entry_id + and user_input[CONF_HOST] == entry.options[CONF_HOST] + and user_input[CONF_PORT] == entry.options[CONF_PORT] + for entry in self._async_current_entries() + ): + raise SchemaFlowError("duplicated_host_port") await _try_connection(user_input) return user_input async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfigure flow.""" - self._context_entry = 
self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfigure flow.""" errors = {} + reconfigure_entry = self._get_reconfigure_entry() suggested_values = { - CONF_HOST: self._context_entry.options[CONF_HOST], - CONF_PORT: self._context_entry.options[CONF_PORT], - CONF_USERNAME: self._context_entry.data.get(CONF_USERNAME), - CONF_PASSWORD: self._context_entry.data.get(CONF_PASSWORD), + CONF_HOST: reconfigure_entry.options[CONF_HOST], + CONF_PORT: reconfigure_entry.options[CONF_PORT], + CONF_USERNAME: reconfigure_entry.data.get(CONF_USERNAME), + CONF_PASSWORD: reconfigure_entry.data.get(CONF_PASSWORD), } if user_input: @@ -608,30 +596,29 @@ async def async_step_reconfigure_confirm( CONF_PASSWORD: user_input.get(CONF_PASSWORD), } try: - await self._validate_edit_controller(user_input) + await self._validate_edit_controller(user_input, reconfigure_entry) except SchemaFlowError as err: errors["base"] = str(err) else: password = user_input.pop(CONF_PASSWORD, None) username = user_input.pop(CONF_USERNAME, None) - new_data = self._context_entry.data | { + new_data = reconfigure_entry.data | { CONF_PASSWORD: password, CONF_USERNAME: username, } - new_options = self._context_entry.options | { + new_options = reconfigure_entry.options | { CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], } return self.async_update_reload_and_abort( - self._context_entry, + reconfigure_entry, data=new_data, options=new_options, - reason="reconfigure_successful", reload_even_if_entry_is_unchanged=False, ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( DATA_SCHEMA_EDIT_CONTROLLER, suggested_values ), diff --git a/homeassistant/components/homeworks/strings.json b/homeassistant/components/homeworks/strings.json index c2c8a14f77c0db..977e6be8afdd2c 100644 --- a/homeassistant/components/homeworks/strings.json +++ b/homeassistant/components/homeworks/strings.json @@ -1,5 +1,8 @@ { "config": { + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + }, "error": { "connection_error": "Could not connect to the controller.", "credentials_needed": "The controller needs credentials.", @@ -22,7 +25,7 @@ "name": "[%key:component::homeworks::config::step::user::data_description::name%]" } }, - "reconfigure_confirm": { + "reconfigure": { "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", @@ -45,8 +48,8 @@ }, "data_description": { "name": "A unique name identifying the Lutron Homeworks controller", - "password": "[%key:component::homeworks::config::step::reconfigure_confirm::data_description::password%]", - "username": "[%key:component::homeworks::config::step::reconfigure_confirm::data_description::username%]" + "password": "[%key:component::homeworks::config::step::reconfigure::data_description::password%]", + "username": "[%key:component::homeworks::config::step::reconfigure::data_description::username%]" }, "description": "Add a Lutron Homeworks controller" } diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index 934d41b238e271..98cbae4eb7e252 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -49,6 +49,10 @@ RETRY, ) +MODE_PERMANENT_HOLD = 2 
+MODE_TEMPORARY_HOLD = 1 +MODE_HOLD = {MODE_PERMANENT_HOLD, MODE_TEMPORARY_HOLD} + ATTR_FAN_ACTION = "fan_action" ATTR_PERMANENT_HOLD = "permanent_hold" @@ -175,6 +179,7 @@ def __init__( self._cool_away_temp = cool_away_temp self._heat_away_temp = heat_away_temp self._away = False + self._away_hold = False self._retry = 0 self._attr_unique_id = str(device.deviceid) @@ -323,11 +328,15 @@ def target_temperature_low(self) -> float | None: @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., home, away, temp.""" - if self._away: + if self._away and self._is_hold(): + self._away_hold = True return PRESET_AWAY - if self._is_permanent_hold(): + if self._is_hold(): return PRESET_HOLD - + # Someone has changed the stat manually out of hold in away mode + if self._away and self._away_hold: + self._away = False + self._away_hold = False return PRESET_NONE @property @@ -335,10 +344,15 @@ def fan_mode(self) -> str | None: """Return the fan setting.""" return HW_FAN_MODE_TO_HA.get(self._device.fan_mode) + def _is_hold(self) -> bool: + heat_status = self._device.raw_ui_data.get("StatusHeat", 0) + cool_status = self._device.raw_ui_data.get("StatusCool", 0) + return heat_status in MODE_HOLD or cool_status in MODE_HOLD + def _is_permanent_hold(self) -> bool: heat_status = self._device.raw_ui_data.get("StatusHeat", 0) cool_status = self._device.raw_ui_data.get("StatusCool", 0) - return heat_status == 2 or cool_status == 2 + return MODE_PERMANENT_HOLD in (heat_status, cool_status) async def _set_temperature(self, **kwargs) -> None: """Set new target temperature.""" diff --git a/homeassistant/components/honeywell/config_flow.py b/homeassistant/components/honeywell/config_flow.py index 7f298aee632be0..c7cda5006920b6 100644 --- a/homeassistant/components/honeywell/config_flow.py +++ b/homeassistant/components/honeywell/config_flow.py @@ -38,14 +38,11 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a honeywell config flow.""" VERSION = 1 - entry: ConfigEntry | None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Honeywell.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -53,8 +50,8 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm re-authentication with Honeywell.""" errors: dict[str, str] = {} - assert self.entry is not None + reauth_entry = self._get_reauth_entry() if user_input: try: await self.is_valid( @@ -72,18 +69,14 @@ async def async_step_reauth_confirm( errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, - **user_input, - }, + reauth_entry, + data_updates=user_input, ) return self.async_show_form( step_id="reauth_confirm", data_schema=self.add_suggested_values_to_schema( - REAUTH_SCHEMA, - self.entry.data, + REAUTH_SCHEMA, reauth_entry.data ), errors=errors, description_placeholders={"name": "Honeywell"}, @@ -136,16 +129,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> HoneywellOptionsFlowHandler: """Options callback for Honeywell.""" - return HoneywellOptionsFlowHandler(config_entry) + return HoneywellOptionsFlowHandler() class HoneywellOptionsFlowHandler(OptionsFlow): """Config flow options for Honeywell.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Honeywell options flow.""" - self.config_entry = entry - async def 
async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/honeywell/strings.json b/homeassistant/components/honeywell/strings.json index aa6e53620a55b2..a64f1a6fce06af 100644 --- a/homeassistant/components/honeywell/strings.json +++ b/homeassistant/components/honeywell/strings.json @@ -16,6 +16,9 @@ } } }, + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" diff --git a/homeassistant/components/http/ban.py b/homeassistant/components/http/ban.py index dd5f1ed1b05921..c8fc8ffb11b265 100644 --- a/homeassistant/components/http/ban.py +++ b/homeassistant/components/http/ban.py @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.hassio import get_supervisor_ip, is_hassio from homeassistant.util import dt as dt_util, yaml from .const import KEY_HASS @@ -149,12 +150,8 @@ async def process_wrong_login(request: Request) -> None: request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr] += 1 # Supervisor IP should never be banned - if "hassio" in hass.config.components: - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import hassio - - if hassio.get_supervisor_ip() == str(remote_addr): - return + if is_hassio(hass) and str(remote_addr) == get_supervisor_ip(): + return if ( request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr] diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py index 160b2a62b553be..08fdae50c515db 100644 --- a/homeassistant/components/huawei_lte/config_flow.py +++ b/homeassistant/components/huawei_lte/config_flow.py @@ -69,7 +69,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def _async_show_user_form( self, @@ -320,8 +320,7 @@ async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry + entry = self._get_reauth_entry() if not user_input: return await self._async_show_reauth_form( user_input={ @@ -340,18 +339,12 @@ async def async_step_reauth_confirm( user_input=user_input, errors=errors ) - self.hass.config_entries.async_update_entry(entry, data=new_data) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(entry, data=new_data) class OptionsFlowHandler(OptionsFlow): """Huawei LTE options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/huawei_lte/manifest.json b/homeassistant/components/huawei_lte/manifest.json index 9a44024111c857..6720d6718eff48 100644 --- a/homeassistant/components/huawei_lte/manifest.json +++ b/homeassistant/components/huawei_lte/manifest.json @@ -7,7 +7,7 @@ "iot_class": 
"local_polling", "loggers": ["huawei_lte_api.Session"], "requirements": [ - "huawei-lte-api==1.7.3", + "huawei-lte-api==1.10.0", "stringcase==1.2.0", "url-normalize==1.4.3" ], diff --git a/homeassistant/components/hue/config_flow.py b/homeassistant/components/hue/config_flow.py index e73ae8fe11ddd4..8d17f810461540 100644 --- a/homeassistant/components/hue/config_flow.py +++ b/homeassistant/components/hue/config_flow.py @@ -57,8 +57,8 @@ def async_get_options_flow( ) -> HueV1OptionsFlowHandler | HueV2OptionsFlowHandler: """Get the options flow for this handler.""" if config_entry.data.get(CONF_API_VERSION, 1) == 1: - return HueV1OptionsFlowHandler(config_entry) - return HueV2OptionsFlowHandler(config_entry) + return HueV1OptionsFlowHandler() + return HueV2OptionsFlowHandler() def __init__(self) -> None: """Initialize the Hue flow.""" @@ -280,10 +280,6 @@ async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResu class HueV1OptionsFlowHandler(OptionsFlow): """Handle Hue options for V1 implementation.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Hue options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -315,10 +311,6 @@ async def async_step_init( class HueV2OptionsFlowHandler(OptionsFlow): """Handle Hue options for V2 implementation.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Hue options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/hue/strings.json b/homeassistant/components/hue/strings.json index ab1d0fb58ad183..2f7f2e555615a3 100644 --- a/homeassistant/components/hue/strings.json +++ b/homeassistant/components/hue/strings.json @@ -137,15 +137,15 @@ "services": { "hue_activate_scene": { "name": "Activate scene", - "description": "Activates a hue scene stored in the hue hub.", + "description": "Activates a Hue scene stored in the Hue hub.", "fields": { "group_name": { "name": "Group", - "description": "Name of hue group/room from the hue app." + "description": "Name of Hue group/room from the Hue app." }, "scene_name": { "name": "Scene", - "description": "Name of hue scene from the hue app." + "description": "Name of Hue scene from the Hue app." 
}, "dynamic": { "name": "Dynamic", diff --git a/homeassistant/components/huisbaasje/__init__.py b/homeassistant/components/huisbaasje/__init__.py index 3e0c9845c92f32..f9703f67df5ace 100644 --- a/homeassistant/components/huisbaasje/__init__.py +++ b/homeassistant/components/huisbaasje/__init__.py @@ -54,6 +54,7 @@ async def async_update_data() -> dict[str, dict[str, Any]]: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="sensor", update_method=async_update_data, update_interval=timedelta(seconds=POLLING_INTERVAL), diff --git a/homeassistant/components/hunterdouglas_powerview/__init__.py b/homeassistant/components/hunterdouglas_powerview/__init__.py index f8c7ac43b94d1f..d9358db2753675 100644 --- a/homeassistant/components/hunterdouglas_powerview/__init__.py +++ b/homeassistant/components/hunterdouglas_powerview/__init__.py @@ -1,9 +1,8 @@ """The Hunter Douglas PowerView integration.""" import logging +from typing import TYPE_CHECKING -from aiopvapi.helpers.aiorequest import AioRequest -from aiopvapi.hub import Hub from aiopvapi.resources.model import PowerviewData from aiopvapi.rooms import Rooms from aiopvapi.scenes import Scenes @@ -12,12 +11,13 @@ from homeassistant.const import CONF_API_VERSION, CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession +import homeassistant.helpers.entity_registry as er from .const import DOMAIN, HUB_EXCEPTIONS from .coordinator import PowerviewShadeUpdateCoordinator -from .model import PowerviewConfigEntry, PowerviewDeviceInfo, PowerviewEntryData +from .model import PowerviewConfigEntry, PowerviewEntryData from .shade_data import PowerviewShadeData +from .util import async_connect_hub PARALLEL_UPDATES = 1 @@ -35,29 +35,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> bool: """Set up Hunter Douglas PowerView from a config entry.""" - config = entry.data - - hub_address = config[CONF_HOST] - api_version = config.get(CONF_API_VERSION, None) + hub_address: str = config[CONF_HOST] + api_version: int | None = config.get(CONF_API_VERSION) _LOGGER.debug("Connecting %s at %s with v%s api", DOMAIN, hub_address, api_version) - websession = async_get_clientsession(hass) - - pv_request = AioRequest( - hub_address, loop=hass.loop, websession=websession, api_version=api_version - ) - # default 15 second timeout for each call in upstream try: - hub = Hub(pv_request) - await hub.query_firmware() - device_info = await async_get_device_info(hub) + api = await async_connect_hub(hass, hub_address, api_version) except HUB_EXCEPTIONS as err: raise ConfigEntryNotReady( f"Connection error to PowerView hub {hub_address}: {err}" ) from err + hub = api.hub + pv_request = api.pv_request + device_info = api.device_info + if hub.role != "Primary": # this should be caught in config_flow, but account for a hub changing roles # this will only happen manually by a user @@ -92,6 +86,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> new_data[CONF_API_VERSION] = hub.api_version hass.config_entries.async_update_entry(entry, data=new_data) + if entry.unique_id is None: + hass.config_entries.async_update_entry( + entry, unique_id=device_info.serial_number + ) + coordinator = PowerviewShadeUpdateCoordinator(hass, shades, hub) coordinator.async_set_updated_data(PowerviewShadeData()) # populate raw shade data into the coordinator for diagnostics @@ 
-111,18 +110,62 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> return True -async def async_get_device_info(hub: Hub) -> PowerviewDeviceInfo: - """Determine device info.""" - return PowerviewDeviceInfo( - name=hub.name, - mac_address=hub.mac_address, - serial_number=hub.serial_number, - firmware=hub.firmware, - model=hub.model, - hub_address=hub.ip, - ) - - async def async_unload_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, entry: PowerviewConfigEntry) -> bool: + """Migrate entry.""" + + _LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version) + + if entry.version == 1: + # 1 -> 2: Unique ID from integer to string + if entry.minor_version == 1: + if entry.unique_id is None: + await _async_add_missing_entry_unique_id(hass, entry) + await _migrate_unique_ids(hass, entry) + hass.config_entries.async_update_entry(entry, minor_version=2) + + _LOGGER.debug("Migrated to version %s.%s", entry.version, entry.minor_version) + + return True + + +async def _async_add_missing_entry_unique_id( + hass: HomeAssistant, entry: PowerviewConfigEntry +) -> None: + """Add the unique id if its missing.""" + address: str = entry.data[CONF_HOST] + api_version: int | None = entry.data.get(CONF_API_VERSION) + api = await async_connect_hub(hass, address, api_version) + hass.config_entries.async_update_entry( + entry, unique_id=api.device_info.serial_number + ) + + +async def _migrate_unique_ids(hass: HomeAssistant, entry: PowerviewConfigEntry) -> None: + """Migrate int based unique ids to str.""" + entity_registry = er.async_get(hass) + registry_entries = er.async_entries_for_config_entry( + entity_registry, entry.entry_id + ) + if TYPE_CHECKING: + assert entry.unique_id + for reg_entry in registry_entries: + if isinstance(reg_entry.unique_id, int) or ( + isinstance(reg_entry.unique_id, str) + and not reg_entry.unique_id.startswith(entry.unique_id) + ): + _LOGGER.debug( + "Migrating %s: %s to %s_%s", + reg_entry.entity_id, + reg_entry.unique_id, + entry.unique_id, + reg_entry.unique_id, + ) + entity_registry.async_update_entity( + reg_entry.entity_id, + new_unique_id=f"{entry.unique_id}_{reg_entry.unique_id}", + ) diff --git a/homeassistant/components/hunterdouglas_powerview/config_flow.py b/homeassistant/components/hunterdouglas_powerview/config_flow.py index 1d4bcd9e2b8cbe..debb9710dbdc9b 100644 --- a/homeassistant/components/hunterdouglas_powerview/config_flow.py +++ b/homeassistant/components/hunterdouglas_powerview/config_flow.py @@ -5,8 +5,6 @@ import logging from typing import TYPE_CHECKING, Any, Self -from aiopvapi.helpers.aiorequest import AioRequest -from aiopvapi.hub import Hub import voluptuous as vol from homeassistant.components import dhcp, zeroconf @@ -14,16 +12,15 @@ from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import async_get_device_info from .const import DOMAIN, HUB_EXCEPTIONS +from .util import async_connect_hub _LOGGER = logging.getLogger(__name__) HAP_SUFFIX = "._hap._tcp.local." POWERVIEW_G2_SUFFIX = "._powerview._tcp.local." -POWERVIEW_G3_SUFFIX = "._powerview-g3._tcp.local." +POWERVIEW_G3_SUFFIX = "._PowerView-G3._tcp.local." 
async def validate_input(hass: HomeAssistant, hub_address: str) -> dict[str, str]: @@ -31,18 +28,9 @@ async def validate_input(hass: HomeAssistant, hub_address: str) -> dict[str, str Data has the keys from DATA_SCHEMA with values provided by the user. """ - - websession = async_get_clientsession(hass) - - pv_request = AioRequest(hub_address, loop=hass.loop, websession=websession) - - try: - hub = Hub(pv_request) - await hub.query_firmware() - device_info = await async_get_device_info(hub) - except HUB_EXCEPTIONS as err: - raise CannotConnect from err - + api = await async_connect_hub(hass, hub_address) + hub = api.hub + device_info = api.device_info if hub.role != "Primary": raise UnsupportedDevice( f"{hub.name} ({hub.hub_address}) is the {hub.role} Hub. " @@ -63,6 +51,7 @@ class PowerviewConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Hunter Douglas PowerView.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the powerview config flow.""" @@ -110,7 +99,7 @@ async def _async_validate_or_error( try: info = await validate_input(self.hass, host) - except CannotConnect: + except HUB_EXCEPTIONS: return None, "cannot_connect" except UnsupportedDevice: return None, "unsupported_device" @@ -199,9 +188,5 @@ async def async_step_link( ) -class CannotConnect(HomeAssistantError): - """Error to indicate we cannot connect.""" - - class UnsupportedDevice(HomeAssistantError): """Error to indicate the device is not supported.""" diff --git a/homeassistant/components/hunterdouglas_powerview/cover.py b/homeassistant/components/hunterdouglas_powerview/cover.py index 6ee5fc92a41dd7..197fb4e6223d43 100644 --- a/homeassistant/components/hunterdouglas_powerview/cover.py +++ b/homeassistant/components/hunterdouglas_powerview/cover.py @@ -595,7 +595,7 @@ def __init__( ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_bottom" + self._attr_unique_id = f"{self._attr_unique_id}_bottom" @callback def _clamp_cover_limit(self, target_hass_position: int) -> int: @@ -632,7 +632,7 @@ def __init__( ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_top" + self._attr_unique_id = f"{self._attr_unique_id}_top" @property def should_poll(self) -> bool: @@ -740,7 +740,7 @@ def __init__( ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_combined" + self._attr_unique_id = f"{self._attr_unique_id}_combined" @property def is_closed(self) -> bool: @@ -806,7 +806,7 @@ def __init__( ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_front" + self._attr_unique_id = f"{self._attr_unique_id}_front" @property def should_poll(self) -> bool: @@ -862,7 +862,7 @@ def __init__( ) -> None: """Initialize the shade.""" super().__init__(coordinator, device_info, room_name, shade, name) - self._attr_unique_id = f"{self._shade.id}_rear" + self._attr_unique_id = f"{self._attr_unique_id}_rear" @property def should_poll(self) -> bool: diff --git a/homeassistant/components/hunterdouglas_powerview/entity.py b/homeassistant/components/hunterdouglas_powerview/entity.py index 424d314c4b9e39..ba572ecefce652 100644 --- a/homeassistant/components/hunterdouglas_powerview/entity.py +++ 
b/homeassistant/components/hunterdouglas_powerview/entity.py @@ -26,12 +26,12 @@ def __init__( coordinator: PowerviewShadeUpdateCoordinator, device_info: PowerviewDeviceInfo, room_name: str, - unique_id: str, + powerview_id: str, ) -> None: """Initialize the entity.""" super().__init__(coordinator) self._room_name = room_name - self._attr_unique_id = unique_id + self._attr_unique_id = f"{device_info.serial_number}_{powerview_id}" self._device_info = device_info self._configuration_url = self.coordinator.hub.url diff --git a/homeassistant/components/hunterdouglas_powerview/manifest.json b/homeassistant/components/hunterdouglas_powerview/manifest.json index 4120c55a7a7984..a80708d9a3fc91 100644 --- a/homeassistant/components/hunterdouglas_powerview/manifest.json +++ b/homeassistant/components/hunterdouglas_powerview/manifest.json @@ -19,5 +19,5 @@ "iot_class": "local_polling", "loggers": ["aiopvapi"], "requirements": ["aiopvapi==3.1.1"], - "zeroconf": ["_powerview._tcp.local.", "_powerview-g3._tcp.local."] + "zeroconf": ["_powerview._tcp.local.", "_PowerView-G3._tcp.local."] } diff --git a/homeassistant/components/hunterdouglas_powerview/model.py b/homeassistant/components/hunterdouglas_powerview/model.py index 86296b949f40f5..407de86368fcfb 100644 --- a/homeassistant/components/hunterdouglas_powerview/model.py +++ b/homeassistant/components/hunterdouglas_powerview/model.py @@ -3,20 +3,23 @@ from __future__ import annotations from dataclasses import dataclass +from typing import TYPE_CHECKING from aiopvapi.helpers.aiorequest import AioRequest +from aiopvapi.hub import Hub from aiopvapi.resources.room import Room from aiopvapi.resources.scene import Scene from aiopvapi.resources.shade import BaseShade from homeassistant.config_entries import ConfigEntry -from .coordinator import PowerviewShadeUpdateCoordinator +if TYPE_CHECKING: + from .coordinator import PowerviewShadeUpdateCoordinator type PowerviewConfigEntry = ConfigEntry[PowerviewEntryData] -@dataclass +@dataclass(slots=True) class PowerviewEntryData: """Define class for main domain information.""" @@ -28,7 +31,7 @@ class PowerviewEntryData: device_info: PowerviewDeviceInfo -@dataclass +@dataclass(slots=True) class PowerviewDeviceInfo: """Define class for device information.""" @@ -38,3 +41,12 @@ class PowerviewDeviceInfo: firmware: str | None model: str hub_address: str + + +@dataclass(slots=True) +class PowerviewAPI: + """Define class to hold the Powerview Hub API data.""" + + hub: Hub + pv_request: AioRequest + device_info: PowerviewDeviceInfo diff --git a/homeassistant/components/hunterdouglas_powerview/util.py b/homeassistant/components/hunterdouglas_powerview/util.py index 1d670f46429fe8..360bd7f722b60f 100644 --- a/homeassistant/components/hunterdouglas_powerview/util.py +++ b/homeassistant/components/hunterdouglas_powerview/util.py @@ -5,12 +5,38 @@ from collections.abc import Iterable from typing import Any +from aiopvapi.helpers.aiorequest import AioRequest from aiopvapi.helpers.constants import ATTR_ID +from aiopvapi.hub import Hub -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .model import PowerviewAPI, PowerviewDeviceInfo @callback def async_map_data_by_id(data: Iterable[dict[str | int, Any]]): """Return a dict with the key being the id for a list of entries.""" return {entry[ATTR_ID]: entry for entry in data} + + +async def async_connect_hub( + hass: HomeAssistant, address: str, api_version: 
int | None = None +) -> PowerviewAPI: + """Create the hub and fetch the device info address.""" + websession = async_get_clientsession(hass) + pv_request = AioRequest( + address, loop=hass.loop, websession=websession, api_version=api_version + ) + hub = Hub(pv_request) + await hub.query_firmware() + info = PowerviewDeviceInfo( + name=hub.name, + mac_address=hub.mac_address, + serial_number=hub.serial_number, + firmware=hub.firmware, + model=hub.model, + hub_address=hub.ip, + ) + return PowerviewAPI(hub, pv_request, info) diff --git a/homeassistant/components/husqvarna_automower/__init__.py b/homeassistant/components/husqvarna_automower/__init__.py index c7d698663135c3..822f81f5f75687 100644 --- a/homeassistant/components/husqvarna_automower/__init__.py +++ b/homeassistant/components/husqvarna_automower/__init__.py @@ -13,7 +13,9 @@ aiohttp_client, config_entry_oauth2_flow, device_registry as dr, + entity_registry as er, ) +from homeassistant.util import dt as dt_util from . import api from .const import DOMAIN @@ -48,7 +50,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutomowerConfigEntry) -> aiohttp_client.async_get_clientsession(hass), session, ) - automower_api = AutomowerSession(api_api) + time_zone_str = str(dt_util.DEFAULT_TIME_ZONE) + automower_api = AutomowerSession( + api_api, + await dt_util.async_get_time_zone(time_zone_str), + ) try: await api_api.async_get_access_token() except ClientResponseError as err: @@ -94,3 +100,20 @@ def cleanup_removed_devices( device_reg.async_update_device( device.id, remove_config_entry_id=config_entry.entry_id ) + + +def remove_work_area_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + removed_work_areas: set[int], + mower_id: str, +) -> None: + """Remove all unused work area entities for the specified mower.""" + entity_reg = er.async_get(hass) + for entity_entry in er.async_entries_for_config_entry( + entity_reg, config_entry.entry_id + ): + for work_area_id in removed_work_areas: + if entity_entry.unique_id.startswith(f"{mower_id}_{work_area_id}_"): + _LOGGER.info("Deleting: %s", entity_entry.entity_id) + entity_reg.async_remove(entity_entry.entity_id) diff --git a/homeassistant/components/husqvarna_automower/binary_sensor.py b/homeassistant/components/husqvarna_automower/binary_sensor.py index 922f7deb99b6fc..5d1ccb6a074951 100644 --- a/homeassistant/components/husqvarna_automower/binary_sensor.py +++ b/homeassistant/components/husqvarna_automower/binary_sensor.py @@ -28,7 +28,7 @@ class AutomowerBinarySensorEntityDescription(BinarySensorEntityDescription): value_fn: Callable[[MowerAttributes], bool] -BINARY_SENSOR_TYPES: tuple[AutomowerBinarySensorEntityDescription, ...] = ( +MOWER_BINARY_SENSOR_TYPES: tuple[AutomowerBinarySensorEntityDescription, ...] 
= ( AutomowerBinarySensorEntityDescription( key="battery_charging", value_fn=lambda data: data.mower.activity == MowerActivities.CHARGING, @@ -57,7 +57,7 @@ async def async_setup_entry( async_add_entities( AutomowerBinarySensorEntity(mower_id, coordinator, description) for mower_id in coordinator.data - for description in BINARY_SENSOR_TYPES + for description in MOWER_BINARY_SENSOR_TYPES ) diff --git a/homeassistant/components/husqvarna_automower/button.py b/homeassistant/components/husqvarna_automower/button.py index 696c5ae85ea6d8..22a732ec54c98e 100644 --- a/homeassistant/components/husqvarna_automower/button.py +++ b/homeassistant/components/husqvarna_automower/button.py @@ -11,7 +11,6 @@ from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import dt as dt_util from . import AutomowerConfigEntry from .coordinator import AutomowerDataUpdateCoordinator @@ -24,19 +23,6 @@ _LOGGER = logging.getLogger(__name__) -async def _async_set_time( - session: AutomowerSession, - mower_id: str, -) -> None: - """Set datetime for the mower.""" - # dt_util returns the current (aware) local datetime, set in the frontend. - # We assume it's the timezone in which the mower is. - await session.commands.set_datetime( - mower_id, - dt_util.now(), - ) - - @dataclass(frozen=True, kw_only=True) class AutomowerButtonEntityDescription(ButtonEntityDescription): """Describes Automower button entities.""" @@ -46,7 +32,7 @@ class AutomowerButtonEntityDescription(ButtonEntityDescription): press_fn: Callable[[AutomowerSession, str], Awaitable[Any]] -BUTTON_TYPES: tuple[AutomowerButtonEntityDescription, ...] = ( +MOWER_BUTTON_TYPES: tuple[AutomowerButtonEntityDescription, ...] 
= ( AutomowerButtonEntityDescription( key="confirm_error", translation_key="confirm_error", @@ -58,7 +44,7 @@ class AutomowerButtonEntityDescription(ButtonEntityDescription): key="sync_clock", translation_key="sync_clock", available_fn=_check_error_free, - press_fn=_async_set_time, + press_fn=lambda session, mower_id: session.commands.set_datetime(mower_id), ), ) @@ -73,7 +59,7 @@ async def async_setup_entry( async_add_entities( AutomowerButtonEntity(mower_id, coordinator, description) for mower_id in coordinator.data - for description in BUTTON_TYPES + for description in MOWER_BUTTON_TYPES if description.exists_fn(coordinator.data[mower_id]) ) diff --git a/homeassistant/components/husqvarna_automower/calendar.py b/homeassistant/components/husqvarna_automower/calendar.py index 87fac58beb2626..d4162af0c5c522 100644 --- a/homeassistant/components/husqvarna_automower/calendar.py +++ b/homeassistant/components/husqvarna_automower/calendar.py @@ -60,8 +60,8 @@ def event(self) -> CalendarEvent | None: ] return CalendarEvent( summary=make_name_string(work_area_name, program_event.schedule_no), - start=program_event.start.replace(tzinfo=dt_util.DEFAULT_TIME_ZONE), - end=program_event.end.replace(tzinfo=dt_util.DEFAULT_TIME_ZONE), + start=program_event.start, + end=program_event.end, rrule=program_event.rrule_str, ) diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index 63e78b5d5084b4..3e76b9ac812d8a 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -6,7 +6,7 @@ from aioautomower.utils import structure_token -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -26,27 +26,29 @@ class HusqvarnaConfigFlowHandler( VERSION = 1 DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow.""" token = data[CONF_TOKEN] - if "amc:api" not in token["scope"] and not self.reauth_entry: + if "amc:api" not in token["scope"] and self.source != SOURCE_REAUTH: return self.async_abort(reason="missing_amc_scope") user_id = token[CONF_USER_ID] - if self.reauth_entry: + await self.async_set_unique_id(user_id) + + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() if "amc:api" not in token["scope"]: return self.async_update_reload_and_abort( - self.reauth_entry, data=data, reason="missing_amc_scope" + reauth_entry, data=data, reason="missing_amc_scope" ) - if self.reauth_entry.unique_id != user_id: - return self.async_abort(reason="wrong_account") - return self.async_update_reload_and_abort(self.reauth_entry, data=data) + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort(reauth_entry, data=data) + + self._abort_if_unique_id_configured() + structured_token = structure_token(token[CONF_ACCESS_TOKEN]) first_name = structured_token.user.first_name last_name = structured_token.user.last_name - await self.async_set_unique_id(user_id) - self._abort_if_unique_id_configured() return self.async_create_entry( title=f"{NAME} of {first_name} {last_name}", data=data, @@ -61,12 +63,8 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) 
-> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - if self.reauth_entry is not None: - if "amc:api" not in self.reauth_entry.data["token"]["scope"]: - return await self.async_step_missing_scope() + if "amc:api" not in entry_data["token"]["scope"]: + return await self.async_step_missing_scope() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -74,10 +72,9 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - assert self.reauth_entry return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_NAME: self.reauth_entry.title}, + description_placeholders={CONF_NAME: self._get_reauth_entry().title}, ) return await self.async_step_user() @@ -85,9 +82,9 @@ async def async_step_missing_scope( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm reauth for missing scope.""" - if user_input is None and self.reauth_entry is not None: + if user_input is None and self.source == SOURCE_REAUTH: token_structured = structure_token( - self.reauth_entry.data["token"]["access_token"] + self._get_reauth_entry().data["token"]["access_token"] ) return self.async_show_form( step_id="missing_scope", diff --git a/homeassistant/components/husqvarna_automower/entity.py b/homeassistant/components/husqvarna_automower/entity.py index ea3fff079ebb37..da6c0ae59ce3af 100644 --- a/homeassistant/components/husqvarna_automower/entity.py +++ b/homeassistant/components/husqvarna_automower/entity.py @@ -9,13 +9,12 @@ from aioautomower.exceptions import ApiException from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import AutomowerConfigEntry, AutomowerDataUpdateCoordinator +from . 
import AutomowerDataUpdateCoordinator from .const import DOMAIN, EXECUTION_TIME_DELAY _LOGGER = logging.getLogger(__name__) @@ -53,30 +52,6 @@ def _work_area_translation_key(work_area_id: int, key: str) -> str: return f"work_area_{key}" -@callback -def async_remove_work_area_entities( - hass: HomeAssistant, - coordinator: AutomowerDataUpdateCoordinator, - entry: AutomowerConfigEntry, - mower_id: str, -) -> None: - """Remove deleted work areas from Home Assistant.""" - entity_reg = er.async_get(hass) - active_work_areas = set() - _work_areas = coordinator.data[mower_id].work_areas - if _work_areas is not None: - for work_area_id in _work_areas: - uid = f"{mower_id}_{work_area_id}_cutting_height_work_area" - active_work_areas.add(uid) - for entity_entry in er.async_entries_for_config_entry(entity_reg, entry.entry_id): - if ( - (split := entity_entry.unique_id.split("_"))[0] == mower_id - and split[-1] == "area" - and entity_entry.unique_id not in active_work_areas - ): - entity_reg.async_remove(entity_entry.entity_id) - - def handle_sending_exception( poll_after_sending: bool = False, ) -> Callable[ @@ -125,7 +100,9 @@ def __init__( self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, mower_id)}, manufacturer="Husqvarna", - model=self.mower_attributes.system.model, + model=self.mower_attributes.system.model.removeprefix( + "HUSQVARNA " + ).removeprefix("Husqvarna "), name=self.mower_attributes.system.name, serial_number=self.mower_attributes.system.serial_number, suggested_area="Garden", diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 85acfaf66a2424..d22d23583badad 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], - "requirements": ["aioautomower==2024.9.3"] + "requirements": ["aioautomower==2024.10.3"] } diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py index c22bb4d37f7ca4..d6d794f2d83a0a 100644 --- a/homeassistant/components/husqvarna_automower/number.py +++ b/homeassistant/components/husqvarna_automower/number.py @@ -13,13 +13,12 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AutomowerConfigEntry +from . import AutomowerConfigEntry, remove_work_area_entities from .coordinator import AutomowerDataUpdateCoordinator from .entity import ( AutomowerControlEntity, WorkAreaControlEntity, _work_area_translation_key, - async_remove_work_area_entities, handle_sending_exception, ) @@ -65,7 +64,7 @@ class AutomowerNumberEntityDescription(NumberEntityDescription): set_value_fn: Callable[[AutomowerSession, str, float], Awaitable[Any]] -NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] = ( +MOWER_NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] 
= ( AutomowerNumberEntityDescription( key="cutting_height", translation_key="cutting_height", @@ -81,7 +80,7 @@ class AutomowerNumberEntityDescription(NumberEntityDescription): @dataclass(frozen=True, kw_only=True) -class AutomowerWorkAreaNumberEntityDescription(NumberEntityDescription): +class WorkAreaNumberEntityDescription(NumberEntityDescription): """Describes Automower work area number entity.""" value_fn: Callable[[WorkArea], int] @@ -91,8 +90,8 @@ class AutomowerWorkAreaNumberEntityDescription(NumberEntityDescription): ] -WORK_AREA_NUMBER_TYPES: tuple[AutomowerWorkAreaNumberEntityDescription, ...] = ( - AutomowerWorkAreaNumberEntityDescription( +WORK_AREA_NUMBER_TYPES: tuple[WorkAreaNumberEntityDescription, ...] = ( + WorkAreaNumberEntityDescription( key="cutting_height_work_area", translation_key_fn=_work_area_translation_key, entity_category=EntityCategory.CONFIG, @@ -110,26 +109,44 @@ async def async_setup_entry( ) -> None: """Set up number platform.""" coordinator = entry.runtime_data - entities: list[NumberEntity] = [] - - for mower_id in coordinator.data: - if coordinator.data[mower_id].capabilities.work_areas: - _work_areas = coordinator.data[mower_id].work_areas - if _work_areas is not None: - entities.extend( - AutomowerWorkAreaNumberEntity( - mower_id, coordinator, description, work_area_id + current_work_areas: dict[str, set[int]] = {} + + async_add_entities( + AutomowerNumberEntity(mower_id, coordinator, description) + for mower_id in coordinator.data + for description in MOWER_NUMBER_TYPES + if description.exists_fn(coordinator.data[mower_id]) + ) + + def _async_work_area_listener() -> None: + """Listen for new work areas and add/remove entities as needed.""" + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.work_areas + and (_work_areas := coordinator.data[mower_id].work_areas) is not None + ): + received_work_areas = set(_work_areas.keys()) + current_work_area_set = current_work_areas.setdefault(mower_id, set()) + + new_work_areas = received_work_areas - current_work_area_set + removed_work_areas = current_work_area_set - received_work_areas + + if new_work_areas: + current_work_area_set.update(new_work_areas) + async_add_entities( + WorkAreaNumberEntity( + mower_id, coordinator, description, work_area_id + ) + for description in WORK_AREA_NUMBER_TYPES + for work_area_id in new_work_areas ) - for description in WORK_AREA_NUMBER_TYPES - for work_area_id in _work_areas - ) - async_remove_work_area_entities(hass, coordinator, entry, mower_id) - entities.extend( - AutomowerNumberEntity(mower_id, coordinator, description) - for description in NUMBER_TYPES - if description.exists_fn(coordinator.data[mower_id]) - ) - async_add_entities(entities) + + if removed_work_areas: + remove_work_area_entities(hass, entry, removed_work_areas, mower_id) + current_work_area_set.difference_update(removed_work_areas) + + coordinator.async_add_listener(_async_work_area_listener) + _async_work_area_listener() class AutomowerNumberEntity(AutomowerControlEntity, NumberEntity): @@ -161,16 +178,16 @@ async def async_set_native_value(self, value: float) -> None: ) -class AutomowerWorkAreaNumberEntity(WorkAreaControlEntity, NumberEntity): - """Defining the AutomowerWorkAreaNumberEntity with AutomowerWorkAreaNumberEntityDescription.""" +class WorkAreaNumberEntity(WorkAreaControlEntity, NumberEntity): + """Defining the WorkAreaNumberEntity with WorkAreaNumberEntityDescription.""" - entity_description: AutomowerWorkAreaNumberEntityDescription + entity_description: 
WorkAreaNumberEntityDescription def __init__( self, mower_id: str, coordinator: AutomowerDataUpdateCoordinator, - description: AutomowerWorkAreaNumberEntityDescription, + description: WorkAreaNumberEntityDescription, work_area_id: int, ) -> None: """Set up AutomowerNumberEntity.""" diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index b9a6fb164869cf..ebb68033918790 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -4,8 +4,8 @@ from dataclasses import dataclass from datetime import datetime import logging +from operator import attrgetter from typing import TYPE_CHECKING, Any -from zoneinfo import ZoneInfo from aioautomower.model import ( MowerAttributes, @@ -14,7 +14,6 @@ RestrictedReasons, WorkArea, ) -from aioautomower.utils import naive_to_aware from homeassistant.components.sensor import ( SensorDeviceClass, @@ -26,7 +25,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.util import dt as dt_util from . import AutomowerConfigEntry from .coordinator import AutomowerDataUpdateCoordinator @@ -196,16 +194,16 @@ } RESTRICTED_REASONS: list = [ - RestrictedReasons.ALL_WORK_AREAS_COMPLETED.lower(), - RestrictedReasons.DAILY_LIMIT.lower(), - RestrictedReasons.EXTERNAL.lower(), - RestrictedReasons.FOTA.lower(), - RestrictedReasons.FROST.lower(), - RestrictedReasons.NONE.lower(), - RestrictedReasons.NOT_APPLICABLE.lower(), - RestrictedReasons.PARK_OVERRIDE.lower(), - RestrictedReasons.SENSOR.lower(), - RestrictedReasons.WEEK_SCHEDULE.lower(), + RestrictedReasons.ALL_WORK_AREAS_COMPLETED, + RestrictedReasons.DAILY_LIMIT, + RestrictedReasons.EXTERNAL, + RestrictedReasons.FOTA, + RestrictedReasons.FROST, + RestrictedReasons.NONE, + RestrictedReasons.NOT_APPLICABLE, + RestrictedReasons.PARK_OVERRIDE, + RestrictedReasons.SENSOR, + RestrictedReasons.WEEK_SCHEDULE, ] STATE_NO_WORK_AREA_ACTIVE = "no_work_area_active" @@ -272,15 +270,15 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.BATTERY, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda data: data.battery.battery_percent, + value_fn=attrgetter("battery.battery_percent"), ), AutomowerSensorEntityDescription( key="mode", translation_key="mode", device_class=SensorDeviceClass.ENUM, - option_fn=lambda data: [option.lower() for option in list(MowerModes)], + option_fn=lambda data: list(MowerModes), value_fn=( - lambda data: data.mower.mode.lower() + lambda data: data.mower.mode if data.mower.mode != MowerModes.UNKNOWN else None ), @@ -293,7 +291,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.cutting_blade_usage_time is not None, - value_fn=lambda data: data.statistics.cutting_blade_usage_time, + value_fn=attrgetter("statistics.cutting_blade_usage_time"), ), AutomowerSensorEntityDescription( key="total_charging_time", @@ -304,7 +302,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_charging_time is not None, - value_fn=lambda data: 
data.statistics.total_charging_time, + value_fn=attrgetter("statistics.total_charging_time"), ), AutomowerSensorEntityDescription( key="total_cutting_time", @@ -315,7 +313,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_cutting_time is not None, - value_fn=lambda data: data.statistics.total_cutting_time, + value_fn=attrgetter("statistics.total_cutting_time"), ), AutomowerSensorEntityDescription( key="total_running_time", @@ -326,7 +324,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_running_time is not None, - value_fn=lambda data: data.statistics.total_running_time, + value_fn=attrgetter("statistics.total_running_time"), ), AutomowerSensorEntityDescription( key="total_searching_time", @@ -337,7 +335,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfTime.SECONDS, suggested_unit_of_measurement=UnitOfTime.HOURS, exists_fn=lambda data: data.statistics.total_searching_time is not None, - value_fn=lambda data: data.statistics.total_searching_time, + value_fn=attrgetter("statistics.total_searching_time"), ), AutomowerSensorEntityDescription( key="number_of_charging_cycles", @@ -345,7 +343,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.TOTAL, exists_fn=lambda data: data.statistics.number_of_charging_cycles is not None, - value_fn=lambda data: data.statistics.number_of_charging_cycles, + value_fn=attrgetter("statistics.number_of_charging_cycles"), ), AutomowerSensorEntityDescription( key="number_of_collisions", @@ -353,7 +351,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.TOTAL, exists_fn=lambda data: data.statistics.number_of_collisions is not None, - value_fn=lambda data: data.statistics.number_of_collisions, + value_fn=attrgetter("statistics.number_of_collisions"), ), AutomowerSensorEntityDescription( key="total_drive_distance", @@ -364,16 +362,13 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfLength.METERS, suggested_unit_of_measurement=UnitOfLength.KILOMETERS, exists_fn=lambda data: data.statistics.total_drive_distance is not None, - value_fn=lambda data: data.statistics.total_drive_distance, + value_fn=attrgetter("statistics.total_drive_distance"), ), AutomowerSensorEntityDescription( key="next_start_timestamp", translation_key="next_start_timestamp", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: naive_to_aware( - data.planner.next_start_datetime_naive, - ZoneInfo(str(dt_util.DEFAULT_TIME_ZONE)), - ), + value_fn=attrgetter("planner.next_start_datetime"), ), AutomowerSensorEntityDescription( key="error", @@ -387,7 +382,7 @@ class AutomowerSensorEntityDescription(SensorEntityDescription): translation_key="restricted_reason", device_class=SensorDeviceClass.ENUM, option_fn=lambda data: RESTRICTED_REASONS, - value_fn=lambda data: data.planner.restricted_reason.lower(), + value_fn=attrgetter("planner.restricted_reason"), ), AutomowerSensorEntityDescription( key="work_area", @@ -417,17 +412,14 @@ class WorkAreaSensorEntityDescription(SensorEntityDescription): 
exists_fn=lambda data: data.progress is not None, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda data: data.progress, + value_fn=attrgetter("progress"), ), WorkAreaSensorEntityDescription( key="last_time_completed", translation_key_fn=_work_area_translation_key, - exists_fn=lambda data: data.last_time_completed_naive is not None, + exists_fn=lambda data: data.last_time_completed is not None, device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: naive_to_aware( - data.last_time_completed_naive, - ZoneInfo(str(dt_util.DEFAULT_TIME_ZONE)), - ), + value_fn=attrgetter("last_time_completed"), ), ) @@ -439,25 +431,44 @@ async def async_setup_entry( ) -> None: """Set up sensor platform.""" coordinator = entry.runtime_data - entities: list[SensorEntity] = [] - for mower_id in coordinator.data: - if coordinator.data[mower_id].capabilities.work_areas: - _work_areas = coordinator.data[mower_id].work_areas - if _work_areas is not None: - entities.extend( - WorkAreaSensorEntity( - mower_id, coordinator, description, work_area_id - ) - for description in WORK_AREA_SENSOR_TYPES - for work_area_id in _work_areas - if description.exists_fn(_work_areas[work_area_id]) + current_work_areas: dict[str, set[int]] = {} + + async_add_entities( + AutomowerSensorEntity(mower_id, coordinator, description) + for mower_id, data in coordinator.data.items() + for description in MOWER_SENSOR_TYPES + if description.exists_fn(data) + ) + + def _async_work_area_listener() -> None: + """Listen for new work areas and add sensor entities if they did not exist. + + Listening for deletable work areas is managed in the number platform. + """ + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.work_areas + and (_work_areas := coordinator.data[mower_id].work_areas) is not None + ): + received_work_areas = set(_work_areas.keys()) + new_work_areas = received_work_areas - current_work_areas.get( + mower_id, set() ) - entities.extend( - AutomowerSensorEntity(mower_id, coordinator, description) - for description in MOWER_SENSOR_TYPES - if description.exists_fn(coordinator.data[mower_id]) - ) - async_add_entities(entities) + if new_work_areas: + current_work_areas.setdefault(mower_id, set()).update( + new_work_areas + ) + async_add_entities( + WorkAreaSensorEntity( + mower_id, coordinator, description, work_area_id + ) + for description in WORK_AREA_SENSOR_TYPES + for work_area_id in new_work_areas + if description.exists_fn(_work_areas[work_area_id]) + ) + + coordinator.async_add_listener(_async_work_area_listener) + _async_work_area_listener() class AutomowerSensorEntity(AutomowerBaseEntity, SensorEntity): diff --git a/homeassistant/components/husqvarna_automower/switch.py b/homeassistant/components/husqvarna_automower/switch.py index 1808b651d3dfc5..2bbe5c87624f73 100644 --- a/homeassistant/components/husqvarna_automower/switch.py +++ b/homeassistant/components/husqvarna_automower/switch.py @@ -6,8 +6,7 @@ from aioautomower.model import MowerModes, StayOutZones, Zone from homeassistant.components.switch import SwitchEntity -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -30,30 +29,82 @@ async def async_setup_entry( ) -> None: """Set up switch platform.""" coordinator = entry.runtime_data - entities: list[SwitchEntity] = 
[] - entities.extend( + current_work_areas: dict[str, set[int]] = {} + current_stay_out_zones: dict[str, set[str]] = {} + + async_add_entities( AutomowerScheduleSwitchEntity(mower_id, coordinator) for mower_id in coordinator.data ) - for mower_id in coordinator.data: - if coordinator.data[mower_id].capabilities.stay_out_zones: - _stay_out_zones = coordinator.data[mower_id].stay_out_zones - if _stay_out_zones is not None: - entities.extend( - AutomowerStayOutZoneSwitchEntity( - coordinator, mower_id, stay_out_zone_uid + + def _async_work_area_listener() -> None: + """Listen for new work areas and add switch entities if they did not exist. + + Listening for deletable work areas is managed in the number platform. + """ + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.work_areas + and (_work_areas := coordinator.data[mower_id].work_areas) is not None + ): + received_work_areas = set(_work_areas.keys()) + new_work_areas = received_work_areas - current_work_areas.get( + mower_id, set() + ) + if new_work_areas: + current_work_areas.setdefault(mower_id, set()).update( + new_work_areas + ) + async_add_entities( + WorkAreaSwitchEntity(coordinator, mower_id, work_area_id) + for work_area_id in new_work_areas ) - for stay_out_zone_uid in _stay_out_zones.zones + + def _remove_stay_out_zone_entities( + removed_stay_out_zones: set, mower_id: str + ) -> None: + """Remove all unused stay-out zones for all platforms.""" + entity_reg = er.async_get(hass) + for entity_entry in er.async_entries_for_config_entry( + entity_reg, entry.entry_id + ): + for stay_out_zone_uid in removed_stay_out_zones: + if entity_entry.unique_id.startswith(f"{mower_id}_{stay_out_zone_uid}"): + entity_reg.async_remove(entity_entry.entity_id) + + def _async_stay_out_zone_listener() -> None: + """Listen for new stay-out zones and add/remove switch entities if they did not exist.""" + for mower_id in coordinator.data: + if ( + coordinator.data[mower_id].capabilities.stay_out_zones + and (_stay_out_zones := coordinator.data[mower_id].stay_out_zones) + is not None + ): + received_stay_out_zones = set(_stay_out_zones.zones) + current_stay_out_zones_set = current_stay_out_zones.get(mower_id, set()) + new_stay_out_zones = ( + received_stay_out_zones - current_stay_out_zones_set ) - async_remove_entities(hass, coordinator, entry, mower_id) - if coordinator.data[mower_id].capabilities.work_areas: - _work_areas = coordinator.data[mower_id].work_areas - if _work_areas is not None: - entities.extend( - WorkAreaSwitchEntity(coordinator, mower_id, work_area_id) - for work_area_id in _work_areas + removed_stay_out_zones = ( + current_stay_out_zones_set - received_stay_out_zones ) - async_add_entities(entities) + if new_stay_out_zones: + current_stay_out_zones.setdefault(mower_id, set()).update( + new_stay_out_zones + ) + async_add_entities( + StayOutZoneSwitchEntity( + coordinator, mower_id, stay_out_zone_uid + ) + for stay_out_zone_uid in new_stay_out_zones + ) + if removed_stay_out_zones: + _remove_stay_out_zone_entities(removed_stay_out_zones, mower_id) + + coordinator.async_add_listener(_async_work_area_listener) + coordinator.async_add_listener(_async_stay_out_zone_listener) + _async_work_area_listener() + _async_stay_out_zone_listener() class AutomowerScheduleSwitchEntity(AutomowerControlEntity, SwitchEntity): @@ -86,7 +137,7 @@ async def async_turn_on(self, **kwargs: Any) -> None: await self.coordinator.api.commands.resume_schedule(self.mower_id) -class 
AutomowerStayOutZoneSwitchEntity(AutomowerControlEntity, SwitchEntity): +class StayOutZoneSwitchEntity(AutomowerControlEntity, SwitchEntity): """Defining the Automower stay out zone switch.""" _attr_translation_key = "stay_out_zones" @@ -182,28 +233,3 @@ async def async_turn_on(self, **kwargs: Any) -> None: await self.coordinator.api.commands.workarea_settings( self.mower_id, self.work_area_id, enabled=True ) - - -@callback -def async_remove_entities( - hass: HomeAssistant, - coordinator: AutomowerDataUpdateCoordinator, - entry: AutomowerConfigEntry, - mower_id: str, -) -> None: - """Remove deleted stay-out-zones from Home Assistant.""" - entity_reg = er.async_get(hass) - active_zones = set() - _zones = coordinator.data[mower_id].stay_out_zones - if _zones is not None: - for zones_uid in _zones.zones: - uid = f"{mower_id}_{zones_uid}_stay_out_zones" - active_zones.add(uid) - for entity_entry in er.async_entries_for_config_entry(entity_reg, entry.entry_id): - if ( - entity_entry.domain == Platform.SWITCH - and (split := entity_entry.unique_id.split("_"))[0] == mower_id - and split[-1] == "zones" - and entity_entry.unique_id not in active_zones - ): - entity_reg.async_remove(entity_entry.entity_id) diff --git a/homeassistant/components/husqvarna_automower_ble/__init__.py b/homeassistant/components/husqvarna_automower_ble/__init__.py new file mode 100644 index 00000000000000..2025ba64cf1cad --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/__init__.py @@ -0,0 +1,63 @@ +"""The Husqvarna Autoconnect Bluetooth integration.""" + +from __future__ import annotations + +from automower_ble.mower import Mower +from bleak import BleakError +from bleak_retry_connector import close_stale_connections_by_address, get_device + +from homeassistant.components import bluetooth +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from .const import LOGGER +from .coordinator import HusqvarnaCoordinator + +PLATFORMS = [ + Platform.LAWN_MOWER, +] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Husqvarna Autoconnect Bluetooth from a config entry.""" + address = entry.data[CONF_ADDRESS] + channel_id = entry.data[CONF_CLIENT_ID] + + mower = Mower(channel_id, address) + + await close_stale_connections_by_address(address) + + LOGGER.debug("connecting to %s with channel ID %s", address, str(channel_id)) + try: + device = bluetooth.async_ble_device_from_address( + hass, address, connectable=True + ) or await get_device(address) + if not await mower.connect(device): + raise ConfigEntryNotReady + except (TimeoutError, BleakError) as exception: + raise ConfigEntryNotReady( + f"Unable to connect to device {address} due to {exception}" + ) from exception + LOGGER.debug("connected and paired") + + model = await mower.get_model() + LOGGER.debug("Connected to Automower: %s", model) + + coordinator = HusqvarnaCoordinator(hass, mower, address, channel_id, model) + + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + coordinator: HusqvarnaCoordinator = 
entry.runtime_data + await coordinator.async_shutdown() + + return unload_ok diff --git a/homeassistant/components/husqvarna_automower_ble/config_flow.py b/homeassistant/components/husqvarna_automower_ble/config_flow.py new file mode 100644 index 00000000000000..72835c223341df --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/config_flow.py @@ -0,0 +1,121 @@ +"""Config flow for Husqvarna Bluetooth integration.""" + +from __future__ import annotations + +import random +from typing import Any + +from automower_ble.mower import Mower +from bleak import BleakError +import voluptuous as vol + +from homeassistant.components import bluetooth +from homeassistant.components.bluetooth import BluetoothServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID + +from .const import DOMAIN, LOGGER + + +def _is_supported(discovery_info: BluetoothServiceInfo): + """Check if device is supported.""" + + LOGGER.debug( + "%s manufacturer data: %s", + discovery_info.address, + discovery_info.manufacturer_data, + ) + + manufacturer = any(key == 1062 for key in discovery_info.manufacturer_data) + service_husqvarna = any( + service == "98bd0001-0b0e-421a-84e5-ddbf75dc6de4" + for service in discovery_info.service_uuids + ) + service_generic = any( + service == "00001800-0000-1000-8000-00805f9b34fb" + for service in discovery_info.service_uuids + ) + + return manufacturer and service_husqvarna and service_generic + + +class HusqvarnaAutomowerBleConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Husqvarna Bluetooth.""" + + VERSION = 1 + + def __init__(self) -> None: + """Initialize the config flow.""" + self.address: str | None + + async def async_step_bluetooth( + self, discovery_info: BluetoothServiceInfo + ) -> ConfigFlowResult: + """Handle the bluetooth discovery step.""" + + LOGGER.debug("Discovered device: %s", discovery_info) + if not _is_supported(discovery_info): + return self.async_abort(reason="no_devices_found") + + self.address = discovery_info.address + await self.async_set_unique_id(self.address) + self._abort_if_unique_id_configured() + return await self.async_step_confirm() + + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + assert self.address + + device = bluetooth.async_ble_device_from_address( + self.hass, self.address, connectable=True + ) + channel_id = random.randint(1, 0xFFFFFFFF) + + try: + (manufacturer, device_type, model) = await Mower( + channel_id, self.address + ).probe_gatts(device) + except (BleakError, TimeoutError) as exception: + LOGGER.exception("Failed to connect to device: %s", exception) + return self.async_abort(reason="cannot_connect") + + title = manufacturer + " " + device_type + + LOGGER.debug("Found device: %s", title) + + if user_input is not None: + return self.async_create_entry( + title=title, + data={CONF_ADDRESS: self.address, CONF_CLIENT_ID: channel_id}, + ) + + self.context["title_placeholders"] = { + "name": title, + } + + self._set_confirm_only() + return self.async_show_form( + step_id="confirm", + description_placeholders=self.context["title_placeholders"], + ) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + if user_input is not None: + self.address = user_input[CONF_ADDRESS] + await self.async_set_unique_id(self.address, raise_on_progress=False) + 
self._abort_if_unique_id_configured() + return await self.async_step_confirm() + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required(CONF_ADDRESS): str, + }, + ), + ) diff --git a/homeassistant/components/husqvarna_automower_ble/const.py b/homeassistant/components/husqvarna_automower_ble/const.py new file mode 100644 index 00000000000000..7117d0c9e29935 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/const.py @@ -0,0 +1,8 @@ +"""Constants for the Husqvarna Automower Bluetooth integration.""" + +import logging + +DOMAIN = "husqvarna_automower_ble" +MANUFACTURER = "Husqvarna" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/husqvarna_automower_ble/coordinator.py b/homeassistant/components/husqvarna_automower_ble/coordinator.py new file mode 100644 index 00000000000000..c577ccd9196a51 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/coordinator.py @@ -0,0 +1,100 @@ +"""Provides the DataUpdateCoordinator.""" + +from __future__ import annotations + +from datetime import timedelta + +from automower_ble.mower import Mower +from bleak import BleakError +from bleak_retry_connector import close_stale_connections_by_address + +from homeassistant.components import bluetooth +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER + +SCAN_INTERVAL = timedelta(seconds=60) + + +class HusqvarnaCoordinator(DataUpdateCoordinator[dict[str, bytes]]): + """Class to manage fetching data.""" + + def __init__( + self, + hass: HomeAssistant, + mower: Mower, + address: str, + channel_id: str, + model: str, + ) -> None: + """Initialize global data updater.""" + super().__init__( + hass=hass, + logger=LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.address = address + self.channel_id = channel_id + self.model = model + self.mower = mower + + async def async_shutdown(self) -> None: + """Shutdown coordinator and any connection.""" + LOGGER.debug("Shutdown") + await super().async_shutdown() + if self.mower.is_connected(): + await self.mower.disconnect() + + async def _async_find_device(self): + LOGGER.debug("Trying to reconnect") + await close_stale_connections_by_address(self.address) + + device = bluetooth.async_ble_device_from_address( + self.hass, self.address, connectable=True + ) + + try: + if not await self.mower.connect(device): + raise UpdateFailed("Failed to connect") + except BleakError as err: + raise UpdateFailed("Failed to connect") from err + + async def _async_update_data(self) -> dict[str, bytes]: + """Poll the device.""" + LOGGER.debug("Polling device") + + data: dict[str, bytes] = {} + + try: + if not self.mower.is_connected(): + await self._async_find_device() + except BleakError as err: + raise UpdateFailed("Failed to connect") from err + + try: + data["battery_level"] = await self.mower.battery_level() + LOGGER.debug("battery_level" + str(data["battery_level"])) + if data["battery_level"] is None: + await self._async_find_device() + raise UpdateFailed("Error getting data from device") + + data["activity"] = await self.mower.mower_activity() + LOGGER.debug("activity:" + str(data["activity"])) + if data["activity"] is None: + await self._async_find_device() + raise UpdateFailed("Error getting data from device") + + data["state"] = await self.mower.mower_state() + LOGGER.debug("state:" + str(data["state"])) + if data["state"] is None: + await 
self._async_find_device() + raise UpdateFailed("Error getting data from device") + + except BleakError as err: + LOGGER.error("Error getting data from device") + await self._async_find_device() + raise UpdateFailed("Error getting data from device") from err + + return data diff --git a/homeassistant/components/husqvarna_automower_ble/entity.py b/homeassistant/components/husqvarna_automower_ble/entity.py new file mode 100644 index 00000000000000..d2873d933ff89c --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/entity.py @@ -0,0 +1,30 @@ +"""Provides the HusqvarnaAutomowerBleEntity.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import HusqvarnaCoordinator + + +class HusqvarnaAutomowerBleEntity(CoordinatorEntity[HusqvarnaCoordinator]): + """HusqvarnaCoordinator entity for Husqvarna Automower Bluetooth.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: HusqvarnaCoordinator) -> None: + """Initialize coordinator entity.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{coordinator.address}_{coordinator.channel_id}")}, + manufacturer=MANUFACTURER, + model_id=coordinator.model, + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.coordinator.mower.is_connected() diff --git a/homeassistant/components/husqvarna_automower_ble/lawn_mower.py b/homeassistant/components/husqvarna_automower_ble/lawn_mower.py new file mode 100644 index 00000000000000..980efc6f069923 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/lawn_mower.py @@ -0,0 +1,153 @@ +"""The Husqvarna Autoconnect Bluetooth lawn mower platform.""" + +from __future__ import annotations + +from automower_ble.protocol import MowerActivity, MowerState + +from homeassistant.components import bluetooth +from homeassistant.components.lawn_mower import ( + LawnMowerActivity, + LawnMowerEntity, + LawnMowerEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import LOGGER +from .coordinator import HusqvarnaCoordinator +from .entity import HusqvarnaAutomowerBleEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up AutomowerLawnMower integration from a config entry.""" + coordinator: HusqvarnaCoordinator = config_entry.runtime_data + address = coordinator.address + + async_add_entities( + [ + AutomowerLawnMower( + coordinator, + address, + ), + ] + ) + + +class AutomowerLawnMower(HusqvarnaAutomowerBleEntity, LawnMowerEntity): + """Husqvarna Automower.""" + + _attr_name = None + _attr_supported_features = ( + LawnMowerEntityFeature.PAUSE + | LawnMowerEntityFeature.START_MOWING + | LawnMowerEntityFeature.DOCK + ) + + def __init__( + self, + coordinator: HusqvarnaCoordinator, + address: str, + ) -> None: + """Initialize the lawn mower.""" + super().__init__(coordinator) + self._attr_unique_id = str(address) + + def _get_activity(self) -> LawnMowerActivity | None: + """Return the current lawn mower activity.""" + if self.coordinator.data is None: + return None + + state = self.coordinator.data["state"] + activity = 
self.coordinator.data["activity"] + + if state is None or activity is None: + return None + + if state == MowerState.PAUSED: + return LawnMowerActivity.PAUSED + if state in (MowerState.STOPPED, MowerState.OFF, MowerState.WAIT_FOR_SAFETYPIN): + # This is actually stopped, but that isn't an option + return LawnMowerActivity.ERROR + if state in ( + MowerState.RESTRICTED, + MowerState.IN_OPERATION, + MowerState.PENDING_START, + ): + if activity in ( + MowerActivity.CHARGING, + MowerActivity.PARKED, + MowerActivity.NONE, + ): + return LawnMowerActivity.DOCKED + if activity in (MowerActivity.GOING_OUT, MowerActivity.MOWING): + return LawnMowerActivity.MOWING + if activity == MowerActivity.GOING_HOME: + return LawnMowerActivity.RETURNING + return LawnMowerActivity.ERROR + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + LOGGER.debug("AutomowerLawnMower: _handle_coordinator_update") + + self._attr_activity = self._get_activity() + self._attr_available = self._attr_activity is not None + super()._handle_coordinator_update() + + async def async_start_mowing(self) -> None: + """Start mowing.""" + LOGGER.debug("Starting mower") + + if not self.coordinator.mower.is_connected(): + device = bluetooth.async_ble_device_from_address( + self.coordinator.hass, self.coordinator.address, connectable=True + ) + if not await self.coordinator.mower.connect(device): + return + + await self.coordinator.mower.mower_resume() + if self._attr_activity is LawnMowerActivity.DOCKED: + await self.coordinator.mower.mower_override() + await self.coordinator.async_request_refresh() + + self._attr_activity = self._get_activity() + self.async_write_ha_state() + + async def async_dock(self) -> None: + """Start docking.""" + LOGGER.debug("Start docking") + + if not self.coordinator.mower.is_connected(): + device = bluetooth.async_ble_device_from_address( + self.coordinator.hass, self.coordinator.address, connectable=True + ) + if not await self.coordinator.mower.connect(device): + return + + await self.coordinator.mower.mower_park() + await self.coordinator.async_request_refresh() + + self._attr_activity = self._get_activity() + self.async_write_ha_state() + + async def async_pause(self) -> None: + """Pause mower.""" + LOGGER.debug("Pausing mower") + + if not self.coordinator.mower.is_connected(): + device = bluetooth.async_ble_device_from_address( + self.coordinator.hass, self.coordinator.address, connectable=True + ) + if not await self.coordinator.mower.connect(device): + return + + await self.coordinator.mower.mower_pause() + await self.coordinator.async_request_refresh() + + self._attr_activity = self._get_activity() + self.async_write_ha_state() diff --git a/homeassistant/components/husqvarna_automower_ble/manifest.json b/homeassistant/components/husqvarna_automower_ble/manifest.json new file mode 100644 index 00000000000000..3e72d9707c7792 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/manifest.json @@ -0,0 +1,16 @@ +{ + "domain": "husqvarna_automower_ble", + "name": "Husqvarna Automower BLE", + "bluetooth": [ + { + "service_uuid": "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + "connectable": true + } + ], + "codeowners": ["@alistair23"], + "config_flow": true, + "dependencies": ["bluetooth_adapters"], + "documentation": "https://www.home-assistant.io/integrations/???", + "iot_class": "local_polling", + "requirements": ["automower-ble==0.2.0"] +} diff --git a/homeassistant/components/husqvarna_automower_ble/strings.json 
b/homeassistant/components/husqvarna_automower_ble/strings.json new file mode 100644 index 00000000000000..de0a140933acd9 --- /dev/null +++ b/homeassistant/components/husqvarna_automower_ble/strings.json @@ -0,0 +1,21 @@ +{ + "config": { + "flow_title": "{name} ({address})", + "step": { + "user": { + "data": { + "address": "Device BLE address" + } + }, + "confirm": { + "description": "Do you want to set up {name}? Make sure the mower is in pairing mode" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_devices_found": "Ensure the mower is in pairing mode and try again. It can take a few attempts.", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + } +} diff --git a/homeassistant/components/huum/__init__.py b/homeassistant/components/huum/__init__.py index 75faf1923df6ca..c533ca34ef3a58 100644 --- a/homeassistant/components/huum/__init__.py +++ b/homeassistant/components/huum/__init__.py @@ -3,23 +3,30 @@ from __future__ import annotations import logging - -from huum.exceptions import Forbidden, NotAuthenticated -from huum.huum import Huum +import sys from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN, PLATFORMS +if sys.version_info < (3, 13): + from huum.exceptions import Forbidden, NotAuthenticated + from huum.huum import Huum + _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Huum from a config entry.""" + if sys.version_info >= (3, 13): + raise HomeAssistantError( + "Huum is not supported on Python 3.13. Please use Python 3.12." 
+ ) + username = entry.data[CONF_USERNAME] password = entry.data[CONF_PASSWORD] diff --git a/homeassistant/components/huum/climate.py b/homeassistant/components/huum/climate.py index df740aea3d120b..b659e33038a571 100644 --- a/homeassistant/components/huum/climate.py +++ b/homeassistant/components/huum/climate.py @@ -3,13 +3,9 @@ from __future__ import annotations import logging +import sys from typing import Any -from huum.const import SaunaStatus -from huum.exceptions import SafetyException -from huum.huum import Huum -from huum.schemas import HuumStatusResponse - from homeassistant.components.climate import ( ClimateEntity, ClimateEntityFeature, @@ -24,6 +20,12 @@ from .const import DOMAIN +if sys.version_info < (3, 13): + from huum.const import SaunaStatus + from huum.exceptions import SafetyException + from huum.huum import Huum + from huum.schemas import HuumStatusResponse + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/huum/config_flow.py b/homeassistant/components/huum/config_flow.py index 6a5fd96b99d95c..10c3137818464c 100644 --- a/homeassistant/components/huum/config_flow.py +++ b/homeassistant/components/huum/config_flow.py @@ -3,10 +3,9 @@ from __future__ import annotations import logging +import sys from typing import Any -from huum.exceptions import Forbidden, NotAuthenticated -from huum.huum import Huum import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -15,6 +14,10 @@ from .const import DOMAIN +if sys.version_info < (3, 13): + from huum.exceptions import Forbidden, NotAuthenticated + from huum.huum import Huum + _LOGGER = logging.getLogger(__name__) STEP_USER_DATA_SCHEMA = vol.Schema( diff --git a/homeassistant/components/huum/manifest.json b/homeassistant/components/huum/manifest.json index 7629f529b91bf9..025d1b97f216f3 100644 --- a/homeassistant/components/huum/manifest.json +++ b/homeassistant/components/huum/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/huum", "iot_class": "cloud_polling", - "requirements": ["huum==0.7.10"] + "requirements": ["huum==0.7.11;python_version<'3.13'"] } diff --git a/homeassistant/components/hvv_departures/config_flow.py b/homeassistant/components/hvv_departures/config_flow.py index 3e1b98d9a3827c..536b8f18259e2a 100644 --- a/homeassistant/components/hvv_departures/config_flow.py +++ b/homeassistant/components/hvv_departures/config_flow.py @@ -141,16 +141,14 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Options flow handler.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize HVV Departures options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) self.departure_filters: dict[str, Any] = {} async def async_step_init( diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index a5e7d616fcf1ae..242763e81e3b78 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -10,7 +10,7 @@ from pydrawise.exceptions import NotAuthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, 
ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN, LOGGER @@ -21,10 +21,6 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Construct a ConfigFlow.""" - self.reauth_entry: ConfigEntry | None = None - async def _create_or_update_entry( self, username: str, @@ -49,20 +45,17 @@ async def _create_or_update_entry( await self.async_set_unique_id(f"hydrawise-{user.customer_id}") - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title="Hydrawise", data={CONF_USERNAME: username, CONF_PASSWORD: password}, ) - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data=self.reauth_entry.data - | {CONF_USERNAME: username, CONF_PASSWORD: password}, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_USERNAME: username, CONF_PASSWORD: password}, ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -93,7 +86,4 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth after updating config to username/password.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/hydrawise/const.py b/homeassistant/components/hydrawise/const.py index f731ecf278ccc9..47b9bef845e4b4 100644 --- a/homeassistant/components/hydrawise/const.py +++ b/homeassistant/components/hydrawise/const.py @@ -10,7 +10,7 @@ MANUFACTURER = "Hydrawise" -SCAN_INTERVAL = timedelta(seconds=30) +SCAN_INTERVAL = timedelta(seconds=60) SIGNAL_UPDATE_HYDRAWISE = "hydrawise_update" diff --git a/homeassistant/components/hydrawise/strings.json b/homeassistant/components/hydrawise/strings.json index b6df36ad4ff394..4d50f10bcb2b89 100644 --- a/homeassistant/components/hydrawise/strings.json +++ b/homeassistant/components/hydrawise/strings.json @@ -13,7 +13,8 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "unknown": "[%key:common::config_flow::error::unknown%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", diff --git a/homeassistant/components/hyperion/config_flow.py b/homeassistant/components/hyperion/config_flow.py index 161c531328de56..b2b7dbdf531104 100644 --- a/homeassistant/components/hyperion/config_flow.py +++ b/homeassistant/components/hyperion/config_flow.py @@ -424,24 +424,22 @@ async def async_step_confirm( @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> HyperionOptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> HyperionOptionsFlow: """Get the Hyperion Options flow.""" - return HyperionOptionsFlow(config_entry) + return HyperionOptionsFlow() class HyperionOptionsFlow(OptionsFlow): """Hyperion options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize a Hyperion options flow.""" - self._config_entry = config_entry - def _create_client(self) -> client.HyperionClient: """Create and connect a client 
instance.""" return create_hyperion_client( - self._config_entry.data[CONF_HOST], - self._config_entry.data[CONF_PORT], - token=self._config_entry.data.get(CONF_TOKEN), + self.config_entry.data[CONF_HOST], + self.config_entry.data[CONF_PORT], + token=self.config_entry.data.get(CONF_TOKEN), ) async def async_step_init( @@ -470,8 +468,7 @@ async def async_step_init( return self.async_create_entry(title="", data=user_input) default_effect_show_list = list( - set(effects) - - set(self._config_entry.options.get(CONF_EFFECT_HIDE_LIST, [])) + set(effects) - set(self.config_entry.options.get(CONF_EFFECT_HIDE_LIST, [])) ) return self.async_show_form( @@ -480,7 +477,7 @@ async def async_step_init( { vol.Optional( CONF_PRIORITY, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_PRIORITY, DEFAULT_PRIORITY ), ): vol.All(vol.Coerce(int), vol.Range(min=0, max=255)), diff --git a/homeassistant/components/hyperion/strings.json b/homeassistant/components/hyperion/strings.json index 79c226b71eb6da..01682648277933 100644 --- a/homeassistant/components/hyperion/strings.json +++ b/homeassistant/components/hyperion/strings.json @@ -52,6 +52,9 @@ "effect_show_list": "Hyperion effects to show" } } + }, + "abort": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "entity": { diff --git a/homeassistant/components/ialarm/alarm_control_panel.py b/homeassistant/components/ialarm/alarm_control_panel.py index 912f04a1d1eb44..4ae3787dc1d7e5 100644 --- a/homeassistant/components/ialarm/alarm_control_panel.py +++ b/homeassistant/components/ialarm/alarm_control_panel.py @@ -5,6 +5,7 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -50,7 +51,7 @@ def __init__(self, coordinator: IAlarmDataUpdateCoordinator) -> None: self._attr_unique_id = coordinator.mac @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return self.coordinator.state diff --git a/homeassistant/components/ialarm/const.py b/homeassistant/components/ialarm/const.py index d1561cc86d5fa5..1b8074c34f0ab1 100644 --- a/homeassistant/components/ialarm/const.py +++ b/homeassistant/components/ialarm/const.py @@ -2,12 +2,7 @@ from pyialarm import IAlarm -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState DATA_COORDINATOR = "ialarm" @@ -16,8 +11,8 @@ DOMAIN = "ialarm" IALARM_TO_HASS = { - IAlarm.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - IAlarm.ARMED_STAY: STATE_ALARM_ARMED_HOME, - IAlarm.DISARMED: STATE_ALARM_DISARMED, - IAlarm.TRIGGERED: STATE_ALARM_TRIGGERED, + IAlarm.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + IAlarm.ARMED_STAY: AlarmControlPanelState.ARMED_HOME, + IAlarm.DISARMED: AlarmControlPanelState.DISARMED, + IAlarm.TRIGGERED: AlarmControlPanelState.TRIGGERED, } diff --git a/homeassistant/components/ialarm/coordinator.py b/homeassistant/components/ialarm/coordinator.py index 2aec99c98c4eec..ad0f2298a3b1af 100644 --- a/homeassistant/components/ialarm/coordinator.py +++ b/homeassistant/components/ialarm/coordinator.py @@ -7,7 +7,10 @@ from pyialarm import IAlarm -from homeassistant.components.alarm_control_panel import SCAN_INTERVAL +from 
homeassistant.components.alarm_control_panel import ( + SCAN_INTERVAL, + AlarmControlPanelState, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -22,7 +25,7 @@ class IAlarmDataUpdateCoordinator(DataUpdateCoordinator[None]): def __init__(self, hass: HomeAssistant, ialarm: IAlarm, mac: str) -> None: """Initialize global iAlarm data updater.""" self.ialarm = ialarm - self.state: str | None = None + self.state: AlarmControlPanelState | None = None self.host: str = ialarm.host self.mac = mac diff --git a/homeassistant/components/iaqualink/config_flow.py b/homeassistant/components/iaqualink/config_flow.py index 3605c328903a44..2cb1ba4b5d7c5d 100644 --- a/homeassistant/components/iaqualink/config_flow.py +++ b/homeassistant/components/iaqualink/config_flow.py @@ -27,11 +27,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow start.""" - # Supporting a single account. - entries = self._async_current_entries() - if entries: - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: diff --git a/homeassistant/components/iaqualink/manifest.json b/homeassistant/components/iaqualink/manifest.json index 8834a538be9005..2531632075c66d 100644 --- a/homeassistant/components/iaqualink/manifest.json +++ b/homeassistant/components/iaqualink/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/iaqualink", "iot_class": "cloud_polling", "loggers": ["iaqualink"], - "requirements": ["iaqualink==0.5.0", "h2==4.1.0"] + "requirements": ["iaqualink==0.5.0", "h2==4.1.0"], + "single_config_entry": true } diff --git a/homeassistant/components/iaqualink/strings.json b/homeassistant/components/iaqualink/strings.json index 85b49996f51cf4..032e1a592d9d34 100644 --- a/homeassistant/components/iaqualink/strings.json +++ b/homeassistant/components/iaqualink/strings.json @@ -13,9 +13,6 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } } } diff --git a/homeassistant/components/ibeacon/config_flow.py b/homeassistant/components/ibeacon/config_flow.py index 424befa81ecfaa..c00398e39b070e 100644 --- a/homeassistant/components/ibeacon/config_flow.py +++ b/homeassistant/components/ibeacon/config_flow.py @@ -30,9 +30,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if not bluetooth.async_scanner_count(self.hass, connectable=False): return self.async_abort(reason="bluetooth_not_available") @@ -47,16 +44,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return IBeaconOptionsFlow(config_entry) + return IBeaconOptionsFlow() class IBeaconOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult: """Manage the options.""" errors = {} diff --git a/homeassistant/components/ibeacon/manifest.json b/homeassistant/components/ibeacon/manifest.json index 
8dbc99c8ada712..8bd7e3ab9ccc71 100644 --- a/homeassistant/components/ibeacon/manifest.json +++ b/homeassistant/components/ibeacon/manifest.json @@ -13,5 +13,6 @@ "documentation": "https://www.home-assistant.io/integrations/ibeacon", "iot_class": "local_push", "loggers": ["bleak"], - "requirements": ["ibeacon-ble==1.2.0"] + "requirements": ["ibeacon-ble==1.2.0"], + "single_config_entry": true } diff --git a/homeassistant/components/ibeacon/strings.json b/homeassistant/components/ibeacon/strings.json index 440df8292a96f1..9307f8486445e7 100644 --- a/homeassistant/components/ibeacon/strings.json +++ b/homeassistant/components/ibeacon/strings.json @@ -6,8 +6,7 @@ } }, "abort": { - "bluetooth_not_available": "At least one Bluetooth adapter or remote must be configured to use iBeacon Tracker.", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "bluetooth_not_available": "At least one Bluetooth adapter or remote must be configured to use iBeacon Tracker." } }, "options": { diff --git a/homeassistant/components/ifttt/alarm_control_panel.py b/homeassistant/components/ifttt/alarm_control_panel.py index 1af23d716c805b..739352485bda7b 100644 --- a/homeassistant/components/ifttt/alarm_control_panel.py +++ b/homeassistant/components/ifttt/alarm_control_panel.py @@ -10,6 +10,7 @@ PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.const import ( @@ -18,10 +19,6 @@ CONF_CODE, CONF_NAME, CONF_OPTIMISTIC, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, ) from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv @@ -33,10 +30,10 @@ _LOGGER = logging.getLogger(__name__) ALLOWED_STATES = [ - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_HOME, ] DATA_IFTTT_ALARM = "ifttt_alarm" @@ -168,40 +165,41 @@ def alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" if not self._check_code(code): return - self.set_alarm_state(self._event_disarm, STATE_ALARM_DISARMED) + self.set_alarm_state(self._event_disarm, AlarmControlPanelState.DISARMED) def alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" if self._code_arm_required and not self._check_code(code): return - self.set_alarm_state(self._event_away, STATE_ALARM_ARMED_AWAY) + self.set_alarm_state(self._event_away, AlarmControlPanelState.ARMED_AWAY) def alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" if self._code_arm_required and not self._check_code(code): return - self.set_alarm_state(self._event_home, STATE_ALARM_ARMED_HOME) + self.set_alarm_state(self._event_home, AlarmControlPanelState.ARMED_HOME) def alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" if self._code_arm_required and not self._check_code(code): return - self.set_alarm_state(self._event_night, STATE_ALARM_ARMED_NIGHT) + self.set_alarm_state(self._event_night, AlarmControlPanelState.ARMED_NIGHT) - def set_alarm_state(self, event: str, state: str) -> None: + def set_alarm_state(self, event: str, state: AlarmControlPanelState) -> None: """Call the IFTTT trigger service to change the alarm state.""" data = {ATTR_EVENT: 
event} self.hass.services.call(DOMAIN, SERVICE_TRIGGER, data) _LOGGER.debug("Called IFTTT integration to trigger event %s", event) if self._optimistic: - self._attr_state = state + self._attr_alarm_state = state def push_alarm_state(self, value: str) -> None: """Push the alarm state to the given value.""" + value = AlarmControlPanelState(value) if value in ALLOWED_STATES: _LOGGER.debug("Pushed the alarm state to %s", value) - self._attr_state = value + self._attr_alarm_state = value def _check_code(self, code: str | None) -> bool: return self._code is None or self._code == code diff --git a/homeassistant/components/image/__init__.py b/homeassistant/components/image/__init__.py index 47019f3e92e7b9..dbb5962eabf5f3 100644 --- a/homeassistant/components/image/__init__.py +++ b/homeassistant/components/image/__init__.py @@ -8,19 +8,27 @@ from dataclasses import dataclass from datetime import datetime, timedelta import logging +import os from random import SystemRandom from typing import Final, final from aiohttp import hdrs, web import httpx from propcache import cached_property +import voluptuous as vol from homeassistant.components.http import KEY_AUTHENTICATED, KEY_HASS, HomeAssistantView from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONTENT_TYPE_MULTIPART, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback +from homeassistant.core import ( + Event, + EventStateChangedData, + HomeAssistant, + ServiceCall, + callback, +) from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv +import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import ( @@ -28,17 +36,26 @@ async_track_time_interval, ) from homeassistant.helpers.httpx_client import get_async_client -from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType +from homeassistant.helpers.typing import ( + UNDEFINED, + ConfigType, + UndefinedType, + VolDictType, +) from .const import DATA_COMPONENT, DOMAIN, IMAGE_TIMEOUT _LOGGER = logging.getLogger(__name__) +SERVICE_SNAPSHOT: Final = "snapshot" + ENTITY_ID_FORMAT: Final = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE SCAN_INTERVAL: Final = timedelta(seconds=30) +ATTR_FILENAME: Final = "filename" + DEFAULT_CONTENT_TYPE: Final = "image/jpeg" ENTITY_IMAGE_URL: Final = "/api/image_proxy/{0}?token={1}" @@ -51,6 +68,8 @@ FRAME_SEPARATOR = bytes(f"\r\n--{FRAME_BOUNDARY}\r\n", "utf-8") LAST_FRAME_MARKER = bytes(f"\r\n--{FRAME_BOUNDARY}--\r\n", "utf-8") +IMAGE_SERVICE_SNAPSHOT: VolDictType = {vol.Required(ATTR_FILENAME): cv.string} + class ImageEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes image entities.""" @@ -115,6 +134,10 @@ def unsub_track_time_interval(_event: Event) -> None: hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, unsub_track_time_interval) + component.async_register_entity_service( + SERVICE_SNAPSHOT, IMAGE_SERVICE_SNAPSHOT, async_handle_snapshot_service + ) + return True @@ -380,3 +403,34 @@ async def handle( ) -> web.StreamResponse: """Serve image stream.""" return await async_get_still_stream(request, image_entity) + + +async def async_handle_snapshot_service( + image: ImageEntity, service_call: ServiceCall +) -> None: + """Handle snapshot services 
calls.""" + hass = image.hass + snapshot_file: str = service_call.data[ATTR_FILENAME] + + # check if we allow to access to that file + if not hass.config.is_allowed_path(snapshot_file): + raise HomeAssistantError( + f"Cannot write `{snapshot_file}`, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`" + ) + + async with asyncio.timeout(IMAGE_TIMEOUT): + image_data = await image.async_image() + + if image_data is None: + return + + def _write_image(to_file: str, image_data: bytes) -> None: + """Executor helper to write image.""" + os.makedirs(os.path.dirname(to_file), exist_ok=True) + with open(to_file, "wb") as img_file: + img_file.write(image_data) + + try: + await hass.async_add_executor_job(_write_image, snapshot_file, image_data) + except OSError as err: + raise HomeAssistantError("Can't write image to file") from err diff --git a/homeassistant/components/image/icons.json b/homeassistant/components/image/icons.json index cec9c99d7657c5..4434f3c180ce46 100644 --- a/homeassistant/components/image/icons.json +++ b/homeassistant/components/image/icons.json @@ -3,5 +3,10 @@ "_": { "default": "mdi:image" } + }, + "services": { + "snapshot": { + "service": "mdi:camera" + } } } diff --git a/homeassistant/components/image/services.yaml b/homeassistant/components/image/services.yaml new file mode 100644 index 00000000000000..8eef055cd892b4 --- /dev/null +++ b/homeassistant/components/image/services.yaml @@ -0,0 +1,12 @@ +# Describes the format for available image services + +snapshot: + target: + entity: + domain: image + fields: + filename: + required: true + example: "/tmp/image_snapshot.jpg" + selector: + text: diff --git a/homeassistant/components/image/strings.json b/homeassistant/components/image/strings.json index ea7ecd1695627b..011102f5b9e00f 100644 --- a/homeassistant/components/image/strings.json +++ b/homeassistant/components/image/strings.json @@ -4,5 +4,17 @@ "_": { "name": "[%key:component::image::title%]" } + }, + "services": { + "snapshot": { + "name": "Take snapshot", + "description": "Takes a snapshot from an image.", + "fields": { + "filename": { + "name": "Filename", + "description": "Template of a filename. Variable available is `entity_id`." 
+ } + } + } } } diff --git a/homeassistant/components/image_processing/__init__.py b/homeassistant/components/image_processing/__init__.py index 2c1d0f9304c9b5..0ac8d39813bfd6 100644 --- a/homeassistant/components/image_processing/__init__.py +++ b/homeassistant/components/image_processing/__init__.py @@ -223,7 +223,7 @@ def state(self) -> str | int | None: confidence = f_co for attr in (ATTR_NAME, ATTR_MOTION): if attr in face: - state = face[attr] # type: ignore[literal-required] + state = face[attr] break return state diff --git a/homeassistant/components/image_upload/manifest.json b/homeassistant/components/image_upload/manifest.json index 963721a0476123..bb8c33ba749795 100644 --- a/homeassistant/components/image_upload/manifest.json +++ b/homeassistant/components/image_upload/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/image_upload", "integration_type": "system", "quality_scale": "internal", - "requirements": ["Pillow==10.4.0"] + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/imap/config_flow.py b/homeassistant/components/imap/config_flow.py index 6f93ce71d844e2..994c53b5b3e84f 100644 --- a/homeassistant/components/imap/config_flow.py +++ b/homeassistant/components/imap/config_flow.py @@ -13,9 +13,15 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, +) +from homeassistant.const import ( + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, + CONF_VERIFY_SSL, ) -from homeassistant.const import CONF_PASSWORD, CONF_PORT, CONF_USERNAME, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers import config_validation as cv @@ -144,7 +150,6 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for imap.""" VERSION = 1 - _reauth_entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -177,9 +182,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -187,17 +189,16 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} if not (errors := await validate_input(self.hass, user_input)): - return self.async_update_reload_and_abort( - self._reauth_entry, data=user_input - ) + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.title, }, step_id="reauth_confirm", data_schema=vol.Schema( @@ -212,12 +213,12 @@ async def async_step_reauth_confirm( @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> ImapOptionsFlow: """Get the options flow for this handler.""" - return OptionsFlow(config_entry) + return ImapOptionsFlow() -class OptionsFlow(OptionsFlowWithConfigEntry): +class ImapOptionsFlow(OptionsFlow): """Option flow handler.""" async def async_step_init( @@ 
-225,13 +226,13 @@ async def async_step_init( ) -> ConfigFlowResult: """Manage the options.""" errors: dict[str, str] | None = None - entry_data: dict[str, Any] = dict(self._config_entry.data) + entry_data: dict[str, Any] = dict(self.config_entry.data) if user_input is not None: try: self._async_abort_entries_match( { - CONF_SERVER: self._config_entry.data[CONF_SERVER], - CONF_USERNAME: self._config_entry.data[CONF_USERNAME], + CONF_SERVER: self.config_entry.data[CONF_SERVER], + CONF_USERNAME: self.config_entry.data[CONF_USERNAME], CONF_FOLDER: user_input[CONF_FOLDER], CONF_SEARCH: user_input[CONF_SEARCH], } diff --git a/homeassistant/components/imap/strings.json b/homeassistant/components/imap/strings.json index 115d46f3d0e46f..7c4a0d9a9736a5 100644 --- a/homeassistant/components/imap/strings.json +++ b/homeassistant/components/imap/strings.json @@ -104,7 +104,7 @@ "services": { "fetch": { "name": "Fetch message", - "description": "Fetch the email message from the server.", + "description": "Fetch an email message from the server.", "fields": { "entry": { "name": "Entry", diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index 08946a802f1937..c01be10fc68867 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["imgw_pib==1.0.5"] + "requirements": ["imgw_pib==1.0.6"] } diff --git a/homeassistant/components/improv_ble/config_flow.py b/homeassistant/components/improv_ble/config_flow.py index f38f4830ace971..05dd1de449a7fe 100644 --- a/homeassistant/components/improv_ble/config_flow.py +++ b/homeassistant/components/improv_ble/config_flow.py @@ -120,12 +120,22 @@ def _abort_if_provisioned(self) -> None: assert self._discovery_info is not None service_data = self._discovery_info.service_data - improv_service_data = ImprovServiceData.from_bytes( - service_data[SERVICE_DATA_UUID] - ) + try: + improv_service_data = ImprovServiceData.from_bytes( + service_data[SERVICE_DATA_UUID] + ) + except improv_ble_errors.InvalidCommand as err: + _LOGGER.warning( + "Aborting improv flow, device %s sent invalid improv data: '%s'", + self._discovery_info.address, + service_data[SERVICE_DATA_UUID].hex(), + ) + raise AbortFlow("invalid_improv_data") from err + if improv_service_data.state in (State.PROVISIONING, State.PROVISIONED): _LOGGER.debug( - "Aborting improv flow, device is already provisioned: %s", + "Aborting improv flow, device %s is already provisioned: %s", + self._discovery_info.address, improv_service_data.state, ) raise AbortFlow("already_provisioned") diff --git a/homeassistant/components/incomfort/water_heater.py b/homeassistant/components/incomfort/water_heater.py index 28424069d1cb3f..e7620ac2a1a37b 100644 --- a/homeassistant/components/incomfort/water_heater.py +++ b/homeassistant/components/incomfort/water_heater.py @@ -54,12 +54,16 @@ def extra_state_attributes(self) -> dict[str, Any]: return {k: v for k, v in self._heater.status.items() if k in HEATER_ATTRS} @property - def current_temperature(self) -> float: + def current_temperature(self) -> float | None: """Return the current temperature.""" if self._heater.is_tapping: return self._heater.tap_temp if self._heater.is_pumping: return self._heater.heater_temp + if self._heater.heater_temp is None: + return self._heater.tap_temp + if self._heater.tap_temp is 
None: + return self._heater.heater_temp return max(self._heater.heater_temp, self._heater.tap_temp) @property diff --git a/homeassistant/components/insteon/config_flow.py b/homeassistant/components/insteon/config_flow.py index 9b486ad01e3dfa..143a9e2a5e211f 100644 --- a/homeassistant/components/insteon/config_flow.py +++ b/homeassistant/components/insteon/config_flow.py @@ -59,8 +59,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Init the config flow.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") modem_types = [STEP_PLM, STEP_HUB_V1, STEP_HUB_V2] return self.async_show_menu(step_id="user", menu_options=modem_types) @@ -135,9 +133,6 @@ async def async_step_usb( self, discovery_info: usb.UsbServiceInfo ) -> ConfigFlowResult: """Handle USB discovery.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - self._device_path = discovery_info.device self._device_name = usb.human_readable_device_name( discovery_info.device, diff --git a/homeassistant/components/insteon/manifest.json b/homeassistant/components/insteon/manifest.json index c5791573195c36..c91276402502fe 100644 --- a/homeassistant/components/insteon/manifest.json +++ b/homeassistant/components/insteon/manifest.json @@ -20,6 +20,7 @@ "pyinsteon==1.6.3", "insteon-frontend-home-assistant==0.5.0" ], + "single_config_entry": true, "usb": [ { "vid": "10BF" diff --git a/homeassistant/components/insteon/strings.json b/homeassistant/components/insteon/strings.json index 37cdd5c0343b89..4df997ac939395 100644 --- a/homeassistant/components/insteon/strings.json +++ b/homeassistant/components/insteon/strings.json @@ -44,7 +44,6 @@ }, "abort": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "not_insteon_device": "Discovered device not an Insteon device" } }, @@ -113,7 +112,7 @@ "services": { "add_all_link": { "name": "Add all link", - "description": "Tells the Insteom Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.", + "description": "Tells the Insteon Modem (IM) start All-Linking mode. 
Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.", "fields": { "group": { "name": "Group", diff --git a/homeassistant/components/intellifire/binary_sensor.py b/homeassistant/components/intellifire/binary_sensor.py index f0a5d84fa62dca..7d00bdfc26d374 100644 --- a/homeassistant/components/intellifire/binary_sensor.py +++ b/homeassistant/components/intellifire/binary_sensor.py @@ -5,8 +5,6 @@ from collections.abc import Callable from dataclasses import dataclass -from intellifire4py.model import IntelliFirePollData - from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -26,7 +24,7 @@ class IntellifireBinarySensorRequiredKeysMixin: """Mixin for required keys.""" - value_fn: Callable[[IntelliFirePollData], bool] + value_fn: Callable[[IntellifireDataUpdateCoordinator], bool | None] @dataclass(frozen=True) @@ -40,100 +38,114 @@ class IntellifireBinarySensorEntityDescription( IntellifireBinarySensorEntityDescription( key="on_off", # This is the sensor name translation_key="flame", # This is the translation key - value_fn=lambda data: data.is_on, + value_fn=lambda coordinator: coordinator.data.is_on, ), IntellifireBinarySensorEntityDescription( key="timer_on", translation_key="timer_on", - value_fn=lambda data: data.timer_on, + value_fn=lambda coordinator: coordinator.data.timer_on, ), IntellifireBinarySensorEntityDescription( key="pilot_light_on", translation_key="pilot_light_on", - value_fn=lambda data: data.pilot_on, + value_fn=lambda coordinator: coordinator.data.pilot_on, ), IntellifireBinarySensorEntityDescription( key="thermostat_on", translation_key="thermostat_on", - value_fn=lambda data: data.thermostat_on, + value_fn=lambda coordinator: coordinator.data.thermostat_on, ), IntellifireBinarySensorEntityDescription( key="error_pilot_flame", translation_key="pilot_flame_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_pilot_flame, + value_fn=lambda coordinator: coordinator.data.error_pilot_flame, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_flame", translation_key="flame_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_flame, + value_fn=lambda coordinator: coordinator.data.error_flame, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_fan_delay", translation_key="fan_delay_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_fan_delay, + value_fn=lambda coordinator: coordinator.data.error_fan_delay, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_maintenance", translation_key="maintenance_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_maintenance, + value_fn=lambda coordinator: coordinator.data.error_maintenance, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_disabled", translation_key="disabled_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_disabled, + value_fn=lambda coordinator: coordinator.data.error_disabled, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_fan", translation_key="fan_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_fan, + value_fn=lambda coordinator: coordinator.data.error_fan, 
device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_lights", translation_key="lights_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_lights, + value_fn=lambda coordinator: coordinator.data.error_lights, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_accessory", translation_key="accessory_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_accessory, + value_fn=lambda coordinator: coordinator.data.error_accessory, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_soft_lock_out", translation_key="soft_lock_out_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_soft_lock_out, + value_fn=lambda coordinator: coordinator.data.error_soft_lock_out, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_ecm_offline", translation_key="ecm_offline_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_ecm_offline, + value_fn=lambda coordinator: coordinator.data.error_ecm_offline, device_class=BinarySensorDeviceClass.PROBLEM, ), IntellifireBinarySensorEntityDescription( key="error_offline", translation_key="offline_error", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.error_offline, + value_fn=lambda coordinator: coordinator.data.error_offline, device_class=BinarySensorDeviceClass.PROBLEM, ), + IntellifireBinarySensorEntityDescription( + key="local_connectivity", + translation_key="local_connectivity", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + value_fn=lambda coordinator: coordinator.fireplace.local_connectivity, + ), + IntellifireBinarySensorEntityDescription( + key="cloud_connectivity", + translation_key="cloud_connectivity", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + value_fn=lambda coordinator: coordinator.fireplace.cloud_connectivity, + ), ) @@ -157,6 +169,6 @@ class IntellifireBinarySensor(IntellifireEntity, BinarySensorEntity): entity_description: IntellifireBinarySensorEntityDescription @property - def is_on(self) -> bool: + def is_on(self) -> bool | None: """Use this to get the correct value.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/homeassistant/components/intellifire/config_flow.py b/homeassistant/components/intellifire/config_flow.py index 56f0d5ca6a5857..a6b63f3b3e8aeb 100644 --- a/homeassistant/components/intellifire/config_flow.py +++ b/homeassistant/components/intellifire/config_flow.py @@ -14,7 +14,7 @@ import voluptuous as vol from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_API_KEY, CONF_HOST, @@ -79,7 +79,6 @@ def __init__(self) -> None: self._dhcp_discovered_serial: str = "" # used only in discovery mode self._discovered_host: DiscoveredHostInfo self._dhcp_mode = False - self._is_reauth = False self._not_configured_hosts: list[DiscoveredHostInfo] = [] self._reauth_needed: DiscoveredHostInfo @@ -182,14 +181,6 @@ async def async_step_pick_cloud_device( # If there is a single fireplace 
configure it if len(available_fireplaces) == 1: - if self._is_reauth: - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self._async_create_config_entry_from_common_data( - fireplace=available_fireplaces[0], existing_entry=reauth_entry - ) - return await self._async_create_config_entry_from_common_data( fireplace=available_fireplaces[0] ) @@ -207,9 +198,7 @@ async def async_step_pick_cloud_device( ) async def _async_create_config_entry_from_common_data( - self, - fireplace: IntelliFireCommonFireplaceData, - existing_entry: ConfigEntry | None = None, + self, fireplace: IntelliFireCommonFireplaceData ) -> ConfigFlowResult: """Construct a config entry based on an object of IntelliFireCommonFireplaceData.""" @@ -226,9 +215,9 @@ async def _async_create_config_entry_from_common_data( options = {CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_LOCAL} - if existing_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - existing_entry, data=data, options=options + self._get_reauth_entry(), data=data, options=options ) return self.async_create_entry( title=f"Fireplace {fireplace.serial}", data=data, options=options @@ -239,11 +228,9 @@ async def async_step_reauth( ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" LOGGER.debug("STEP: reauth") - self._is_reauth = True - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) # populate the expected vars - self._dhcp_discovered_serial = entry.data[CONF_SERIAL] # type: ignore[union-attr] + self._dhcp_discovered_serial = self._get_reauth_entry().data[CONF_SERIAL] placeholders = {"serial": self._dhcp_discovered_serial} self.context["title_placeholders"] = placeholders diff --git a/homeassistant/components/intellifire/icons.json b/homeassistant/components/intellifire/icons.json index 6dca69484b634d..fd6a2c149a7382 100644 --- a/homeassistant/components/intellifire/icons.json +++ b/homeassistant/components/intellifire/icons.json @@ -18,6 +18,20 @@ }, "fan_error": { "default": "mdi:fan-alert" + }, + "local_connectivity": { + "default": "mdi:lan-pending", + "state": { + "on": "mdi:lan-connect", + "off": "mdi:lan-disconnect" + } + }, + "cloud_connectivity": { + "default": "mdi:cloud-question", + "state": { + "on": "mdi:cloud-check-variant-outline", + "off": "mdi:cloud-alert-outline" + } } }, "number": { diff --git a/homeassistant/components/intellifire/strings.json b/homeassistant/components/intellifire/strings.json index 2eeb2b50b93f68..423d2c0788d0c8 100644 --- a/homeassistant/components/intellifire/strings.json +++ b/homeassistant/components/intellifire/strings.json @@ -73,6 +73,12 @@ }, "offline_error": { "name": "Offline error" + }, + "cloud_connectivity": { + "name": "Cloud connectivity" + }, + "local_connectivity": { + "name": "Local connectivity" } }, "fan": { diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index 001f2515ebfe04..1322576f11522f 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -137,6 +137,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: intent.async_register(hass, TimerStatusIntentHandler()) intent.async_register(hass, GetCurrentDateIntentHandler()) intent.async_register(hass, GetCurrentTimeIntentHandler()) + intent.async_register(hass, HelloIntentHandler()) return True @@ -239,6 +240,8 @@ class GetStateIntentHandler(intent.IntentHandler): 
vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), vol.Optional("state"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, } async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: @@ -280,7 +283,13 @@ async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse device_classes=device_classes, assistant=intent_obj.assistant, ) - match_result = intent.async_match_targets(hass, match_constraints) + match_preferences = intent.MatchTargetsPreferences( + area_id=slots.get("preferred_area_id", {}).get("value"), + floor_id=slots.get("preferred_floor_id", {}).get("value"), + ) + match_result = intent.async_match_targets( + hass, match_constraints, match_preferences + ) if ( (not match_result.is_match) and (match_result.no_match_reason is not None) @@ -356,7 +365,7 @@ class NevermindIntentHandler(intent.IntentHandler): description = "Cancels the current request and does nothing" async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: - """Doe not do anything, and produces an empty response.""" + """Do nothing and produces an empty response.""" return intent_obj.create_response() @@ -412,6 +421,17 @@ async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse return response +class HelloIntentHandler(intent.IntentHandler): + """Responds with no action.""" + + intent_type = intent.INTENT_RESPOND + description = "Returns the provided response with no action." + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Return the provided response, but take no action.""" + return intent_obj.create_response() + + async def _async_process_intent( hass: HomeAssistant, domain: str, platform: IntentPlatformProtocol ) -> None: diff --git a/homeassistant/components/iotty/api.py b/homeassistant/components/iotty/api.py index 03e18a02903424..d87fda57731eed 100644 --- a/homeassistant/components/iotty/api.py +++ b/homeassistant/components/iotty/api.py @@ -33,8 +33,6 @@ def __init__( async def async_get_access_token(self) -> Any: """Return a valid access token.""" - - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] diff --git a/homeassistant/components/iotty/coordinator.py b/homeassistant/components/iotty/coordinator.py index 12764ac1cf6d12..420248f77247f7 100644 --- a/homeassistant/components/iotty/coordinator.py +++ b/homeassistant/components/iotty/coordinator.py @@ -61,14 +61,12 @@ def __init__( ) self._device_registry = dr.async_get(hass) - async def async_config_entry_first_refresh(self) -> None: - """Override the first refresh to also fetch iotty devices list.""" + async def _async_setup(self) -> None: + """Get devices.""" _LOGGER.debug("Fetching devices list from iottyCloud") self._devices = await self.iotty.get_devices() _LOGGER.debug("There are %d devices", len(self._devices)) - await super().async_config_entry_first_refresh() - async def _async_update_data(self) -> IottyData: """Fetch data from iottyCloud device.""" _LOGGER.debug("Fetching devices status from iottyCloud") diff --git a/homeassistant/components/iotty/strings.json b/homeassistant/components/iotty/strings.json index 569e148a5a3af8..cb0dc509d9ad28 100644 --- a/homeassistant/components/iotty/strings.json +++ 
b/homeassistant/components/iotty/strings.json @@ -12,7 +12,8 @@ "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", - "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]" + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/ipp/manifest.json b/homeassistant/components/ipp/manifest.json index 2ba82b2cfecef0..baa41cf00bd0d4 100644 --- a/homeassistant/components/ipp/manifest.json +++ b/homeassistant/components/ipp/manifest.json @@ -8,6 +8,6 @@ "iot_class": "local_polling", "loggers": ["deepmerge", "pyipp"], "quality_scale": "platinum", - "requirements": ["pyipp==0.16.0"], + "requirements": ["pyipp==0.17.0"], "zeroconf": ["_ipps._tcp.local.", "_ipp._tcp.local."] } diff --git a/homeassistant/components/ipp/sensor.py b/homeassistant/components/ipp/sensor.py index e872fc7977f37f..a2792c7749b8cf 100644 --- a/homeassistant/components/ipp/sensor.py +++ b/homeassistant/components/ipp/sensor.py @@ -4,7 +4,7 @@ from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import datetime from typing import Any from pyipp import Marker, Printer @@ -19,7 +19,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.util.dt import utcnow from . 
import IPPConfigEntry from .const import ( @@ -80,7 +79,7 @@ def _get_marker_value_fn( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, - value_fn=lambda printer: (utcnow() - timedelta(seconds=printer.info.uptime)), + value_fn=lambda printer: printer.booted_at, ), ) diff --git a/homeassistant/components/iqvia/__init__.py b/homeassistant/components/iqvia/__init__.py index 8b72d6f8784c05..3fabb88b0416db 100644 --- a/homeassistant/components/iqvia/__init__.py +++ b/homeassistant/components/iqvia/__init__.py @@ -76,6 +76,7 @@ async def async_get_data_from_api( coordinator = coordinators[sensor_type] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{entry.data[CONF_ZIP_CODE]} {sensor_type}", update_interval=DEFAULT_SCAN_INTERVAL, update_method=partial(async_get_data_from_api, api_coro), diff --git a/homeassistant/components/iqvia/manifest.json b/homeassistant/components/iqvia/manifest.json index 6142fa1349e539..11c99a7428f2d1 100644 --- a/homeassistant/components/iqvia/manifest.json +++ b/homeassistant/components/iqvia/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiqvia"], - "requirements": ["numpy==1.26.4", "pyiqvia==2022.04.0"] + "requirements": ["numpy==2.1.3", "pyiqvia==2022.04.0"] } diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 11d99a1558a9a1..56a83117e68bf0 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -5,6 +5,7 @@ import logging from typing import TYPE_CHECKING +from aiogithubapi import GitHubAPI from pynecil import Pynecil from homeassistant.components import bluetooth @@ -12,17 +13,36 @@ from homeassistant.const import CONF_NAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import DOMAIN -from .coordinator import IronOSCoordinator +from .coordinator import IronOSFirmwareUpdateCoordinator, IronOSLiveDataCoordinator -PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.UPDATE] -type IronOSConfigEntry = ConfigEntry[IronOSCoordinator] + +type IronOSConfigEntry = ConfigEntry[IronOSLiveDataCoordinator] +IRON_OS_KEY: HassKey[IronOSFirmwareUpdateCoordinator] = HassKey(DOMAIN) + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) _LOGGER = logging.getLogger(__name__) +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up IronOS firmware update coordinator.""" + + session = async_get_clientsession(hass) + github = GitHubAPI(session=session) + + hass.data[IRON_OS_KEY] = IronOSFirmwareUpdateCoordinator(hass, github) + await hass.data[IRON_OS_KEY].async_request_refresh() + return True + + async def async_setup_entry(hass: HomeAssistant, entry: IronOSConfigEntry) -> bool: """Set up IronOS from a config entry.""" if TYPE_CHECKING: @@ -39,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: IronOSConfigEntry) -> bo device = Pynecil(ble_device) - coordinator = IronOSCoordinator(hass, device) + coordinator = IronOSLiveDataCoordinator(hass, device) await 
coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index aefb14b689ba1e..699f5a01704695 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -4,7 +4,9 @@ from datetime import timedelta import logging +from typing import TYPE_CHECKING +from aiogithubapi import GitHubAPI, GitHubException, GitHubReleaseModel from pynecil import CommunicationError, DeviceInfoResponse, LiveDataResponse, Pynecil from homeassistant.config_entries import ConfigEntry @@ -16,10 +18,11 @@ _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=5) +SCAN_INTERVAL_GITHUB = timedelta(hours=3) -class IronOSCoordinator(DataUpdateCoordinator[LiveDataResponse]): - """IronOS coordinator.""" +class IronOSLiveDataCoordinator(DataUpdateCoordinator[LiveDataResponse]): + """IronOS live data coordinator.""" device_info: DeviceInfoResponse config_entry: ConfigEntry @@ -38,16 +41,42 @@ async def _async_update_data(self) -> LiveDataResponse: """Fetch data from Device.""" try: + # device info is cached and won't be refetched on every + # coordinator refresh, only after the device has disconnected + # the device info is refetched + self.device_info = await self.device.get_device_info() return await self.device.get_live_data() except CommunicationError as e: raise UpdateFailed("Cannot connect to device") from e - async def _async_setup(self) -> None: - """Set up the coordinator.""" + +class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]): + """IronOS coordinator for retrieving update information from github.""" + + def __init__(self, hass: HomeAssistant, github: GitHubAPI) -> None: + """Initialize IronOS coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=None, + name=DOMAIN, + update_interval=SCAN_INTERVAL_GITHUB, + ) + self.github = github + + async def _async_update_data(self) -> GitHubReleaseModel: + """Fetch data from Github.""" try: - self.device_info = await self.device.get_device_info() + release = await self.github.repos.releases.latest("Ralim/IronOS") - except CommunicationError as e: - raise UpdateFailed("Cannot connect to device") from e + except GitHubException as e: + raise UpdateFailed( + "Failed to retrieve latest release data from Github" + ) from e + + if TYPE_CHECKING: + assert release.data + + return release.data diff --git a/homeassistant/components/iron_os/entity.py b/homeassistant/components/iron_os/entity.py index 5a24b0a55671ef..77bebda93904b6 100644 --- a/homeassistant/components/iron_os/entity.py +++ b/homeassistant/components/iron_os/entity.py @@ -9,17 +9,17 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import MANUFACTURER, MODEL -from .coordinator import IronOSCoordinator +from .coordinator import IronOSLiveDataCoordinator -class IronOSBaseEntity(CoordinatorEntity[IronOSCoordinator]): +class IronOSBaseEntity(CoordinatorEntity[IronOSLiveDataCoordinator]): """Base IronOS entity.""" _attr_has_entity_name = True def __init__( self, - coordinator: IronOSCoordinator, + coordinator: IronOSLiveDataCoordinator, entity_description: EntityDescription, ) -> None: """Initialize the sensor.""" diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index cfaf36880f2db9..4ec08a43b61e4b 100644 --- a/homeassistant/components/iron_os/manifest.json +++ 
b/homeassistant/components/iron_os/manifest.json @@ -12,6 +12,6 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", - "loggers": ["pynecil"], - "requirements": ["pynecil==0.2.0"] + "loggers": ["pynecil", "aiogithubapi"], + "requirements": ["pynecil==0.2.1", "aiogithubapi==24.6.0"] } diff --git a/homeassistant/components/iron_os/update.py b/homeassistant/components/iron_os/update.py new file mode 100644 index 00000000000000..786ba86f730253 --- /dev/null +++ b/homeassistant/components/iron_os/update.py @@ -0,0 +1,98 @@ +"""Update platform for IronOS integration.""" + +from __future__ import annotations + +from homeassistant.components.update import ( + UpdateDeviceClass, + UpdateEntity, + UpdateEntityDescription, + UpdateEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IRON_OS_KEY, IronOSConfigEntry, IronOSLiveDataCoordinator +from .coordinator import IronOSFirmwareUpdateCoordinator +from .entity import IronOSBaseEntity + +UPDATE_DESCRIPTION = UpdateEntityDescription( + key="firmware", + device_class=UpdateDeviceClass.FIRMWARE, +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IronOSConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up IronOS update platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + [IronOSUpdate(coordinator, hass.data[IRON_OS_KEY], UPDATE_DESCRIPTION)] + ) + + +class IronOSUpdate(IronOSBaseEntity, UpdateEntity): + """Representation of an IronOS update entity.""" + + _attr_supported_features = UpdateEntityFeature.RELEASE_NOTES + + def __init__( + self, + coordinator: IronOSLiveDataCoordinator, + firmware_update: IronOSFirmwareUpdateCoordinator, + entity_description: UpdateEntityDescription, + ) -> None: + """Initialize the sensor.""" + self.firmware_update = firmware_update + super().__init__(coordinator, entity_description) + + @property + def installed_version(self) -> str | None: + """IronOS version on the device.""" + + return self.coordinator.device_info.build + + @property + def title(self) -> str | None: + """Title of the IronOS release.""" + + return f"IronOS {self.firmware_update.data.name}" + + @property + def release_url(self) -> str | None: + """URL to the full release notes of the latest IronOS version available.""" + + return self.firmware_update.data.html_url + + @property + def latest_version(self) -> str | None: + """Latest IronOS version available for install.""" + + return self.firmware_update.data.tag_name + + async def async_release_notes(self) -> str | None: + """Return the release notes.""" + + return self.firmware_update.data.body + + async def async_added_to_hass(self) -> None: + """When entity is added to hass. + + Register extra update listener for the firmware update coordinator. 
+ """ + await super().async_added_to_hass() + self.async_on_remove( + self.firmware_update.async_add_listener(self._handle_coordinator_update) + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return ( + self.installed_version is not None + and self.firmware_update.last_update_success + ) diff --git a/homeassistant/components/islamic_prayer_times/config_flow.py b/homeassistant/components/islamic_prayer_times/config_flow.py index 2db89183499d4f..ce911ccc49d351 100644 --- a/homeassistant/components/islamic_prayer_times/config_flow.py +++ b/homeassistant/components/islamic_prayer_times/config_flow.py @@ -52,7 +52,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> IslamicPrayerOptionsFlowHandler: """Get the options flow for this handler.""" - return IslamicPrayerOptionsFlowHandler(config_entry) + return IslamicPrayerOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -93,10 +93,6 @@ async def async_step_user( class IslamicPrayerOptionsFlowHandler(OptionsFlow): """Handle Islamic Prayer client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/iss/__init__.py b/homeassistant/components/iss/__init__.py index 606263ce769eae..dbbcc8b6c518f8 100644 --- a/homeassistant/components/iss/__init__.py +++ b/homeassistant/components/iss/__init__.py @@ -53,6 +53,7 @@ async def async_update() -> IssData: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update, update_interval=timedelta(seconds=60), diff --git a/homeassistant/components/iss/config_flow.py b/homeassistant/components/iss/config_flow.py index 80644698239b80..eaf01a6d0946c1 100644 --- a/homeassistant/components/iss/config_flow.py +++ b/homeassistant/components/iss/config_flow.py @@ -1,5 +1,7 @@ """Config flow to configure iss component.""" +from __future__ import annotations + import voluptuous as vol from homeassistant.config_entries import ( @@ -23,16 +25,12 @@ class ISSConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - # Check if already configured - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry( title=DEFAULT_NAME, @@ -46,16 +44,10 @@ async def async_step_user(self, user_input=None) -> ConfigFlowResult: class OptionsFlowHandler(OptionsFlow): """Config flow options handler for iss.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) return self.async_show_form( step_id="init", diff --git 
a/homeassistant/components/iss/manifest.json b/homeassistant/components/iss/manifest.json index 1dc885c9df6a11..bf36a15db46249 100644 --- a/homeassistant/components/iss/manifest.json +++ b/homeassistant/components/iss/manifest.json @@ -7,5 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiss"], - "requirements": ["pyiss==1.0.1"] + "requirements": ["pyiss==1.0.1"], + "single_config_entry": true } diff --git a/homeassistant/components/iss/strings.json b/homeassistant/components/iss/strings.json index e0c7d85efa43e5..17e86587e855b7 100644 --- a/homeassistant/components/iss/strings.json +++ b/homeassistant/components/iss/strings.json @@ -6,7 +6,6 @@ } }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "latitude_longitude_not_defined": "Latitude and longitude are not defined in Home Assistant." } }, diff --git a/homeassistant/components/ista_ecotrend/config_flow.py b/homeassistant/components/ista_ecotrend/config_flow.py index 15222995a37c00..c11c43070df7bb 100644 --- a/homeassistant/components/ista_ecotrend/config_flow.py +++ b/homeassistant/components/ista_ecotrend/config_flow.py @@ -17,7 +17,6 @@ TextSelectorType, ) -from . import IstaConfigEntry from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -43,8 +42,6 @@ class IstaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for ista EcoTrend.""" - reauth_entry: IstaConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -88,9 +85,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -98,9 +92,8 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - if TYPE_CHECKING: - assert self.reauth_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: ista = PyEcotrendIsta( user_input[CONF_EMAIL], @@ -117,9 +110,7 @@ async def async_step_reauth_confirm( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - self.reauth_entry, data=user_input - ) + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( step_id="reauth_confirm", @@ -128,12 +119,12 @@ async def async_step_reauth_confirm( suggested_values={ CONF_EMAIL: user_input[CONF_EMAIL] if user_input is not None - else self.reauth_entry.data[CONF_EMAIL] + else reauth_entry.data[CONF_EMAIL] }, ), description_placeholders={ - CONF_NAME: self.reauth_entry.title, - CONF_EMAIL: self.reauth_entry.data[CONF_EMAIL], + CONF_NAME: reauth_entry.title, + CONF_EMAIL: reauth_entry.data[CONF_EMAIL], }, errors=errors, ) diff --git a/homeassistant/components/isy994/config_flow.py b/homeassistant/components/isy994/config_flow.py index 0239926f5e3ec5..3575fa99a55e7d 100644 --- a/homeassistant/components/isy994/config_flow.py +++ b/homeassistant/components/isy994/config_flow.py @@ -140,7 +140,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, 
Any] | None = None @@ -314,10 +314,6 @@ async def async_step_reauth_confirm( class OptionsFlowHandler(OptionsFlow): """Handle a option flow for ISY/IoX.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/isy994/strings.json b/homeassistant/components/isy994/strings.json index ec7d78edd53bce..f0e55881652dae 100644 --- a/homeassistant/components/isy994/strings.json +++ b/homeassistant/components/isy994/strings.json @@ -29,7 +29,8 @@ "invalid_host": "The host entry was not in full URL format, e.g., http://192.168.10.100:80" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "options": { diff --git a/homeassistant/components/jellyfin/config_flow.py b/homeassistant/components/jellyfin/config_flow.py index 7b5426cffde3ef..0c170d2485f970 100644 --- a/homeassistant/components/jellyfin/config_flow.py +++ b/homeassistant/components/jellyfin/config_flow.py @@ -8,11 +8,7 @@ import voluptuous as vol -from homeassistant.config_entries import ( - ConfigFlow, - ConfigFlowResult, - OptionsFlowWithConfigEntry, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import callback from homeassistant.util.uuid import random_uuid_hex @@ -56,7 +52,6 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the Jellyfin config flow.""" self.client_device_id: str | None = None - self.entry: JellyfinConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -108,7 +103,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -118,8 +112,8 @@ async def async_step_reauth_confirm( errors: dict[str, str] = {} if user_input is not None: - assert self.entry is not None - new_input = self.entry.data | user_input + reauth_entry = self._get_reauth_entry() + new_input = reauth_entry.data | user_input if self.client_device_id is None: self.client_device_id = _generate_client_device_id() @@ -135,10 +129,7 @@ async def async_step_reauth_confirm( errors["base"] = "unknown" _LOGGER.exception("Unexpected exception") else: - self.hass.config_entries.async_update_entry(self.entry, data=new_input) - - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=new_input) return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_DATA_SCHEMA, errors=errors @@ -148,12 +139,12 @@ async def async_step_reauth_confirm( @callback def async_get_options_flow( config_entry: JellyfinConfigEntry, - ) -> OptionsFlowWithConfigEntry: + ) -> OptionsFlowHandler: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class 
OptionsFlowHandler(OptionsFlow):
     """Handle an option flow for jellyfin."""
 
     async def async_step_init(
diff --git a/homeassistant/components/jellyfin/const.py b/homeassistant/components/jellyfin/const.py
index 34fb040115f6b3..cdddaa46ad102f 100644
--- a/homeassistant/components/jellyfin/const.py
+++ b/homeassistant/components/jellyfin/const.py
@@ -83,5 +83,5 @@
     "Season": MediaClass.SEASON,
 }
 
-PLATFORMS = [Platform.MEDIA_PLAYER, Platform.SENSOR]
+PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE, Platform.SENSOR]
 LOGGER = logging.getLogger(__package__)
diff --git a/homeassistant/components/jellyfin/coordinator.py b/homeassistant/components/jellyfin/coordinator.py
index a9b0a8b7031fea..2042825025419d 100644
--- a/homeassistant/components/jellyfin/coordinator.py
+++ b/homeassistant/components/jellyfin/coordinator.py
@@ -41,6 +41,7 @@ def __init__(
         self.user_id: str = user_id
 
         self.session_ids: set[str] = set()
+        self.remote_session_ids: set[str] = set()
         self.device_ids: set[str] = set()
 
     async def _async_update_data(self) -> dict[str, dict[str, Any]]:
diff --git a/homeassistant/components/jellyfin/remote.py b/homeassistant/components/jellyfin/remote.py
new file mode 100644
index 00000000000000..ae33d58cc0c046
--- /dev/null
+++ b/homeassistant/components/jellyfin/remote.py
@@ -0,0 +1,80 @@
+"""Support for Jellyfin remote commands."""
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+import time
+from typing import Any
+
+from homeassistant.components.remote import (
+    ATTR_DELAY_SECS,
+    ATTR_NUM_REPEATS,
+    DEFAULT_DELAY_SECS,
+    DEFAULT_NUM_REPEATS,
+    RemoteEntity,
+)
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import JellyfinConfigEntry
+from .const import LOGGER
+from .coordinator import JellyfinDataUpdateCoordinator
+from .entity import JellyfinClientEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: JellyfinConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up Jellyfin remote from a config entry."""
+    coordinator = entry.runtime_data
+
+    @callback
+    def handle_coordinator_update() -> None:
+        """Add remote per session."""
+        entities: list[RemoteEntity] = []
+        for session_id, session_data in coordinator.data.items():
+            if (
+                session_id not in coordinator.remote_session_ids
+                and session_data["SupportsRemoteControl"]
+            ):
+                entity = JellyfinRemote(coordinator, session_id)
+                LOGGER.debug("Creating remote for session: %s", session_id)
+                coordinator.remote_session_ids.add(session_id)
+                entities.append(entity)
+        async_add_entities(entities)
+
+    handle_coordinator_update()
+
+    entry.async_on_unload(coordinator.async_add_listener(handle_coordinator_update))
+
+
+class JellyfinRemote(JellyfinClientEntity, RemoteEntity):
+    """Defines a Jellyfin remote entity."""
+
+    def __init__(
+        self,
+        coordinator: JellyfinDataUpdateCoordinator,
+        session_id: str,
+    ) -> None:
+        """Initialize the Jellyfin Remote entity."""
+        super().__init__(coordinator, session_id)
+        self._attr_unique_id = f"{coordinator.server_id}-{session_id}"
+
+    @property
+    def is_on(self) -> bool:
+        """Return if the client is on."""
+        return self.session_data["IsActive"] if self.session_data else False
+
+    def send_command(self, command: Iterable[str], **kwargs: Any) -> None:
+        """Send a command to the client."""
+        num_repeats = kwargs.get(ATTR_NUM_REPEATS, DEFAULT_NUM_REPEATS)
+        delay = kwargs.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS)
+
+        for _ in range(num_repeats):
+            for 
single_command in command: + self.coordinator.api_client.jellyfin.command( + self.session_id, single_command + ) + time.sleep(delay) diff --git a/homeassistant/components/jewish_calendar/__init__.py b/homeassistant/components/jewish_calendar/__init__.py index fd238e8d615b41..823e9bd59be54f 100644 --- a/homeassistant/components/jewish_calendar/__init__.py +++ b/homeassistant/components/jewish_calendar/__init__.py @@ -5,26 +5,17 @@ from functools import partial from hdate import Location -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_ELEVATION, CONF_LANGUAGE, CONF_LATITUDE, - CONF_LOCATION, CONF_LONGITUDE, - CONF_NAME, CONF_TIME_ZONE, Platform, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback -import homeassistant.helpers.config_validation as cv -import homeassistant.helpers.entity_registry as er -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType +from homeassistant.core import HomeAssistant -from .binary_sensor import BINARY_SENSORS from .const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, @@ -33,94 +24,15 @@ DEFAULT_DIASPORA, DEFAULT_HAVDALAH_OFFSET_MINUTES, DEFAULT_LANGUAGE, - DEFAULT_NAME, - DOMAIN, ) -from .sensor import INFO_SENSORS, TIME_SENSORS +from .entity import JewishCalendarConfigEntry, JewishCalendarData PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.All( - cv.deprecated(DOMAIN), - { - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_DIASPORA, default=DEFAULT_DIASPORA): cv.boolean, - vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, - vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, - vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In( - ["hebrew", "english"] - ), - vol.Optional( - CONF_CANDLE_LIGHT_MINUTES, default=DEFAULT_CANDLE_LIGHT - ): int, - # Default of 0 means use 8.5 degrees / 'three_stars' time. - vol.Optional( - CONF_HAVDALAH_OFFSET_MINUTES, - default=DEFAULT_HAVDALAH_OFFSET_MINUTES, - ): int, - }, - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -def get_unique_prefix( - location: Location, - language: str, - candle_lighting_offset: int | None, - havdalah_offset: int | None, -) -> str: - """Create a prefix for unique ids.""" - # location.altitude was unset before 2024.6 when this method - # was used to create the unique id. As such it would always - # use the default altitude of 754. 
- config_properties = [ - location.latitude, - location.longitude, - location.timezone, - 754, - location.diaspora, - language, - candle_lighting_offset, - havdalah_offset, - ] - prefix = "_".join(map(str, config_properties)) - return f"{prefix}" - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Jewish Calendar component.""" - if DOMAIN not in config: - return True - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - issue_domain=DOMAIN, - breaks_in_ha_version="2024.12.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": DEFAULT_NAME, - }, - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] - ) - ) - return True - - -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry +) -> bool: """Set up a configuration entry for Jewish calendar.""" language = config_entry.data.get(CONF_LANGUAGE, DEFAULT_LANGUAGE) diaspora = config_entry.data.get(CONF_DIASPORA, DEFAULT_DIASPORA) @@ -143,27 +55,19 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) ) - hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = { - CONF_LANGUAGE: language, - CONF_DIASPORA: diaspora, - CONF_LOCATION: location, - CONF_CANDLE_LIGHT_MINUTES: candle_lighting_offset, - CONF_HAVDALAH_OFFSET_MINUTES: havdalah_offset, - } - - # Update unique ID to be unrelated to user defined options - old_prefix = get_unique_prefix( - location, language, candle_lighting_offset, havdalah_offset + config_entry.runtime_data = JewishCalendarData( + language, + diaspora, + location, + candle_lighting_offset, + havdalah_offset, ) - ent_reg = er.async_get(hass) - entries = er.async_entries_for_config_entry(ent_reg, config_entry.entry_id) - if not entries or any(entry.unique_id.startswith(old_prefix) for entry in entries): - async_update_unique_ids(ent_reg, config_entry.entry_id, old_prefix) - await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + async def update_listener( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry + ) -> None: # Trigger update of states for all platforms await hass.config_entries.async_reload(config_entry.entry_id) @@ -171,35 +75,8 @@ async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: return True -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry +) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - - return unload_ok - - -@callback -def async_update_unique_ids( - ent_reg: er.EntityRegistry, new_prefix: str, old_prefix: str -) -> None: - """Update unique ID to be unrelated to user defined options. 
- - Introduced with release 2024.6 - """ - platform_descriptions = { - Platform.BINARY_SENSOR: BINARY_SENSORS, - Platform.SENSOR: (*INFO_SENSORS, *TIME_SENSORS), - } - for platform, descriptions in platform_descriptions.items(): - for description in descriptions: - new_unique_id = f"{new_prefix}-{description.key}" - old_unique_id = f"{old_prefix}_{description.key}" - if entity_id := ent_reg.async_get_entity_id( - platform, DOMAIN, old_unique_id - ): - ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) diff --git a/homeassistant/components/jewish_calendar/binary_sensor.py b/homeassistant/components/jewish_calendar/binary_sensor.py index 060650ee25cc00..9fd1371f8a8faf 100644 --- a/homeassistant/components/jewish_calendar/binary_sensor.py +++ b/homeassistant/components/jewish_calendar/binary_sensor.py @@ -14,15 +14,13 @@ BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers import event from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .const import DOMAIN -from .entity import JewishCalendarEntity +from .entity import JewishCalendarConfigEntry, JewishCalendarEntity @dataclass(frozen=True) @@ -63,14 +61,12 @@ class JewishCalendarBinarySensorEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: JewishCalendarConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish Calendar binary sensors.""" - entry = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - JewishCalendarBinarySensor(config_entry, entry, description) + JewishCalendarBinarySensor(config_entry, description) for description in BINARY_SENSORS ) diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index 67223324ae9ee2..a2eadbf57bd713 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -12,7 +12,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_ELEVATION, @@ -87,36 +87,24 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Jewish calendar.""" VERSION = 1 - _config_entry: ConfigEntry @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowWithConfigEntry: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> JewishCalendarOptionsFlowHandler: """Get the options flow for this handler.""" - return JewishCalendarOptionsFlowHandler(config_entry) + return JewishCalendarOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: - _options = {} - if CONF_CANDLE_LIGHT_MINUTES in user_input: - _options[CONF_CANDLE_LIGHT_MINUTES] = user_input[ - CONF_CANDLE_LIGHT_MINUTES - ] - del user_input[CONF_CANDLE_LIGHT_MINUTES] - if CONF_HAVDALAH_OFFSET_MINUTES in user_input: - _options[CONF_HAVDALAH_OFFSET_MINUTES] = user_input[ - CONF_HAVDALAH_OFFSET_MINUTES - ] - del user_input[CONF_HAVDALAH_OFFSET_MINUTES] if CONF_LOCATION in user_input: user_input[CONF_LATITUDE] 
= user_input[CONF_LOCATION][CONF_LATITUDE] user_input[CONF_LONGITUDE] = user_input[CONF_LOCATION][CONF_LONGITUDE] - return self.async_create_entry( - title=DEFAULT_NAME, data=user_input, options=_options - ) + return self.async_create_entry(title=DEFAULT_NAME, data=user_input) return self.async_show_form( step_id="user", @@ -125,36 +113,24 @@ async def async_step_user( ), ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_data) - async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - self._config_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" + reconfigure_entry = self._get_reconfigure_entry() if not user_input: return self.async_show_form( data_schema=self.add_suggested_values_to_schema( _get_data_schema(self.hass), - {**self._config_entry.data}, + reconfigure_entry.data, ), - step_id="reconfigure_confirm", + step_id="reconfigure", ) - return self.async_update_reload_and_abort( - self._config_entry, data=user_input, reason="reconfigure_successful" - ) + return self.async_update_reload_and_abort(reconfigure_entry, data=user_input) -class JewishCalendarOptionsFlowHandler(OptionsFlowWithConfigEntry): +class JewishCalendarOptionsFlowHandler(OptionsFlow): """Handle Jewish Calendar options.""" async def async_step_init( diff --git a/homeassistant/components/jewish_calendar/entity.py b/homeassistant/components/jewish_calendar/entity.py index c11925df954889..ad5ac8e21374ab 100644 --- a/homeassistant/components/jewish_calendar/entity.py +++ b/homeassistant/components/jewish_calendar/entity.py @@ -1,18 +1,27 @@ """Entity representing a Jewish Calendar sensor.""" -from typing import Any +from dataclasses import dataclass + +from hdate import Location from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_LANGUAGE, CONF_LOCATION from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import Entity, EntityDescription -from .const import ( - CONF_CANDLE_LIGHT_MINUTES, - CONF_DIASPORA, - CONF_HAVDALAH_OFFSET_MINUTES, - DOMAIN, -) +from .const import DOMAIN + +type JewishCalendarConfigEntry = ConfigEntry[JewishCalendarData] + + +@dataclass +class JewishCalendarData: + """Jewish Calendar runtime dataclass.""" + + language: str + diaspora: bool + location: Location + candle_lighting_offset: int + havdalah_offset: int class JewishCalendarEntity(Entity): @@ -22,8 +31,7 @@ class JewishCalendarEntity(Entity): def __init__( self, - config_entry: ConfigEntry, - data: dict[str, Any], + config_entry: JewishCalendarConfigEntry, description: EntityDescription, ) -> None: """Initialize a Jewish Calendar entity.""" @@ -32,10 +40,10 @@ def __init__( self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, identifiers={(DOMAIN, config_entry.entry_id)}, - name=config_entry.title, ) - self._location = data[CONF_LOCATION] - self._hebrew = data[CONF_LANGUAGE] == "hebrew" - self._candle_lighting_offset = data[CONF_CANDLE_LIGHT_MINUTES] - self._havdalah_offset = data[CONF_HAVDALAH_OFFSET_MINUTES] - self._diaspora = data[CONF_DIASPORA] + data = 
config_entry.runtime_data + self._location = data.location + self._hebrew = data.language == "hebrew" + self._candle_lighting_offset = data.candle_lighting_offset + self._havdalah_offset = data.havdalah_offset + self._diaspora = data.diaspora diff --git a/homeassistant/components/jewish_calendar/sensor.py b/homeassistant/components/jewish_calendar/sensor.py index 87b4375b8b2eca..c32647af07c6de 100644 --- a/homeassistant/components/jewish_calendar/sensor.py +++ b/homeassistant/components/jewish_calendar/sensor.py @@ -14,15 +14,13 @@ SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import SUN_EVENT_SUNSET, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sun import get_astral_event_date import homeassistant.util.dt as dt_util -from .const import DOMAIN -from .entity import JewishCalendarEntity +from .entity import JewishCalendarConfigEntry, JewishCalendarEntity _LOGGER = logging.getLogger(__name__) @@ -169,17 +167,15 @@ async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: JewishCalendarConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish calendar sensors .""" - entry = hass.data[DOMAIN][config_entry.entry_id] sensors = [ - JewishCalendarSensor(config_entry, entry, description) - for description in INFO_SENSORS + JewishCalendarSensor(config_entry, description) for description in INFO_SENSORS ] sensors.extend( - JewishCalendarTimeSensor(config_entry, entry, description) + JewishCalendarTimeSensor(config_entry, description) for description in TIME_SENSORS ) @@ -193,12 +189,11 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): def __init__( self, - config_entry: ConfigEntry, - data: dict[str, Any], + config_entry: JewishCalendarConfigEntry, description: SensorEntityDescription, ) -> None: """Initialize the Jewish calendar sensor.""" - super().__init__(config_entry, data, description) + super().__init__(config_entry, description) self._attrs: dict[str, str] = {} async def async_update(self) -> None: diff --git a/homeassistant/components/jewish_calendar/strings.json b/homeassistant/components/jewish_calendar/strings.json index e5367b5819e716..1b7b86c0056871 100644 --- a/homeassistant/components/jewish_calendar/strings.json +++ b/homeassistant/components/jewish_calendar/strings.json @@ -27,7 +27,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "options": { diff --git a/homeassistant/components/juicenet/__init__.py b/homeassistant/components/juicenet/__init__.py index 445d04e67ecbcd..fcfca7f24923f8 100644 --- a/homeassistant/components/juicenet/__init__.py +++ b/homeassistant/components/juicenet/__init__.py @@ -83,6 +83,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="JuiceNet", update_method=async_update_data, update_interval=timedelta(seconds=30), diff --git a/homeassistant/components/justnimbus/config_flow.py b/homeassistant/components/justnimbus/config_flow.py index 8c816c1ac1b1ec..7b0d3f8e5db6f0 100644 --- a/homeassistant/components/justnimbus/config_flow.py +++ b/homeassistant/components/justnimbus/config_flow.py @@ -9,7 
+9,7 @@ import justnimbus import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID from homeassistant.helpers import config_validation as cv @@ -29,7 +29,6 @@ class JustNimbusConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for JustNimbus.""" VERSION = 1 - reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -44,7 +43,7 @@ async def async_step_user( unique_id = f"{user_input[CONF_CLIENT_ID]}{user_input[CONF_ZIP_CODE]}" await self.async_set_unique_id(unique_id=unique_id) - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() client = justnimbus.JustNimbusClient( @@ -60,18 +59,12 @@ async def async_step_user( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: return self.async_create_entry(title="JustNimbus", data=user_input) - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=user_input, unique_id=unique_id + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input, unique_id=unique_id ) - # Reload the config entry otherwise devices will remain unavailable - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") - return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) @@ -80,7 +73,4 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/jvc_projector/config_flow.py b/homeassistant/components/jvc_projector/config_flow.py index 253aa640f718c8..5d9bedd75919e3 100644 --- a/homeassistant/components/jvc_projector/config_flow.py +++ b/homeassistant/components/jvc_projector/config_flow.py @@ -9,7 +9,7 @@ from jvcprojector.projector import DEFAULT_PORT import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.helpers.device_registry import format_mac from homeassistant.util.network import is_host_valid @@ -22,8 +22,6 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -77,22 +75,18 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth on password authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self._reauth_entry - errors = {} if user_input is not None: - host = self._reauth_entry.data[CONF_HOST] - port = self._reauth_entry.data[CONF_PORT] + reauth_entry = 
self._get_reauth_entry() + host = reauth_entry.data[CONF_HOST] + port = reauth_entry.data[CONF_PORT] password = user_input[CONF_PASSWORD] try: @@ -102,12 +96,9 @@ async def async_step_reauth_confirm( except JvcProjectorAuthError: errors["base"] = "invalid_auth" else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={CONF_HOST: host, CONF_PORT: port, CONF_PASSWORD: password}, + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/jvc_projector/strings.json b/homeassistant/components/jvc_projector/strings.json index b89139cbab3e94..b517bf064e1d3a 100644 --- a/homeassistant/components/jvc_projector/strings.json +++ b/homeassistant/components/jvc_projector/strings.json @@ -24,6 +24,7 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "error": { diff --git a/homeassistant/components/keenetic_ndms2/config_flow.py b/homeassistant/components/keenetic_ndms2/config_flow.py index 69e81bf292db04..d11fedac38556f 100644 --- a/homeassistant/components/keenetic_ndms2/config_flow.py +++ b/homeassistant/components/keenetic_ndms2/config_flow.py @@ -55,7 +55,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> KeeneticOptionsFlowHandler: """Get the options flow for this handler.""" - return KeeneticOptionsFlowHandler(config_entry) + return KeeneticOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -138,9 +138,8 @@ async def async_step_ssdp( class KeeneticOptionsFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._interface_options: dict[str, str] = {} async def async_step_init( diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index 8cff9321729a4d..019d1dddcad535 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -12,7 +12,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.core import callback @@ -33,13 +33,10 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Kitchen Sink", data=import_data) async def async_step_reauth( @@ -57,7 +54,7 @@ async def async_step_reauth_confirm( return self.async_abort(reason="reauth_successful") -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle options.""" async def async_step_init( @@ -71,8 +68,7 @@ async def async_step_options_1( ) -> ConfigFlowResult: """Manage 
the options.""" if user_input is not None: - self.options.update(user_input) - return await self._update_options() + return self.async_create_entry(data=self.config_entry.options | user_input) return self.async_show_form( step_id="options_1", @@ -98,7 +94,3 @@ async def async_step_options_1( } ), ) - - async def _update_options(self) -> ConfigFlowResult: - """Update config entry options.""" - return self.async_create_entry(title="", data=self.options) diff --git a/homeassistant/components/kitchen_sink/manifest.json b/homeassistant/components/kitchen_sink/manifest.json index e2f9468f7e0fa5..ae2462afbbdaee 100644 --- a/homeassistant/components/kitchen_sink/manifest.json +++ b/homeassistant/components/kitchen_sink/manifest.json @@ -5,5 +5,6 @@ "codeowners": ["@home-assistant/core"], "documentation": "https://www.home-assistant.io/integrations/kitchen_sink", "iot_class": "calculated", - "quality_scale": "internal" + "quality_scale": "internal", + "single_config_entry": true } diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index 74cddb9f2c09e9..63e27e046376eb 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -1,5 +1,8 @@ { "config": { + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, "step": { "reauth_confirm": { "description": "Select **Submit** to reauthenticate" diff --git a/homeassistant/components/kmtronic/__init__.py b/homeassistant/components/kmtronic/__init__.py index 5f93de3c60e491..edec0b32af2505 100644 --- a/homeassistant/components/kmtronic/__init__.py +++ b/homeassistant/components/kmtronic/__init__.py @@ -44,6 +44,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{MANUFACTURER} {hub.name}", update_method=async_update_data, update_interval=timedelta(seconds=30), diff --git a/homeassistant/components/kmtronic/config_flow.py b/homeassistant/components/kmtronic/config_flow.py index 6bf0b878f72917..56b1d4675bce9e 100644 --- a/homeassistant/components/kmtronic/config_flow.py +++ b/homeassistant/components/kmtronic/config_flow.py @@ -66,7 +66,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> KMTronicOptionsFlow: """Get the options flow for this handler.""" - return KMTronicOptionsFlow(config_entry) + return KMTronicOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -102,10 +102,6 @@ class InvalidAuth(HomeAssistantError): class KMTronicOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/knocki/strings.json b/homeassistant/components/knocki/strings.json index 8f5d01611664d4..8e6fb72228151f 100644 --- a/homeassistant/components/knocki/strings.json +++ b/homeassistant/components/knocki/strings.json @@ -10,6 +10,7 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index 736c5f6cb9dcd0..fe6f3ad88922a0 100644 --- a/homeassistant/components/knx/__init__.py +++ 
b/homeassistant/components/knx/__init__.py @@ -29,7 +29,6 @@ ) from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.reload import async_integration_yaml_config @@ -193,14 +192,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: }, ) - # set up notify service for backwards compatibility - remove 2024.11 - if NotifySchema.PLATFORM in config: - hass.async_create_task( - discovery.async_load_platform( - hass, Platform.NOTIFY, DOMAIN, {}, hass.data[DATA_HASS_CONFIG] - ) - ) - await register_panel(hass) return True diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 879e1421bd4996..0e0da4d5c0cf9d 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -136,6 +136,9 @@ def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate: ClimateSchema.CONF_FAN_SPEED_STATE_ADDRESS ), fan_speed_mode=config[ClimateSchema.CONF_FAN_SPEED_MODE], + group_address_humidity_state=config.get( + ClimateSchema.CONF_HUMIDITY_STATE_ADDRESS + ), ) @@ -397,6 +400,11 @@ async def async_set_fan_mode(self, fan_mode: str) -> None: await self._device.set_fan_speed(self._fan_modes_percentages[fan_mode_index]) + @property + def current_humidity(self) -> float | None: + """Return the current humidity.""" + return self._device.humidity.value + @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return device specific state attributes.""" diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index 4a71c60082410d..feeb7626577ba2 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -770,7 +770,6 @@ class KNXOptionsFlow(KNXCommonFlow, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize KNX options flow.""" - self.config_entry = config_entry super().__init__(initial_data=config_entry.data) # type: ignore[arg-type] @callback diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index aa0178b2c4a860..df895282a2b298 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -11,8 +11,8 @@ "loggers": ["xknx", "xknxproject"], "quality_scale": "platinum", "requirements": [ - "xknx==3.2.0", - "xknxproject==3.8.0", + "xknx==3.3.0", + "xknxproject==3.8.1", "knx-frontend==2024.9.10.221729" ], "single_config_entry": true diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index 46abbaa1454117..245de2e937e387 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -2,86 +2,21 @@ from __future__ import annotations -from typing import Any - from xknx import XKNX from xknx.devices import Notification as XknxNotification from homeassistant import config_entries -from homeassistant.components.notify import ( - BaseNotificationService, - NotifyEntity, - migrate_notify_issue, -) +from homeassistant.components.notify import NotifyEntity from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, CONF_TYPE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from 
homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY +from .const import KNX_ADDRESS, KNX_MODULE_KEY from .entity import KnxYamlEntity -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> KNXNotificationService | None: - """Get the KNX notification service.""" - if discovery_info is None: - return None - - knx_module = hass.data[KNX_MODULE_KEY] - if platform_config := knx_module.config_yaml.get(Platform.NOTIFY): - xknx: XKNX = hass.data[KNX_MODULE_KEY].xknx - - notification_devices = [ - _create_notification_instance(xknx, device_config) - for device_config in platform_config - ] - return KNXNotificationService(notification_devices) - - return None - - -class KNXNotificationService(BaseNotificationService): - """Implement notification service.""" - - def __init__(self, devices: list[XknxNotification]) -> None: - """Initialize the service.""" - self.devices = devices - - @property - def targets(self) -> dict[str, str]: - """Return a dictionary of registered targets.""" - ret = {} - for device in self.devices: - ret[device.name] = device.name - return ret - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a notification to knx bus.""" - migrate_notify_issue( - self.hass, DOMAIN, "KNX", "2024.11.0", service_name=self._service_name - ) - if "target" in kwargs: - await self._async_send_to_device(message, kwargs["target"]) - else: - await self._async_send_to_all_devices(message) - - async def _async_send_to_all_devices(self, message: str) -> None: - """Send a notification to knx bus to all connected devices.""" - for device in self.devices: - await device.set(message) - - async def _async_send_to_device(self, message: str, names: str) -> None: - """Send a notification to knx bus to device with given names.""" - for device in self.devices: - if device.name in names: - await device.set(message) - - async def async_setup_entry( hass: HomeAssistant, config_entry: config_entries.ConfigEntry, diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index cc65a399da7e0c..bf2fc55e5c9446 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -347,6 +347,7 @@ class ClimateSchema(KNXPlatformSchema): CONF_FAN_MAX_STEP = "fan_max_step" CONF_FAN_SPEED_MODE = "fan_speed_mode" CONF_FAN_ZERO_MODE = "fan_zero_mode" + CONF_HUMIDITY_STATE_ADDRESS = "humidity_state_address" DEFAULT_NAME = "KNX Climate" DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010" @@ -439,6 +440,7 @@ class ClimateSchema(KNXPlatformSchema): vol.Optional(CONF_FAN_ZERO_MODE, default=FAN_OFF): vol.Coerce( FanZeroMode ), + vol.Optional(CONF_HUMIDITY_STATE_ADDRESS): ga_list_validator, } ), ) diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index 3f1ef99c6fb2f5..65dd7cf39b3d41 100644 --- a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -402,9 +402,10 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.entry = config_entry - self.model = self.entry.data[CONF_MODEL] - self.current_opt = self.entry.options or self.entry.data[CONF_DEFAULT_OPTIONS] + self.model = config_entry.data[CONF_MODEL] + self.current_opt = ( + 
config_entry.options or config_entry.data[CONF_DEFAULT_OPTIONS] + ) # as config proceeds we'll build up new options and then replace what's in the config entry self.new_opt: dict[str, Any] = {CONF_IO: {}} @@ -475,7 +476,7 @@ async def async_step_options_io( ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) @@ -511,7 +512,7 @@ async def async_step_options_io( ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) @@ -571,7 +572,7 @@ async def async_step_options_io_ext( ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) diff --git a/homeassistant/components/kostal_plenticore/sensor.py b/homeassistant/components/kostal_plenticore/sensor.py index fbbfb03fb3eb37..67de34f2fce77f 100644 --- a/homeassistant/components/kostal_plenticore/sensor.py +++ b/homeassistant/components/kostal_plenticore/sensor.py @@ -17,6 +17,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, + EntityCategory, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, @@ -747,6 +748,15 @@ class PlenticoreSensorEntityDescription(SensorEntityDescription): state_class=SensorStateClass.TOTAL_INCREASING, formatter="format_energy", ), + PlenticoreSensorEntityDescription( + module_id="scb:event", + key="Event:ActiveErrorCnt", + name="Active Alarms", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + icon="mdi:alert", + formatter="format_round", + ), PlenticoreSensorEntityDescription( module_id="_virt_", key="pv_P", diff --git a/homeassistant/components/kraken/config_flow.py b/homeassistant/components/kraken/config_flow.py index 6777851527379f..54a817f0a50dc1 100644 --- a/homeassistant/components/kraken/config_flow.py +++ b/homeassistant/components/kraken/config_flow.py @@ -33,7 +33,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> KrakenOptionsFlowHandler: """Get the options flow for this handler.""" - return KrakenOptionsFlowHandler(config_entry) + return KrakenOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,10 +53,6 @@ async def async_step_user( class KrakenOptionsFlowHandler(OptionsFlow): """Handle Kraken client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Kraken options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/kraken/manifest.json b/homeassistant/components/kraken/manifest.json index 98347f7681b307..fed16a673b5182 100644 --- a/homeassistant/components/kraken/manifest.json +++ b/homeassistant/components/kraken/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/kraken", "iot_class": "cloud_polling", "loggers": ["krakenex", "pykrakenapi"], - "requirements": ["krakenex==2.1.0", "pykrakenapi==0.1.8"] + "requirements": ["krakenex==2.2.2", "pykrakenapi==0.1.8"] } diff --git a/homeassistant/components/lacrosse_view/config_flow.py b/homeassistant/components/lacrosse_view/config_flow.py index 5a3fe4a03ca55a..ecf30f9a197016 100644 --- a/homeassistant/components/lacrosse_view/config_flow.py 
+++ b/homeassistant/components/lacrosse_view/config_flow.py @@ -9,7 +9,7 @@ from lacrosse_view import LaCrosse, Location, LoginError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -54,7 +54,6 @@ def __init__(self) -> None: """Initialize the config flow.""" self.data: dict[str, str] = {} self.locations: list[Location] = [] - self._reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,12 +82,10 @@ async def async_step_user( self.locations = info # Check if we are reauthenticating - if self._reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=self._reauth_entry.data | self.data + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=self.data ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") _LOGGER.debug("Moving on to location step") return await self.async_step_location() @@ -139,9 +136,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Reauth in case of a password change or other error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/lacrosse_view/manifest.json b/homeassistant/components/lacrosse_view/manifest.json index 1cf8794237d751..453a0855229b92 100644 --- a/homeassistant/components/lacrosse_view/manifest.json +++ b/homeassistant/components/lacrosse_view/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lacrosse_view", "iot_class": "cloud_polling", "loggers": ["lacrosse_view"], - "requirements": ["lacrosse-view==1.0.2"] + "requirements": ["lacrosse-view==1.0.3"] } diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index 8df7a2f5d0e23c..da513bc8cffa6f 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -2,12 +2,12 @@ import logging -from lmcloud.client_bluetooth import LaMarzoccoBluetoothClient -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.const import BT_MODEL_PREFIXES, FirmwareType -from lmcloud.exceptions import AuthFail, RequestNotSuccessful from packaging import version +from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient +from pylamarzocco.client_cloud import LaMarzoccoCloudClient +from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.components.bluetooth import async_discovered_service_info from homeassistant.config_entries import ConfigEntry @@ -26,7 +26,7 @@ from homeassistant.helpers.httpx_client import get_async_client from .const import CONF_USE_BLUETOOTH, DOMAIN -from .coordinator import LaMarzoccoUpdateCoordinator +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator PLATFORMS = [ 
Platform.BINARY_SENSOR, @@ -41,8 +41,6 @@ _LOGGER = logging.getLogger(__name__) -type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoUpdateCoordinator] - async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -> bool: """Set up La Marzocco as config entry.""" @@ -103,6 +101,7 @@ def bluetooth_configured() -> bool: coordinator = LaMarzoccoUpdateCoordinator( hass=hass, + entry=entry, local_client=local_client, cloud_client=cloud_client, bluetooth_client=bluetooth_client, diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 81ac3672a0fa21..444e4d0723b579 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription diff --git a/homeassistant/components/lamarzocco/button.py b/homeassistant/components/lamarzocco/button.py index 56fcca98cb35cb..ae79e21897ffe4 100644 --- a/homeassistant/components/lamarzocco/button.py +++ b/homeassistant/components/lamarzocco/button.py @@ -1,21 +1,23 @@ """Button platform for La Marzocco espresso machines.""" +import asyncio from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.exceptions import RequestNotSuccessful from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +BACKFLUSH_ENABLED_DURATION = 15 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoButtonEntityDescription( @@ -24,14 +26,25 @@ class LaMarzoccoButtonEntityDescription( ): """Description of a La Marzocco button.""" - press_fn: Callable[[LaMarzoccoMachine], Coroutine[Any, Any, None]] + press_fn: Callable[[LaMarzoccoUpdateCoordinator], Coroutine[Any, Any, None]] + + +async def async_backflush_and_update(coordinator: LaMarzoccoUpdateCoordinator) -> None: + """Press backflush button.""" + await coordinator.device.start_backflush() + # lib will set state optimistically + coordinator.async_set_updated_data(None) + # backflush is enabled for 15 seconds + # then turns off automatically + await asyncio.sleep(BACKFLUSH_ENABLED_DURATION + 1) + await coordinator.async_request_refresh() ENTITIES: tuple[LaMarzoccoButtonEntityDescription, ...] 
= ( LaMarzoccoButtonEntityDescription( key="start_backflush", translation_key="start_backflush", - press_fn=lambda machine: machine.start_backflush(), + press_fn=async_backflush_and_update, ), ) @@ -59,7 +72,7 @@ class LaMarzoccoButtonEntity(LaMarzoccoEntity, ButtonEntity): async def async_press(self) -> None: """Press button.""" try: - await self.entity_description.press_fn(self.coordinator.device) + await self.entity_description.press_fn(self.coordinator) except RequestNotSuccessful as exc: raise HomeAssistantError( translation_domain=DOMAIN, @@ -68,4 +81,3 @@ async def async_press(self) -> None: "key": self.entity_description.key, }, ) from exc - await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lamarzocco/calendar.py b/homeassistant/components/lamarzocco/calendar.py index 8b3240ff7a1958..0ec9b55a9a1def 100644 --- a/homeassistant/components/lamarzocco/calendar.py +++ b/homeassistant/components/lamarzocco/calendar.py @@ -3,15 +3,14 @@ from collections.abc import Iterator from datetime import datetime, timedelta -from lmcloud.models import LaMarzoccoWakeUpSleepEntry +from pylamarzocco.models import LaMarzoccoWakeUpSleepEntry from homeassistant.components.calendar import CalendarEntity, CalendarEvent from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import LaMarzoccoConfigEntry -from .coordinator import LaMarzoccoUpdateCoordinator +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoBaseEntity CALENDAR_KEY = "auto_on_off_schedule" diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index 898a93a014ab01..04e705edbdcac8 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -1,19 +1,22 @@ """Config flow for La Marzocco integration.""" +from __future__ import annotations + from collections.abc import Mapping import logging from typing import Any -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.models import LaMarzoccoDeviceInfo +from pylamarzocco.client_cloud import LaMarzoccoCloudClient +from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoDeviceInfo import voluptuous as vol from homeassistant.components.bluetooth import ( BluetoothServiceInfo, async_discovered_service_info, ) +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.config_entries import ( SOURCE_REAUTH, SOURCE_RECONFIGURE, @@ -21,7 +24,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_HOST, @@ -54,9 +56,6 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 - reauth_entry: ConfigEntry - reconfigure_entry: ConfigEntry - def __init__(self) -> None: """Initialize the config flow.""" self._config: dict[str, Any] = {} @@ -73,7 +72,7 @@ async def async_step_user( if user_input: data: dict[str, Any] = {} if self.source == SOURCE_REAUTH: - data = dict(self.reauth_entry.data) + data = dict(self._get_reauth_entry().data) data = { **data, **user_input, @@ -99,13 +98,22 @@ async def async_step_user( if not errors: if self.source == SOURCE_REAUTH: 
return self.async_update_reload_and_abort( - self.reauth_entry, data=data, reason="reauth_successful" + self._get_reauth_entry(), data=data ) if self._discovered: if self._discovered[CONF_MACHINE] not in self._fleet: errors["base"] = "machine_not_found" else: self._config = data + # if DHCP discovery was used, auto fill machine selection + if CONF_HOST in self._discovered: + return await self.async_step_machine_selection( + user_input={ + CONF_HOST: self._discovered[CONF_HOST], + CONF_MACHINE: self._discovered[CONF_MACHINE], + } + ) + # if Bluetooth discovery was used, only select host return self.async_show_form( step_id="machine_selection", data_schema=vol.Schema( @@ -208,12 +216,11 @@ async def async_step_bluetooth_selection( if user_input is not None: return self.async_update_reload_and_abort( - self.reconfigure_entry, + self._get_reconfigure_entry(), data={ **self._config, CONF_MAC: user_input[CONF_MAC], }, - reason="reconfigure_successful", ) bt_options = [ @@ -262,11 +269,31 @@ async def async_step_bluetooth( return await self.async_step_user() + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle discovery via dhcp.""" + + serial = discovery_info.hostname.upper() + + await self.async_set_unique_id(serial) + self._abort_if_unique_id_configured() + + _LOGGER.debug( + "Discovered La Marzocco machine %s through DHCP at address %s", + discovery_info.hostname, + discovery_info.ip, + ) + + self._discovered[CONF_MACHINE] = serial + self._discovered[CONF_HOST] = discovery_info.ip + + return await self.async_step_user() + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -289,25 +316,19 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Perform reconfiguration of the config entry.""" - self.reconfigure_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Confirm reconfiguration of the device.""" if not user_input: + reconfigure_entry = self._get_reconfigure_entry() return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required( CONF_USERNAME, - default=self.reconfigure_entry.data[CONF_USERNAME], + default=reconfigure_entry.data[CONF_USERNAME], ): str, vol.Required( CONF_PASSWORD, - default=self.reconfigure_entry.data[CONF_PASSWORD], + default=reconfigure_entry.data[CONF_PASSWORD], ): str, } ), @@ -319,12 +340,12 @@ async def async_step_reconfigure_confirm( @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> LmOptionsFlowHandler: """Create the options flow.""" - return LmOptionsFlowHandler(config_entry) + return LmOptionsFlowHandler() -class LmOptionsFlowHandler(OptionsFlowWithConfigEntry): +class LmOptionsFlowHandler(OptionsFlow): """Handles options flow for the component.""" async def async_step_init( @@ -338,7 +359,7 @@ async def async_step_init( { vol.Optional( CONF_USE_BLUETOOTH, - default=self.options.get(CONF_USE_BLUETOOTH, True), + default=self.config_entry.options.get(CONF_USE_BLUETOOTH, True), ): cv.boolean, } ) diff --git a/homeassistant/components/lamarzocco/coordinator.py 
b/homeassistant/components/lamarzocco/coordinator.py index f255276b192283..05fee98c5997e8 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -1,16 +1,18 @@ """Coordinator for La Marzocco API.""" +from __future__ import annotations + from collections.abc import Callable, Coroutine from datetime import timedelta import logging from time import time from typing import Any -from lmcloud.client_bluetooth import LaMarzoccoBluetoothClient -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient +from pylamarzocco.client_cloud import LaMarzoccoCloudClient +from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MODEL, CONF_NAME, EVENT_HOMEASSISTANT_STOP @@ -26,21 +28,30 @@ _LOGGER = logging.getLogger(__name__) +type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoUpdateCoordinator] + class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): """Class to handle fetching data from the La Marzocco API centrally.""" - config_entry: ConfigEntry + config_entry: LaMarzoccoConfigEntry def __init__( self, hass: HomeAssistant, + entry: LaMarzoccoConfigEntry, cloud_client: LaMarzoccoCloudClient, local_client: LaMarzoccoLocalClient | None, bluetooth_client: LaMarzoccoBluetoothClient | None, ) -> None: """Initialize coordinator.""" - super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) + super().__init__( + hass, + _LOGGER, + config_entry=entry, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) self.local_connection_configured = local_client is not None assert self.config_entry.unique_id diff --git a/homeassistant/components/lamarzocco/diagnostics.py b/homeassistant/components/lamarzocco/diagnostics.py index 4293fdca6150d9..43ae51ee192866 100644 --- a/homeassistant/components/lamarzocco/diagnostics.py +++ b/homeassistant/components/lamarzocco/diagnostics.py @@ -5,12 +5,12 @@ from dataclasses import asdict from typing import Any, TypedDict -from lmcloud.const import FirmwareType +from pylamarzocco.const import FirmwareType from homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant -from . 
import LaMarzoccoConfigEntry +from .coordinator import LaMarzoccoConfigEntry TO_REDACT = { "serial_number", diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index 9cc2ce8ef6bcc5..1ea84302a17acf 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.const import FirmwareType -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.const import FirmwareType +from pylamarzocco.lm_machine import LaMarzoccoMachine from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription @@ -43,6 +43,7 @@ def __init__( name=device.name, manufacturer="La Marzocco", model=device.full_model_name, + model_id=device.model, serial_number=device.serial_number, sw_version=device.firmware[FirmwareType.MACHINE].current_version, ) diff --git a/homeassistant/components/lamarzocco/icons.json b/homeassistant/components/lamarzocco/icons.json index bc7d621d91de14..860da12ddd91d9 100644 --- a/homeassistant/components/lamarzocco/icons.json +++ b/homeassistant/components/lamarzocco/icons.json @@ -43,6 +43,9 @@ "preinfusion_off": { "default": "mdi:water" }, + "smart_standby_time": { + "default": "mdi:timer" + }, "steam_temp": { "default": "mdi:thermometer-water" }, @@ -51,6 +54,13 @@ } }, "select": { + "smart_standby_mode": { + "default": "mdi:power", + "state": { + "poweron": "mdi:power", + "lastbrewing": "mdi:coffee" + } + }, "steam_temp_select": { "default": "mdi:thermometer", "state": { @@ -100,6 +110,12 @@ "off": "mdi:alarm-off" } }, + "smart_standby_enabled": { + "state": { + "on": "mdi:sleep", + "off": "mdi:sleep-off" + } + }, "steam_boiler": { "default": "mdi:water-boiler", "state": { diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index a1da8982cd8562..6b2260511183e2 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -18,9 +18,20 @@ "codeowners": ["@zweckj"], "config_flow": true, "dependencies": ["bluetooth_adapters"], + "dhcp": [ + { + "hostname": "gs[0-9][0-9][0-9][0-9][0-9][0-9]" + }, + { + "hostname": "lm[0-9][0-9][0-9][0-9][0-9][0-9]" + }, + { + "hostname": "mr[0-9][0-9][0-9][0-9][0-9][0-9]" + } + ], "documentation": "https://www.home-assistant.io/integrations/lamarzocco", "integration_type": "device", "iot_class": "cloud_polling", - "loggers": ["lmcloud"], - "requirements": ["lmcloud==1.2.3"] + "loggers": ["pylamarzocco"], + "requirements": ["pylamarzocco==1.2.3"] } diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index e607d8561936bf..825c5d6deb0743 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -4,16 +4,16 @@ from dataclasses import dataclass from typing import Any -from lmcloud.const import ( +from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.number import ( 
NumberDeviceClass, @@ -31,9 +31,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry from .const import DOMAIN -from .coordinator import LaMarzoccoUpdateCoordinator +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription @@ -109,6 +108,22 @@ class LaMarzoccoKeyNumberEntityDescription( MachineModel.GS3_MP, ), ), + LaMarzoccoNumberEntityDescription( + key="smart_standby_time", + translation_key="smart_standby_time", + device_class=NumberDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.MINUTES, + native_step=10, + native_min_value=10, + native_max_value=240, + entity_category=EntityCategory.CONFIG, + set_value_fn=lambda machine, value: machine.set_smart_standby( + enabled=machine.config.smart_standby.enabled, + mode=machine.config.smart_standby.mode, + minutes=int(value), + ), + native_value_fn=lambda config: config.smart_standby.minutes, + ), ) diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index 7a410796285df0..1889ba38d6bc7c 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -4,10 +4,10 @@ from dataclasses import dataclass from typing import Any -from lmcloud.const import MachineModel, PrebrewMode, SteamLevel -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory @@ -15,8 +15,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LaMarzoccoConfigEntry from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription STEAM_LEVEL_HA_TO_LM = { @@ -25,11 +25,7 @@ "3": SteamLevel.LEVEL_3, } -STEAM_LEVEL_LM_TO_HA = { - SteamLevel.LEVEL_1: "1", - SteamLevel.LEVEL_2: "2", - SteamLevel.LEVEL_3: "3", -} +STEAM_LEVEL_LM_TO_HA = {value: key for key, value in STEAM_LEVEL_HA_TO_LM.items()} PREBREW_MODE_HA_TO_LM = { "disabled": PrebrewMode.DISABLED, @@ -37,12 +33,15 @@ "preinfusion": PrebrewMode.PREINFUSION, } -PREBREW_MODE_LM_TO_HA = { - PrebrewMode.DISABLED: "disabled", - PrebrewMode.PREBREW: "prebrew", - PrebrewMode.PREINFUSION: "preinfusion", +PREBREW_MODE_LM_TO_HA = {value: key for key, value in PREBREW_MODE_HA_TO_LM.items()} + +STANDBY_MODE_HA_TO_LM = { + "power_on": SmartStandbyMode.POWER_ON, + "last_brewing": SmartStandbyMode.LAST_BREWING, } +STANDBY_MODE_LM_TO_HA = {value: key for key, value in STANDBY_MODE_HA_TO_LM.items()} + @dataclass(frozen=True, kw_only=True) class LaMarzoccoSelectEntityDescription( @@ -83,6 +82,20 @@ class LaMarzoccoSelectEntityDescription( MachineModel.LINEA_MINI, ), ), + LaMarzoccoSelectEntityDescription( + key="smart_standby_mode", + translation_key="smart_standby_mode", + entity_category=EntityCategory.CONFIG, + options=["power_on", "last_brewing"], + select_option_fn=lambda machine, option: machine.set_smart_standby( + enabled=machine.config.smart_standby.enabled, + mode=STANDBY_MODE_HA_TO_LM[option], + minutes=machine.config.smart_standby.minutes, + ), + current_option_fn=lambda config: STANDBY_MODE_LM_TO_HA[ + config.smart_standby.mode + ], + ), ) diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index 225f0a43c5c5d1..04b095e798ccc6 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.const import BoilerType, MachineModel, PhysicalKey -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.const import BoilerType, MachineModel, PhysicalKey +from pylamarzocco.lm_machine import LaMarzoccoMachine from homeassistant.components.sensor import ( SensorDeviceClass, @@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LaMarzoccoConfigEntry +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 71b13e2b789eed..959dda265a9454 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -8,6 +8,7 @@ }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "machine_not_found": "Discovered machine not found in given account", "no_machines": "No machines found in account", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" }, @@ -47,7 +48,7 @@ "password": "[%key:component::lamarzocco::config::step::user::data_description::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" @@ -116,6 +117,9 @@ "preinfusion_off_key": { "name": "Preinfusion time Key {key}" }, + "smart_standby_time": { + "name": "Smart standby time" + }, "steam_temp": { "name": "Steam target temperature" }, @@ -132,6 +136,13 @@ "preinfusion": "Preinfusion" } }, + "smart_standby_mode": { + "name": "Smart standby mode", + "state": { + "last_brewing": "Last brewing", + "power_on": "Power on" + } + }, "steam_temp_select": { "name": "Steam level", "state": { @@ -162,6 +173,9 @@ "auto_on_off": { "name": "Auto on/off ({id})" }, + "smart_standby_enabled": { + "name": "Smart standby enabled" + }, "steam_boiler": { "name": "Steam boiler" } diff --git a/homeassistant/components/lamarzocco/switch.py b/homeassistant/components/lamarzocco/switch.py index dda0f0f1d580bd..f7690885f05914 100644 --- a/homeassistant/components/lamarzocco/switch.py +++ b/homeassistant/components/lamarzocco/switch.py @@ -4,10 +4,10 @@ from dataclasses import dataclass from typing import Any -from lmcloud.const import BoilerType -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.const import BoilerType +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory @@ -15,9 +15,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import LaMarzoccoConfigEntry from .const import DOMAIN -from .coordinator import LaMarzoccoUpdateCoordinator +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoBaseEntity, LaMarzoccoEntity, LaMarzoccoEntityDescription @@ -46,6 +45,17 @@ class LaMarzoccoSwitchEntityDescription( control_fn=lambda machine, state: machine.set_steam(state), is_on_fn=lambda config: config.boilers[BoilerType.STEAM].enabled, ), + LaMarzoccoSwitchEntityDescription( + key="smart_standby_enabled", + translation_key="smart_standby_enabled", + entity_category=EntityCategory.CONFIG, + control_fn=lambda machine, state: machine.set_smart_standby( + enabled=state, + mode=machine.config.smart_standby.mode, + minutes=machine.config.smart_standby.minutes, + ), + is_on_fn=lambda config: config.smart_standby.enabled, + ), ) diff --git a/homeassistant/components/lamarzocco/update.py b/homeassistant/components/lamarzocco/update.py index 0bf8ea3264f3bf..371ff679bae0d4 100644 --- a/homeassistant/components/lamarzocco/update.py +++ b/homeassistant/components/lamarzocco/update.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from typing import Any -from lmcloud.const import FirmwareType -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import RequestNotSuccessful from homeassistant.components.update import ( UpdateDeviceClass, @@ -17,8 +17,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import LaMarzoccoConfigEntry from .const import DOMAIN +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription diff --git a/homeassistant/components/lametric/config_flow.py b/homeassistant/components/lametric/config_flow.py index 8dbd5279bc6034..36dcdf26ed6368 100644 --- a/homeassistant/components/lametric/config_flow.py +++ b/homeassistant/components/lametric/config_flow.py @@ -29,7 +29,7 @@ ATTR_UPNP_SERIAL, SsdpServiceInfo, ) -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_DEVICE, CONF_HOST, CONF_MAC from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -59,7 +59,6 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): discovered_host: str discovered_serial: str discovered: bool = False - reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -113,9 +112,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with LaMetric.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_choice_enter_manual_or_fetch_cloud() async def async_step_choice_enter_manual_or_fetch_cloud( @@ -138,8 +134,8 @@ async def async_step_manual_entry( if user_input is not None: if self.discovered: host = self.discovered_host - elif self.reauth_entry: - host = self.reauth_entry.data[CONF_HOST] + elif self.source == SOURCE_REAUTH: + host = self._get_reauth_entry().data[CONF_HOST] else: host = user_input[CONF_HOST] @@ -162,7 +158,7 @@ async def async_step_manual_entry( TextSelectorConfig(type=TextSelectorType.PASSWORD) ) } - if not self.discovered and not self.reauth_entry: + if 
not self.discovered and self.source != SOURCE_REAUTH: schema = {vol.Required(CONF_HOST): TextSelector()} | schema return self.async_show_form( @@ -195,10 +191,11 @@ async def async_step_cloud_select_device( """Handle device selection from devices offered by the cloud.""" if self.discovered: user_input = {CONF_DEVICE: self.discovered_serial} - elif self.reauth_entry: - if self.reauth_entry.unique_id not in self.devices: + elif self.source == SOURCE_REAUTH: + reauth_unique_id = self._get_reauth_entry().unique_id + if reauth_unique_id not in self.devices: return self.async_abort(reason="reauth_device_not_found") - user_input = {CONF_DEVICE: self.reauth_entry.unique_id} + user_input = {CONF_DEVICE: reauth_unique_id} elif len(self.devices) == 1: user_input = {CONF_DEVICE: list(self.devices.values())[0].serial_number} @@ -251,7 +248,7 @@ async def _async_step_create_entry( device = await lametric.device() - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: await self.async_set_unique_id(device.serial_number) self._abort_if_unique_id_configured( updates={CONF_HOST: lametric.host, CONF_API_KEY: lametric.api_key} @@ -273,19 +270,14 @@ async def _async_step_create_entry( ) ) - if self.reauth_entry: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={ CONF_HOST: lametric.host, CONF_API_KEY: lametric.api_key, }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=device.name, diff --git a/homeassistant/components/landisgyr_heat_meter/strings.json b/homeassistant/components/landisgyr_heat_meter/strings.json index 4bae2490006fe9..31f08ded79f827 100644 --- a/homeassistant/components/landisgyr_heat_meter/strings.json +++ b/homeassistant/components/landisgyr_heat_meter/strings.json @@ -12,6 +12,9 @@ } } }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } diff --git a/homeassistant/components/lastfm/config_flow.py b/homeassistant/components/lastfm/config_flow.py index c6ea120242d610..0e1f680dd636cd 100644 --- a/homeassistant/components/lastfm/config_flow.py +++ b/homeassistant/components/lastfm/config_flow.py @@ -11,7 +11,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY from homeassistant.core import callback @@ -80,7 +80,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> LastFmOptionsFlowHandler: """Get the options flow for this handler.""" - return LastFmOptionsFlowHandler(config_entry) + return LastFmOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -155,7 +155,7 @@ async def async_step_friends( ) -class LastFmOptionsFlowHandler(OptionsFlowWithConfigEntry): +class LastFmOptionsFlowHandler(OptionsFlow): """LastFm Options flow handler.""" async def async_step_init( @@ -163,24 +163,25 @@ async def async_step_init( ) -> ConfigFlowResult: """Initialize form.""" errors: dict[str, str] = {} + options = self.config_entry.options if user_input is not None: users, errors = validate_lastfm_users( - self.options[CONF_API_KEY], user_input[CONF_USERS] + options[CONF_API_KEY], user_input[CONF_USERS] ) 
user_input[CONF_USERS] = users if not errors: return self.async_create_entry( title="LastFM", data={ - **self.options, + **options, CONF_USERS: user_input[CONF_USERS], }, ) - if self.options[CONF_MAIN_USER]: + if options[CONF_MAIN_USER]: try: main_user, _ = get_lastfm_user( - self.options[CONF_API_KEY], - self.options[CONF_MAIN_USER], + options[CONF_API_KEY], + options[CONF_MAIN_USER], ) friends_response = await self.hass.async_add_executor_job( main_user.get_friends @@ -206,6 +207,6 @@ async def async_step_init( ), } ), - user_input or self.options, + user_input or options, ), ) diff --git a/homeassistant/components/launch_library/__init__.py b/homeassistant/components/launch_library/__init__.py index 66e7eb832fe3da..6bfd3bc9adf9f4 100644 --- a/homeassistant/components/launch_library/__init__.py +++ b/homeassistant/components/launch_library/__init__.py @@ -51,6 +51,7 @@ async def async_update() -> LaunchLibraryData: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update, update_interval=timedelta(hours=1), diff --git a/homeassistant/components/launch_library/config_flow.py b/homeassistant/components/launch_library/config_flow.py index 3cdff3650b3459..37b80fbff8a0fd 100644 --- a/homeassistant/components/launch_library/config_flow.py +++ b/homeassistant/components/launch_library/config_flow.py @@ -18,10 +18,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - # Check if already configured - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry(title="Launch Library", data=user_input) diff --git a/homeassistant/components/launch_library/manifest.json b/homeassistant/components/launch_library/manifest.json index 00f11f95a448b1..3258a9a34fb286 100644 --- a/homeassistant/components/launch_library/manifest.json +++ b/homeassistant/components/launch_library/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/launch_library", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["pylaunches==2.0.0"] + "requirements": ["pylaunches==2.0.0"], + "single_config_entry": true } diff --git a/homeassistant/components/launch_library/strings.json b/homeassistant/components/launch_library/strings.json index f3cca9fc581418..a587544f836b63 100644 --- a/homeassistant/components/launch_library/strings.json +++ b/homeassistant/components/launch_library/strings.json @@ -4,9 +4,6 @@ "user": { "description": "Do you want to configure the Launch Library?" 
} - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "entity": { diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index a8d75fe56352a2..27f911822b5525 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -8,23 +8,27 @@ import pypck from pypck.connection import PchkConnectionManager -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_DEVICE_ID, + CONF_DOMAIN, + CONF_ENTITIES, CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.typing import ConfigType from .const import ( ADD_ENTITIES_CALLBACKS, CONF_ACKNOWLEDGE, CONF_DIM_MODE, + CONF_DOMAIN_DATA, CONF_SK_NUM_TRIES, + CONF_TRANSITION, CONNECTION, DOMAIN, PLATFORMS, @@ -34,37 +38,15 @@ InputType, async_update_config_entry, generate_unique_id, - import_lcn_config, register_lcn_address_devices, register_lcn_host_device, ) -from .schemas import CONFIG_SCHEMA # noqa: F401 from .services import SERVICES from .websocket import register_panel_and_ws_api _LOGGER = logging.getLogger(__name__) -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the LCN component.""" - if DOMAIN not in config: - return True - - # initialize a config_flow for all LCN configurations read from - # configuration.yaml - config_entries_data = import_lcn_config(config[DOMAIN]) - - for config_entry_data in config_entries_data: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config_entry_data, - ) - ) - return True - - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up a connection to PCHK host from a config entry.""" hass.data.setdefault(DOMAIN, {}) @@ -147,15 +129,25 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> config_entry.minor_version, ) - if config_entry.version == 1: - new_data = {**config_entry.data} + new_data = {**config_entry.data} + if config_entry.version == 1: + # update to 1.2 (add acknowledge flag) if config_entry.minor_version < 2: new_data[CONF_ACKNOWLEDGE] = False - hass.config_entries.async_update_entry( - config_entry, data=new_data, minor_version=2, version=1 - ) + # update to 2.1 (fix transitions for lights and switches) + new_entities_data = [*new_data[CONF_ENTITIES]] + for entity in new_entities_data: + if entity[CONF_DOMAIN] in [Platform.LIGHT, Platform.SCENE]: + if entity[CONF_DOMAIN_DATA][CONF_TRANSITION] is None: + entity[CONF_DOMAIN_DATA][CONF_TRANSITION] = 0 + entity[CONF_DOMAIN_DATA][CONF_TRANSITION] /= 1000.0 + new_data[CONF_ENTITIES] = new_entities_data + + hass.config_entries.async_update_entry( + config_entry, data=new_data, minor_version=1, version=2 + ) _LOGGER.debug( "Migration to configuration version %s.%s successful", diff --git a/homeassistant/components/lcn/binary_sensor.py b/homeassistant/components/lcn/binary_sensor.py index 106e74fd060326..d0ce4815f19224 100644 --- a/homeassistant/components/lcn/binary_sensor.py +++ b/homeassistant/components/lcn/binary_sensor.py @@ -5,14 +5,21 @@ import pypck +from homeassistant.components.automation import automations_with_entity from homeassistant.components.binary_sensor import ( DOMAIN as 
DOMAIN_BINARY_SENSOR, BinarySensorEntity, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from homeassistant.helpers.typing import ConfigType from .const import ( @@ -83,11 +90,28 @@ def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_added_to_hass() + if not self.device_connection.is_group: await self.device_connection.activate_status_request_handler( self.setpoint_variable ) + entity_automations = automations_with_entity(self.hass, self.entity_id) + entity_scripts = scripts_with_entity(self.hass, self.entity_id) + if entity_automations + entity_scripts: + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_binary_sensor_{self.entity_id}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_regulatorlock_sensor", + translation_placeholders={ + "entity": f"{DOMAIN_BINARY_SENSOR}.{self.name.lower().replace(' ', '_')}", + }, + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() @@ -95,6 +119,9 @@ async def async_will_remove_from_hass(self) -> None: await self.device_connection.cancel_status_request_handler( self.setpoint_variable ) + async_delete_issue( + self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}" + ) def input_received(self, input_obj: InputType) -> None: """Set sensor value when LCN input object (command) is received.""" @@ -156,14 +183,34 @@ def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_added_to_hass() + if not self.device_connection.is_group: await self.device_connection.activate_status_request_handler(self.source) + entity_automations = automations_with_entity(self.hass, self.entity_id) + entity_scripts = scripts_with_entity(self.hass, self.entity_id) + if entity_automations + entity_scripts: + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_binary_sensor_{self.entity_id}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_keylock_sensor", + translation_placeholders={ + "entity": f"{DOMAIN_BINARY_SENSOR}.{self.name.lower().replace(' ', '_')}", + }, + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() if not self.device_connection.is_group: await self.device_connection.cancel_status_request_handler(self.source) + async_delete_issue( + self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}" + ) def input_received(self, input_obj: InputType) -> None: """Set sensor value when LCN input object (command) is received.""" diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index a0c911b745e7ee..008265e62aebf2 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -9,7 +9,6 @@ 
import voluptuous as vol from homeassistant import config_entries -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.const import ( CONF_BASE, CONF_DEVICES, @@ -20,14 +19,12 @@ CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType from . import PchkConnectionManager from .const import CONF_ACKNOWLEDGE, CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN -from .helpers import purge_device_registry, purge_entity_registry _LOGGER = logging.getLogger(__name__) @@ -110,59 +107,8 @@ async def validate_connection(data: ConfigType) -> str | None: class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a LCN config flow.""" - VERSION = 1 - MINOR_VERSION = 2 - - _context_entry: ConfigEntry - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import existing configuration from LCN.""" - # validate the imported connection parameters - if error := await validate_connection(import_data): - async_create_issue( - self.hass, - DOMAIN, - error, - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.ERROR, - translation_key=error, - translation_placeholders={ - "url": "/config/integrations/dashboard/add?domain=lcn" - }, - ) - return self.async_abort(reason=error) - - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "LCN", - }, - ) - - # check if we already have a host with the same address configured - if entry := get_config_entry(self.hass, import_data): - entry.source = config_entries.SOURCE_IMPORT - # Cleanup entity and device registry, if we imported from configuration.yaml to - # remove orphans when entities were removed from configuration - purge_entity_registry(self.hass, entry.entry_id, import_data) - purge_device_registry(self.hass, entry.entry_id, import_data) - - self.hass.config_entries.async_update_entry(entry, data=import_data) - return self.async_abort(reason="existing_configuration_updated") - - return self.async_create_entry( - title=f"{import_data[CONF_HOST]}", data=import_data - ) + VERSION = 2 + MINOR_VERSION = 1 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -198,36 +144,26 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> config_entries.ConfigFlowResult: """Reconfigure LCN configuration.""" - self._context_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> config_entries.ConfigFlowResult: - """Reconfigure LCN configuration.""" + reconfigure_entry = self._get_reconfigure_entry() errors = None if user_input is not None: - user_input[CONF_HOST] = self._context_entry.data[CONF_HOST] + user_input[CONF_HOST] = reconfigure_entry.data[CONF_HOST] - await self.hass.config_entries.async_unload(self._context_entry.entry_id) + await self.hass.config_entries.async_unload(reconfigure_entry.entry_id) if (error := await 
validate_connection(user_input)) is not None: errors = {CONF_BASE: error} if errors is None: - data = self._context_entry.data.copy() - data.update(user_input) - self.hass.config_entries.async_update_entry( - self._context_entry, data=data + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input ) - await self.hass.config_entries.async_setup(self._context_entry.entry_id) - return self.async_abort(reason="reconfigure_successful") - await self.hass.config_entries.async_setup(self._context_entry.entry_id) + await self.hass.config_entries.async_setup(reconfigure_entry.entry_id) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( - CONFIG_SCHEMA, self._context_entry.data + CONFIG_SCHEMA, reconfigure_entry.data ), - errors=errors or {}, + errors=errors, ) diff --git a/homeassistant/components/lcn/const.py b/homeassistant/components/lcn/const.py index 707d0f29ba367e..97aeeecd8b580a 100644 --- a/homeassistant/components/lcn/const.py +++ b/homeassistant/components/lcn/const.py @@ -42,6 +42,7 @@ CONF_KEYS = "keys" CONF_TIME = "time" CONF_TIME_UNIT = "time_unit" +CONF_LOCK_TIME = "lock_time" CONF_TABLE = "table" CONF_ROW = "row" CONF_TEXT = "text" diff --git a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py index 7da047682ac273..6a9c63ea212d54 100644 --- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -9,7 +9,6 @@ from typing import cast import pypck -import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -19,17 +18,12 @@ CONF_DEVICES, CONF_DOMAIN, CONF_ENTITIES, - CONF_HOST, - CONF_IP_ADDRESS, CONF_LIGHTS, CONF_NAME, - CONF_PASSWORD, - CONF_PORT, CONF_RESOURCE, CONF_SENSORS, CONF_SOURCE, CONF_SWITCHES, - CONF_USERNAME, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -37,19 +31,13 @@ from .const import ( BINSENSOR_PORTS, - CONF_ACKNOWLEDGE, CONF_CLIMATES, - CONF_CONNECTIONS, - CONF_DIM_MODE, - CONF_DOMAIN_DATA, CONF_HARDWARE_SERIAL, CONF_HARDWARE_TYPE, CONF_OUTPUT, CONF_SCENES, - CONF_SK_NUM_TRIES, CONF_SOFTWARE_SERIAL, CONNECTION, - DEFAULT_NAME, DOMAIN, LED_PORTS, LOGICOP_PORTS, @@ -146,110 +134,6 @@ def generate_unique_id( return unique_id -def import_lcn_config(lcn_config: ConfigType) -> list[ConfigType]: - """Convert lcn settings from configuration.yaml to config_entries data. - - Create a list of config_entry data structures like: - - "data": { - "host": "pchk", - "ip_address": "192.168.2.41", - "port": 4114, - "username": "lcn", - "password": "lcn, - "sk_num_tries: 0, - "dim_mode: "STEPS200", - "acknowledge": False, - "devices": [ - { - "address": (0, 7, False) - "name": "", - "hardware_serial": -1, - "software_serial": -1, - "hardware_type": -1 - }, ... - ], - "entities": [ - { - "address": (0, 7, False) - "name": "Light_Output1", - "resource": "output1", - "domain": "light", - "domain_data": { - "output": "OUTPUT1", - "dimmable": True, - "transition": 5000.0 - } - }, ... 
- ] - } - """ - data = {} - for connection in lcn_config[CONF_CONNECTIONS]: - host = { - CONF_HOST: connection[CONF_NAME], - CONF_IP_ADDRESS: connection[CONF_HOST], - CONF_PORT: connection[CONF_PORT], - CONF_USERNAME: connection[CONF_USERNAME], - CONF_PASSWORD: connection[CONF_PASSWORD], - CONF_SK_NUM_TRIES: connection[CONF_SK_NUM_TRIES], - CONF_DIM_MODE: connection[CONF_DIM_MODE], - CONF_ACKNOWLEDGE: False, - CONF_DEVICES: [], - CONF_ENTITIES: [], - } - data[connection[CONF_NAME]] = host - - for confkey, domain_config in lcn_config.items(): - if confkey == CONF_CONNECTIONS: - continue - domain = DOMAIN_LOOKUP[confkey] - # loop over entities in configuration.yaml - for domain_data in domain_config: - # remove name and address from domain_data - entity_name = domain_data.pop(CONF_NAME) - address, host_name = domain_data.pop(CONF_ADDRESS) - - if host_name is None: - host_name = DEFAULT_NAME - - # check if we have a new device config - for device_config in data[host_name][CONF_DEVICES]: - if address == device_config[CONF_ADDRESS]: - break - else: # create new device_config - device_config = { - CONF_ADDRESS: address, - CONF_NAME: "", - CONF_HARDWARE_SERIAL: -1, - CONF_SOFTWARE_SERIAL: -1, - CONF_HARDWARE_TYPE: -1, - } - - data[host_name][CONF_DEVICES].append(device_config) - - # insert entity config - resource = get_resource(domain, domain_data).lower() - for entity_config in data[host_name][CONF_ENTITIES]: - if ( - address == entity_config[CONF_ADDRESS] - and resource == entity_config[CONF_RESOURCE] - and domain == entity_config[CONF_DOMAIN] - ): - break - else: # create new entity_config - entity_config = { - CONF_ADDRESS: address, - CONF_NAME: entity_name, - CONF_RESOURCE: resource, - CONF_DOMAIN: domain, - CONF_DOMAIN_DATA: domain_data.copy(), - } - data[host_name][CONF_ENTITIES].append(entity_config) - - return list(data.values()) - - def purge_entity_registry( hass: HomeAssistant, entry_id: str, imported_entry_data: ConfigType ) -> None: @@ -436,26 +320,6 @@ def get_device_config( return None -def has_unique_host_names(hosts: list[ConfigType]) -> list[ConfigType]: - """Validate that all connection names are unique. - - Use 'pchk' as default connection_name (or add a numeric suffix if - pchk' is already in use. - """ - suffix = 0 - for host in hosts: - if host.get(CONF_NAME) is None: - if suffix == 0: - host[CONF_NAME] = DEFAULT_NAME - else: - host[CONF_NAME] = f"{DEFAULT_NAME}{suffix:d}" - suffix += 1 - - schema = vol.Schema(vol.Unique()) - schema([host.get(CONF_NAME) for host in hosts]) - return hosts - - def is_address(value: str) -> tuple[AddressType, str]: """Validate the given address string. 
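Note on the LCN transition-unit change spread across the hunks above and below: the config-entry migration in lcn/__init__.py converts legacy stored transitions from milliseconds to seconds (treating None as 0), the schemas no longer multiply user input by 1000, and light.py/scene.py now multiply the stored value by 1000.0 before passing it to pypck.lcn_defs.time_to_ramp_value(). A minimal standalone sketch of that round trip, assuming domain_data is a plain dict with a "transition" key; the helper names here are illustrative and not part of this diff:

def migrate_transition_to_seconds(domain_data: dict) -> dict:
    """Sketch of the 1.x -> 2.1 migration step: legacy milliseconds (or None) become seconds."""
    if domain_data["transition"] is None:
        domain_data["transition"] = 0
    domain_data["transition"] /= 1000.0
    return domain_data


def ramp_input_milliseconds(domain_data: dict) -> float:
    """Sketch of what light.py/scene.py now do: stored seconds back to milliseconds for pypck."""
    return domain_data["transition"] * 1000.0


# A legacy entry that stored 5000 ms migrates to 5.0 s, and the platform
# still hands 5000.0 ms to time_to_ramp_value(), so behavior is unchanged.
assert migrate_transition_to_seconds({"transition": 5000})["transition"] == 5.0
assert ramp_input_milliseconds({"transition": 5.0}) == 5000.0

Dividing once at migration time keeps the stored config in user-facing seconds while pypck keeps receiving milliseconds.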
diff --git a/homeassistant/components/lcn/light.py b/homeassistant/components/lcn/light.py index 943e3c69acf12f..9ec660325c8650 100644 --- a/homeassistant/components/lcn/light.py +++ b/homeassistant/components/lcn/light.py @@ -90,7 +90,7 @@ def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: self.output = pypck.lcn_defs.OutputPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]] self._transition = pypck.lcn_defs.time_to_ramp_value( - config[CONF_DOMAIN_DATA][CONF_TRANSITION] + config[CONF_DOMAIN_DATA][CONF_TRANSITION] * 1000.0 ) self.dimmable = config[CONF_DOMAIN_DATA][CONF_DIMMABLE] diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 8f6b59e0a04769..695a35df871eab 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.24", "lcn-frontend==0.1.6"] + "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.2"] } diff --git a/homeassistant/components/lcn/scene.py b/homeassistant/components/lcn/scene.py index 241493ec1085ad..0f40926cf1745b 100644 --- a/homeassistant/components/lcn/scene.py +++ b/homeassistant/components/lcn/scene.py @@ -87,7 +87,7 @@ def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: self.transition = None else: self.transition = pypck.lcn_defs.time_to_ramp_value( - config[CONF_DOMAIN_DATA][CONF_TRANSITION] + config[CONF_DOMAIN_DATA][CONF_TRANSITION] * 1000.0 ) async def async_activate(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/lcn/schemas.py b/homeassistant/components/lcn/schemas.py index 0539e83dea808f..c9c91b9843dd3f 100644 --- a/homeassistant/components/lcn/schemas.py +++ b/homeassistant/components/lcn/schemas.py @@ -4,20 +4,9 @@ from homeassistant.components.climate import DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP from homeassistant.const import ( - CONF_ADDRESS, - CONF_BINARY_SENSORS, - CONF_COVERS, - CONF_HOST, - CONF_LIGHTS, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, CONF_SCENE, - CONF_SENSORS, CONF_SOURCE, - CONF_SWITCHES, CONF_UNIT_OF_MEASUREMENT, - CONF_USERNAME, UnitOfTemperature, ) import homeassistant.helpers.config_validation as cv @@ -25,9 +14,6 @@ from .const import ( BINSENSOR_PORTS, - CONF_CLIMATES, - CONF_CONNECTIONS, - CONF_DIM_MODE, CONF_DIMMABLE, CONF_LOCKABLE, CONF_MAX_TEMP, @@ -37,12 +23,8 @@ CONF_OUTPUTS, CONF_REGISTER, CONF_REVERSE_TIME, - CONF_SCENES, CONF_SETPOINT, - CONF_SK_NUM_TRIES, CONF_TRANSITION, - DIM_MODES, - DOMAIN, KEYS, LED_PORTS, LOGICOP_PORTS, @@ -56,7 +38,6 @@ VAR_UNITS, VARIABLES, ) -from .helpers import has_unique_host_names, is_address ADDRESS_SCHEMA = vol.Coerce(tuple) @@ -95,7 +76,7 @@ vol.Required(CONF_OUTPUT): vol.All(vol.Upper, vol.In(OUTPUT_PORTS + RELAY_PORTS)), vol.Optional(CONF_DIMMABLE, default=False): vol.Coerce(bool), vol.Optional(CONF_TRANSITION, default=0): vol.All( - vol.Coerce(float), vol.Range(min=0.0, max=486.0), lambda value: value * 1000 + vol.Coerce(float), vol.Range(min=0.0, max=486.0) ), } @@ -106,13 +87,8 @@ vol.Optional(CONF_OUTPUTS, default=[]): vol.All( cv.ensure_list, [vol.All(vol.Upper, vol.In(OUTPUT_PORTS + RELAY_PORTS))] ), - vol.Optional(CONF_TRANSITION, default=None): vol.Any( - vol.All( - vol.Coerce(int), - vol.Range(min=0.0, max=486.0), - lambda value: value * 1000, - ), - None, + vol.Optional(CONF_TRANSITION, default=0): vol.Any( + vol.All(vol.Coerce(int), vol.Range(min=0.0, 
max=486.0)) ), } @@ -130,73 +106,8 @@ DOMAIN_DATA_SWITCH: VolDictType = { - vol.Required(CONF_OUTPUT): vol.All(vol.Upper, vol.In(OUTPUT_PORTS + RELAY_PORTS)), -} - -# -# Configuration -# - -DOMAIN_DATA_BASE: VolDictType = { - vol.Required(CONF_NAME): cv.string, - vol.Required(CONF_ADDRESS): is_address, -} - -BINARY_SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_BINARY_SENSOR}) - -CLIMATES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_CLIMATE}) - -COVERS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_COVER}) - -LIGHTS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_LIGHT}) - -SCENES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SCENE}) - -SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SENSOR}) - -SWITCHES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SWITCH}) - -CONNECTION_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PORT): cv.port, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_SK_NUM_TRIES, default=0): cv.positive_int, - vol.Optional(CONF_DIM_MODE, default="steps50"): vol.All( - vol.Upper, vol.In(DIM_MODES) - ), - vol.Optional(CONF_NAME): cv.string, - } -) - -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CONNECTIONS): vol.All( - cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] - ), - vol.Optional(CONF_BINARY_SENSORS): vol.All( - cv.ensure_list, [BINARY_SENSORS_SCHEMA] - ), - vol.Optional(CONF_CLIMATES): vol.All( - cv.ensure_list, [CLIMATES_SCHEMA] - ), - vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), - vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), - vol.Optional(CONF_SCENES): vol.All(cv.ensure_list, [SCENES_SCHEMA]), - vol.Optional(CONF_SENSORS): vol.All( - cv.ensure_list, [SENSORS_SCHEMA] - ), - vol.Optional(CONF_SWITCHES): vol.All( - cv.ensure_list, [SWITCHES_SCHEMA] - ), - }, - ) - }, + vol.Required(CONF_OUTPUT): vol.All( + vol.Upper, + vol.In(OUTPUT_PORTS + RELAY_PORTS + SETPOINTS + KEYS), ), - extra=vol.ALLOW_EXTRA, -) +} diff --git a/homeassistant/components/lcn/sensor.py b/homeassistant/components/lcn/sensor.py index 341182c0639da6..ada0857742c135 100644 --- a/homeassistant/components/lcn/sensor.py +++ b/homeassistant/components/lcn/sensor.py @@ -7,7 +7,11 @@ import pypck -from homeassistant.components.sensor import DOMAIN as DOMAIN_SENSOR, SensorEntity +from homeassistant.components.sensor import ( + DOMAIN as DOMAIN_SENSOR, + SensorDeviceClass, + SensorEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_DOMAIN, @@ -32,6 +36,17 @@ from .entity import LcnEntity from .helpers import InputType +DEVICE_CLASS_MAPPING = { + pypck.lcn_defs.VarUnit.CELSIUS: SensorDeviceClass.TEMPERATURE, + pypck.lcn_defs.VarUnit.KELVIN: SensorDeviceClass.TEMPERATURE, + pypck.lcn_defs.VarUnit.FAHRENHEIT: SensorDeviceClass.TEMPERATURE, + pypck.lcn_defs.VarUnit.LUX_T: SensorDeviceClass.ILLUMINANCE, + pypck.lcn_defs.VarUnit.LUX_I: SensorDeviceClass.ILLUMINANCE, + pypck.lcn_defs.VarUnit.METERPERSECOND: SensorDeviceClass.SPEED, + pypck.lcn_defs.VarUnit.VOLT: SensorDeviceClass.VOLTAGE, + pypck.lcn_defs.VarUnit.AMPERE: SensorDeviceClass.CURRENT, +} + def add_lcn_entities( config_entry: ConfigEntry, @@ -87,7 +102,9 @@ def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: self.unit = pypck.lcn_defs.VarUnit.parse( 
config[CONF_DOMAIN_DATA][CONF_UNIT_OF_MEASUREMENT] ) + self._attr_native_unit_of_measurement = cast(str, self.unit.value) + self._attr_device_class = DEVICE_CLASS_MAPPING.get(self.unit, None) async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" @@ -109,7 +126,11 @@ def input_received(self, input_obj: InputType) -> None: ): return - self._attr_native_value = input_obj.get_value().to_var_unit(self.unit) + is_regulator = self.variable.name in SETPOINTS + self._attr_native_value = input_obj.get_value().to_var_unit( + self.unit, is_regulator + ) + self.async_write_ha_state() diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index 90650c2aed1f3d..088a3654500402 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -34,7 +34,7 @@ "acknowledge": "Retry sendig commands if no response is received (increases bus traffic)." } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure LCN host", "description": "Reconfigure connection to LCN host.", "data": { @@ -63,17 +63,13 @@ } }, "issues": { - "authentication_error": { - "title": "Authentication failed.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure username and password are correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "deprecated_regulatorlock_sensor": { + "title": "Deprecated LCN regulator lock binary sensor", + "description": "Your LCN regulator lock binary sensor entity `{entity}` is being used in automations or scripts. A regulator lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue." }, - "license_error": { - "title": "Maximum number of connections was reached.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure sufficient PCHK licenses are registered and restart Home Assistant.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "connection_refused": { - "title": "Unable to connect to PCHK.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure the connection (IP and port) to the LCN bus coupler is correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "deprecated_keylock_sensor": { + "title": "Deprecated LCN key lock binary sensor", + "description": "Your LCN key lock binary sensor entity `{entity}` is being used in automations or scripts. A key lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue."
} }, "services": { diff --git a/homeassistant/components/lcn/switch.py b/homeassistant/components/lcn/switch.py index 6ad5977855ea56..dd940bd38b3753 100644 --- a/homeassistant/components/lcn/switch.py +++ b/homeassistant/components/lcn/switch.py @@ -19,6 +19,8 @@ CONF_OUTPUT, DOMAIN, OUTPUT_PORTS, + RELAY_PORTS, + SETPOINTS, ) from .entity import LcnEntity from .helpers import InputType @@ -32,12 +34,18 @@ def add_lcn_switch_entities( entity_configs: Iterable[ConfigType], ) -> None: """Add entities for this domain.""" - entities: list[LcnOutputSwitch | LcnRelaySwitch] = [] + entities: list[ + LcnOutputSwitch | LcnRelaySwitch | LcnRegulatorLockSwitch | LcnKeyLockSwitch + ] = [] for entity_config in entity_configs: if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: entities.append(LcnOutputSwitch(entity_config, config_entry)) - else: # in RELAY_PORTS + elif entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in RELAY_PORTS: entities.append(LcnRelaySwitch(entity_config, config_entry)) + elif entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in SETPOINTS: + entities.append(LcnRegulatorLockSwitch(entity_config, config_entry)) + else: # in KEYS + entities.append(LcnKeyLockSwitch(entity_config, config_entry)) async_add_entities(entities) @@ -164,3 +172,118 @@ def input_received(self, input_obj: InputType) -> None: self._attr_is_on = input_obj.get_state(self.output.value) self.async_write_ha_state() + + +class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity): + """Representation of a LCN switch for regulator locks.""" + + _attr_is_on = False + + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: + """Initialize the LCN switch.""" + super().__init__(config, config_entry) + + self.setpoint_variable = pypck.lcn_defs.Var[ + config[CONF_DOMAIN_DATA][CONF_OUTPUT] + ] + self.reg_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint_variable) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + if not self.device_connection.is_group: + await self.device_connection.activate_status_request_handler( + self.setpoint_variable + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if not self.device_connection.is_group: + await self.device_connection.cancel_status_request_handler( + self.setpoint_variable + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + if not await self.device_connection.lock_regulator(self.reg_id, True): + return + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + if not await self.device_connection.lock_regulator(self.reg_id, False): + return + self._attr_is_on = False + self.async_write_ha_state() + + def input_received(self, input_obj: InputType) -> None: + """Set switch state when LCN input object (command) is received.""" + if ( + not isinstance(input_obj, pypck.inputs.ModStatusVar) + or input_obj.get_var() != self.setpoint_variable + ): + return + + self._attr_is_on = input_obj.get_value().is_locked_regulator() + self.async_write_ha_state() + + +class LcnKeyLockSwitch(LcnEntity, SwitchEntity): + """Representation of a LCN switch for key locks.""" + + _attr_is_on = False + + def __init__(self, config: ConfigType, config_entry: ConfigEntry) -> None: + """Initialize the LCN switch.""" + super().__init__(config, config_entry) + + self.key = 
pypck.lcn_defs.Key[config[CONF_DOMAIN_DATA][CONF_OUTPUT]] + self.table_id = ord(self.key.name[0]) - 65 + self.key_id = int(self.key.name[1]) - 1 + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + if not self.device_connection.is_group: + await self.device_connection.activate_status_request_handler(self.key) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + if not self.device_connection.is_group: + await self.device_connection.cancel_status_request_handler(self.key) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + states = [pypck.lcn_defs.KeyLockStateModifier.NOCHANGE] * 8 + states[self.key_id] = pypck.lcn_defs.KeyLockStateModifier.ON + + if not await self.device_connection.lock_keys(self.table_id, states): + return + + self._attr_is_on = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + states = [pypck.lcn_defs.KeyLockStateModifier.NOCHANGE] * 8 + states[self.key_id] = pypck.lcn_defs.KeyLockStateModifier.OFF + + if not await self.device_connection.lock_keys(self.table_id, states): + return + + self._attr_is_on = False + self.async_write_ha_state() + + def input_received(self, input_obj: InputType) -> None: + """Set switch state when LCN input object (command) is received.""" + if ( + not isinstance(input_obj, pypck.inputs.ModStatusKeyLocks) + or self.key not in pypck.lcn_defs.Key + ): + return + + self._attr_is_on = input_obj.get_state(self.table_id, self.key_id) + self.async_write_ha_state() diff --git a/homeassistant/components/led_ble/__init__.py b/homeassistant/components/led_ble/__init__.py index d09f88b145ac71..84d7369d706275 100644 --- a/homeassistant/components/led_ble/__init__.py +++ b/homeassistant/components/led_ble/__init__.py @@ -66,6 +66,7 @@ async def _async_update() -> None: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=led_ble.name, update_method=_async_update, update_interval=timedelta(seconds=UPDATE_SECONDS), diff --git a/homeassistant/components/lektrico/__init__.py b/homeassistant/components/lektrico/__init__.py index 0691bfef72a5a7..475b613254111f 100644 --- a/homeassistant/components/lektrico/__init__.py +++ b/homeassistant/components/lektrico/__init__.py @@ -12,9 +12,11 @@ # List the platforms that charger supports. CHARGERS_PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, Platform.BUTTON, Platform.NUMBER, Platform.SENSOR, + Platform.SWITCH, ] # List the platforms that load balancer device supports. diff --git a/homeassistant/components/lektrico/binary_sensor.py b/homeassistant/components/lektrico/binary_sensor.py new file mode 100644 index 00000000000000..d0a3e39690c433 --- /dev/null +++ b/homeassistant/components/lektrico/binary_sensor.py @@ -0,0 +1,139 @@ +"""Support for Lektrico binary sensors entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Lektrico binary sensor entity.""" + + value_fn: Callable[[dict[str, Any]], bool] + + +BINARY_SENSORS: tuple[LektricoBinarySensorEntityDescription, ...] = ( + LektricoBinarySensorEntityDescription( + key="state_e_activated", + translation_key="state_e_activated", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["state_e_activated"]), + ), + LektricoBinarySensorEntityDescription( + key="overtemp", + translation_key="overtemp", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overtemp"]), + ), + LektricoBinarySensorEntityDescription( + key="critical_temp", + translation_key="critical_temp", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["critical_temp"]), + ), + LektricoBinarySensorEntityDescription( + key="overcurrent", + translation_key="overcurrent", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overcurrent"]), + ), + LektricoBinarySensorEntityDescription( + key="meter_fault", + translation_key="meter_fault", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["meter_fault"]), + ), + LektricoBinarySensorEntityDescription( + key="undervoltage", + translation_key="undervoltage", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["undervoltage_error"]), + ), + LektricoBinarySensorEntityDescription( + key="overvoltage", + translation_key="overvoltage", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overvoltage_error"]), + ), + LektricoBinarySensorEntityDescription( + key="rcd_error", + translation_key="rcd_error", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["rcd_error"]), + ), + LektricoBinarySensorEntityDescription( + key="cp_diode_failure", + translation_key="cp_diode_failure", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["cp_diode_failure"]), + ), + LektricoBinarySensorEntityDescription( + key="contactor_failure", + translation_key="contactor_failure", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["contactor_failure"]), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico binary sensor entities based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + LektricoBinarySensor( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in BINARY_SENSORS + ) + + +class LektricoBinarySensor(LektricoEntity, BinarySensorEntity): + """Defines a Lektrico binary sensor entity.""" + + entity_description: LektricoBinarySensorEntityDescription + + def __init__( + self, + description: 
LektricoBinarySensorEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico binary sensor.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._coordinator = coordinator + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the binary sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/lektrico/manifest.json b/homeassistant/components/lektrico/manifest.json index d96b8cc4b69eb5..d34915d66ba437 100644 --- a/homeassistant/components/lektrico/manifest.json +++ b/homeassistant/components/lektrico/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/lektrico", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["lektricowifi==0.0.42"], + "requirements": ["lektricowifi==0.0.43"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/lektrico/sensor.py b/homeassistant/components/lektrico/sensor.py index a26a3676d8b578..d55d91c4cd4dbb 100644 --- a/homeassistant/components/lektrico/sensor.py +++ b/homeassistant/components/lektrico/sensor.py @@ -62,11 +62,13 @@ class LektricoSensorEntityDescription(SensorEntityDescription): device_class=SensorDeviceClass.ENUM, options=[ "available", + "charging", "connected", + "error", + "locked", "need_auth", "paused", - "charging", - "error", + "paused_by_scheduler", "updating_firmware", ], translation_key="state", diff --git a/homeassistant/components/lektrico/strings.json b/homeassistant/components/lektrico/strings.json index b749ea23490b62..e24700c9b091d8 100644 --- a/homeassistant/components/lektrico/strings.json +++ b/homeassistant/components/lektrico/strings.json @@ -22,6 +22,38 @@ } }, "entity": { + "binary_sensor": { + "state_e_activated": { + "name": "Ev error" + }, + "overtemp": { + "name": "Thermal throttling" + }, + "critical_temp": { + "name": "Overheating" + }, + "overcurrent": { + "name": "Overcurrent" + }, + "meter_fault": { + "name": "Metering error" + }, + "undervoltage": { + "name": "Undervoltage" + }, + "overvoltage": { + "name": "Overvoltage" + }, + "rcd_error": { + "name": "Rcd error" + }, + "cp_diode_failure": { + "name": "Ev diode short" + }, + "contactor_failure": { + "name": "Relay contacts welded" + } + }, "button": { "charge_start": { "name": "Charge start" @@ -54,11 +86,13 @@ "name": "State", "state": { "available": "Available", + "charging": "Charging", "connected": "Connected", + "error": "Error", + "locked": "Locked", "need_auth": "Waiting for authentication", "paused": "Paused", - "charging": "Charging", - "error": "Error", + "paused_by_scheduler": "Paused by scheduler", "updating_firmware": "Updating firmware" } }, @@ -126,6 +160,17 @@ "pf_l3": { "name": "Power factor L3" } + }, + "switch": { + "authentication": { + "name": "Authentication" + }, + "force_single_phase": { + "name": "Force single phase" + }, + "lock": { + "name": "Lock" + } } } } diff --git a/homeassistant/components/lektrico/switch.py b/homeassistant/components/lektrico/switch.py new file mode 100644 index 00000000000000..0fdfbd2ad41211 --- /dev/null +++ b/homeassistant/components/lektrico/switch.py @@ -0,0 +1,116 @@ +"""Support for Lektrico switch entities.""" + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + +from lektricowifi import Device + +from 
homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoSwitchEntityDescription(SwitchEntityDescription): + """Describes Lektrico switch entity.""" + + value_fn: Callable[[dict[str, Any]], bool] + set_value_fn: Callable[[Device, dict[Any, Any], bool], Coroutine[Any, Any, Any]] + + +SWITCHS_FOR_ALL_CHARGERS: tuple[LektricoSwitchEntityDescription, ...] = ( + LektricoSwitchEntityDescription( + key="authentication", + translation_key="authentication", + entity_category=EntityCategory.CONFIG, + value_fn=lambda data: bool(data["require_auth"]), + set_value_fn=lambda device, data, value: device.set_auth(not value), + ), + LektricoSwitchEntityDescription( + key="lock", + translation_key="lock", + entity_category=EntityCategory.CONFIG, + value_fn=lambda data: str(data["charger_state"]) == "locked", + set_value_fn=lambda device, data, value: device.set_charger_locked(value), + ), +) + + +SWITCHS_FOR_3_PHASE_CHARGERS: tuple[LektricoSwitchEntityDescription, ...] = ( + LektricoSwitchEntityDescription( + key="force_single_phase", + translation_key="force_single_phase", + entity_category=EntityCategory.CONFIG, + value_fn=lambda data: data["relay_mode"] == 1, + set_value_fn=lambda device, data, value: ( + device.set_relay_mode(data["dynamic_current"], 1) + if value + else device.set_relay_mode(data["dynamic_current"], 3) + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico switch entities based on a config entry.""" + coordinator = entry.runtime_data + + switchs_to_be_used: tuple[LektricoSwitchEntityDescription, ...] 
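+    # Only three-phase (TYPE_3P22K) chargers additionally get the
+    # force_single_phase switch on top of the common switch set.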
+ if coordinator.device_type == Device.TYPE_3P22K: + switchs_to_be_used = SWITCHS_FOR_ALL_CHARGERS + SWITCHS_FOR_3_PHASE_CHARGERS + else: + switchs_to_be_used = SWITCHS_FOR_ALL_CHARGERS + + async_add_entities( + LektricoSwitch( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in switchs_to_be_used + ) + + +class LektricoSwitch(LektricoEntity, SwitchEntity): + """Defines a Lektrico switch entity.""" + + entity_description: LektricoSwitchEntityDescription + + def __init__( + self, + description: LektricoSwitchEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico switch.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.entity_description.set_value_fn( + self.coordinator.device, self.coordinator.data, True + ) + await self.coordinator.async_request_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.entity_description.set_value_fn( + self.coordinator.device, self.coordinator.data, False + ) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lg_netcast/config_flow.py b/homeassistant/components/lg_netcast/config_flow.py index 4b1780d41ae63d..d5e28f3c057e29 100644 --- a/homeassistant/components/lg_netcast/config_flow.py +++ b/homeassistant/components/lg_netcast/config_flow.py @@ -18,10 +18,9 @@ CONF_MODEL, CONF_NAME, ) -from homeassistant.core import CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, callback +from homeassistant.core import CALLBACK_TYPE, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.util.network import is_host_valid from .const import DEFAULT_NAME, DOMAIN @@ -68,56 +67,6 @@ async def async_step_user( errors=errors, ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import configuration from yaml.""" - self.device_config = { - CONF_HOST: import_data[CONF_HOST], - CONF_NAME: import_data[CONF_NAME], - } - - def _create_issue(): - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "LG Netcast", - }, - ) - - try: - result: ConfigFlowResult = await self.async_step_authorize(import_data) - except AbortFlow as err: - if err.reason != "already_configured": - async_create_issue( - self.hass, - DOMAIN, - "deprecated_yaml_import_issue_{err.reason}", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{err.reason}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "LG Netcast", - "error_type": err.reason, - }, - ) - else: - _create_issue() - raise - - _create_issue() - - return result - async def 
async_discover_client(self): """Handle Discovery step.""" self.create_client() diff --git a/homeassistant/components/lg_netcast/media_player.py b/homeassistant/components/lg_netcast/media_player.py index 4dc694cd085eb0..b3f8f8e043775a 100644 --- a/homeassistant/components/lg_netcast/media_player.py +++ b/homeassistant/components/lg_netcast/media_player.py @@ -7,26 +7,20 @@ from pylgnetcast import LG_COMMAND, LgNetCastClient, LgNetCastError from requests import RequestException -import voluptuous as vol from homeassistant.components.media_player import ( - PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerDeviceClass, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.exceptions import PlatformNotReady -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.trigger import PluggableAction -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import ATTR_MANUFACTURER, DOMAIN from .triggers.turn_on import async_get_turn_on_trigger @@ -49,15 +43,6 @@ | MediaPlayerEntityFeature.STOP ) -PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA, - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_ACCESS_TOKEN): vol.All(cv.string, vol.Length(max=6)), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - } -) - async def async_setup_entry( hass: HomeAssistant, @@ -79,27 +64,6 @@ async def async_setup_entry( async_add_entities([LgTVDevice(client, name, model, unique_id=unique_id)]) -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the LG TV platform.""" - - host = config.get(CONF_HOST) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - - if ( - result.get("type") == FlowResultType.ABORT - and result.get("reason") == "cannot_connect" - ): - raise PlatformNotReady(f"Connection error while connecting to {host}") - - class LgTVDevice(MediaPlayerEntity): """Representation of a LG TV.""" diff --git a/homeassistant/components/lg_netcast/strings.json b/homeassistant/components/lg_netcast/strings.json index 77003f60f43d5a..209c3837261db9 100644 --- a/homeassistant/components/lg_netcast/strings.json +++ b/homeassistant/components/lg_netcast/strings.json @@ -28,16 +28,6 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The {integration_title} is not online for YAML migration to complete", - "description": "Migrating {integration_title} from YAML cannot complete until the TV is online.\n\nPlease turn on your TV for migration to complete." 
- }, - "deprecated_yaml_import_issue_invalid_host": { - "title": "The {integration_title} YAML configuration has an invalid host.", - "description": "Configuring {integration_title} using YAML is being removed but the device returned an invalid response.\n\nPlease check or manually remove the YAML configuration." - } - }, "device_automation": { "trigger_type": { "lg_netcast.turn_on": "Device is requested to turn on" diff --git a/homeassistant/components/lg_thinq/__init__.py b/homeassistant/components/lg_thinq/__init__.py new file mode 100644 index 00000000000000..a8d3fe175efca0 --- /dev/null +++ b/homeassistant/components/lg_thinq/__init__.py @@ -0,0 +1,166 @@ +"""Support for LG ThinQ Connect device.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass, field +import logging + +from thinqconnect import ThinQApi, ThinQAPIException +from thinqconnect.integration import async_get_ha_bridge_list + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_ACCESS_TOKEN, + CONF_COUNTRY, + EVENT_HOMEASSISTANT_STOP, + Platform, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.event import async_track_time_interval + +from .const import CONF_CONNECT_CLIENT_ID, MQTT_SUBSCRIPTION_INTERVAL +from .coordinator import DeviceDataUpdateCoordinator, async_setup_device_coordinator +from .mqtt import ThinQMQTT + + +@dataclass(kw_only=True) +class ThinqData: + """A class that holds runtime data.""" + + coordinators: dict[str, DeviceDataUpdateCoordinator] = field(default_factory=dict) + mqtt_client: ThinQMQTT | None = None + + +type ThinqConfigEntry = ConfigEntry[ThinqData] + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.CLIMATE, + Platform.EVENT, + Platform.FAN, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SWITCH, + Platform.VACUUM, +] + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry(hass: HomeAssistant, entry: ThinqConfigEntry) -> bool: + """Set up an entry.""" + entry.runtime_data = ThinqData() + + access_token = entry.data[CONF_ACCESS_TOKEN] + client_id = entry.data[CONF_CONNECT_CLIENT_ID] + country_code = entry.data[CONF_COUNTRY] + + thinq_api = ThinQApi( + session=async_get_clientsession(hass), + access_token=access_token, + country_code=country_code, + client_id=client_id, + ) + + # Setup coordinators and register devices. + await async_setup_coordinators(hass, entry, thinq_api) + + # Set up all platforms for this device/entry. + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + # Set up MQTT connection. + await async_setup_mqtt(hass, entry, thinq_api, client_id) + + # Clean up devices they are no longer in use. + async_cleanup_device_registry(hass, entry) + + return True + + +async def async_setup_coordinators( + hass: HomeAssistant, + entry: ThinqConfigEntry, + thinq_api: ThinQApi, +) -> None: + """Set up coordinators and register devices.""" + # Get a list of ha bridge. + try: + bridge_list = await async_get_ha_bridge_list(thinq_api) + except ThinQAPIException as exc: + raise ConfigEntryNotReady(exc.message) from exc + + if not bridge_list: + return + + # Setup coordinator per device. 
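+    # Coordinators are created concurrently; each one performs an initial
+    # refresh before being stored in runtime data, keyed by its unique_id.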
+    task_list = [
+        hass.async_create_task(async_setup_device_coordinator(hass, bridge))
+        for bridge in bridge_list
+    ]
+    task_result = await asyncio.gather(*task_list)
+    for coordinator in task_result:
+        entry.runtime_data.coordinators[coordinator.unique_id] = coordinator
+
+
+@callback
+def async_cleanup_device_registry(hass: HomeAssistant, entry: ThinqConfigEntry) -> None:
+    """Clean up device registry."""
+    new_device_unique_ids = [
+        coordinator.unique_id
+        for coordinator in entry.runtime_data.coordinators.values()
+    ]
+    device_registry = dr.async_get(hass)
+    existing_entries = dr.async_entries_for_config_entry(
+        device_registry, entry.entry_id
+    )
+
+    # Remove devices that no longer exist.
+    for old_entry in existing_entries:
+        old_unique_id = next(iter(old_entry.identifiers))[1]
+        if old_unique_id not in new_device_unique_ids:
+            device_registry.async_remove_device(old_entry.id)
+            _LOGGER.debug("Remove device_registry: device_id=%s", old_entry.id)
+
+
+async def async_setup_mqtt(
+    hass: HomeAssistant, entry: ThinqConfigEntry, thinq_api: ThinQApi, client_id: str
+) -> None:
+    """Set up MQTT connection."""
+    mqtt_client = ThinQMQTT(hass, thinq_api, client_id, entry.runtime_data.coordinators)
+    entry.runtime_data.mqtt_client = mqtt_client
+
+    # Try to connect.
+    result = await mqtt_client.async_connect()
+    if not result:
+        _LOGGER.error("Failed to set up mqtt connection")
+        return
+
+    # Ready to subscribe.
+    await mqtt_client.async_start_subscribes()
+
+    entry.async_on_unload(
+        async_track_time_interval(
+            hass,
+            mqtt_client.async_refresh_subscribe,
+            MQTT_SUBSCRIPTION_INTERVAL,
+            cancel_on_shutdown=True,
+        )
+    )
+    entry.async_on_unload(
+        hass.bus.async_listen_once(
+            EVENT_HOMEASSISTANT_STOP, mqtt_client.async_disconnect
+        )
+    )
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: ThinqConfigEntry) -> bool:
+    """Unload the entry."""
+    if entry.runtime_data.mqtt_client:
+        await entry.runtime_data.mqtt_client.async_disconnect()
+
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
diff --git a/homeassistant/components/lg_thinq/binary_sensor.py b/homeassistant/components/lg_thinq/binary_sensor.py
new file mode 100644
index 00000000000000..845bf8c3079056
--- /dev/null
+++ b/homeassistant/components/lg_thinq/binary_sensor.py
@@ -0,0 +1,181 @@
+"""Support for binary sensor entities."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+import logging
+
+from thinqconnect import DeviceType
+from thinqconnect.devices.const import Property as ThinQProperty
+from thinqconnect.integration import ActiveMode
+
+from homeassistant.components.binary_sensor import (
+    BinarySensorDeviceClass,
+    BinarySensorEntity,
+    BinarySensorEntityDescription,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . 
import ThinqConfigEntry +from .entity import ThinQEntity + + +@dataclass(frozen=True, kw_only=True) +class ThinQBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes ThinQ sensor entity.""" + + on_key: str | None = None + + +BINARY_SENSOR_DESC: dict[ThinQProperty, ThinQBinarySensorEntityDescription] = { + ThinQProperty.RINSE_REFILL: ThinQBinarySensorEntityDescription( + key=ThinQProperty.RINSE_REFILL, + translation_key=ThinQProperty.RINSE_REFILL, + ), + ThinQProperty.ECO_FRIENDLY_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.ECO_FRIENDLY_MODE, + translation_key=ThinQProperty.ECO_FRIENDLY_MODE, + ), + ThinQProperty.POWER_SAVE_ENABLED: ThinQBinarySensorEntityDescription( + key=ThinQProperty.POWER_SAVE_ENABLED, + translation_key=ThinQProperty.POWER_SAVE_ENABLED, + ), + ThinQProperty.REMOTE_CONTROL_ENABLED: ThinQBinarySensorEntityDescription( + key=ThinQProperty.REMOTE_CONTROL_ENABLED, + translation_key=ThinQProperty.REMOTE_CONTROL_ENABLED, + ), + ThinQProperty.SABBATH_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.SABBATH_MODE, + translation_key=ThinQProperty.SABBATH_MODE, + ), + ThinQProperty.DOOR_STATE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.DOOR_STATE, + device_class=BinarySensorDeviceClass.DOOR, + on_key="open", + ), + ThinQProperty.MACHINE_CLEAN_REMINDER: ThinQBinarySensorEntityDescription( + key=ThinQProperty.MACHINE_CLEAN_REMINDER, + translation_key=ThinQProperty.MACHINE_CLEAN_REMINDER, + on_key="mcreminder_on", + ), + ThinQProperty.SIGNAL_LEVEL: ThinQBinarySensorEntityDescription( + key=ThinQProperty.SIGNAL_LEVEL, + translation_key=ThinQProperty.SIGNAL_LEVEL, + on_key="signallevel_on", + ), + ThinQProperty.CLEAN_LIGHT_REMINDER: ThinQBinarySensorEntityDescription( + key=ThinQProperty.CLEAN_LIGHT_REMINDER, + translation_key=ThinQProperty.CLEAN_LIGHT_REMINDER, + on_key="cleanlreminder_on", + ), + ThinQProperty.HOOD_OPERATION_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.HOOD_OPERATION_MODE, + translation_key="operation_mode", + on_key="power_on", + ), + ThinQProperty.WATER_HEATER_OPERATION_MODE: ThinQBinarySensorEntityDescription( + key=ThinQProperty.WATER_HEATER_OPERATION_MODE, + translation_key="operation_mode", + on_key="power_on", + ), + ThinQProperty.ONE_TOUCH_FILTER: ThinQBinarySensorEntityDescription( + key=ThinQProperty.ONE_TOUCH_FILTER, + translation_key=ThinQProperty.ONE_TOUCH_FILTER, + on_key="on", + ), +} + +DEVICE_TYPE_BINARY_SENSOR_MAP: dict[ + DeviceType, tuple[ThinQBinarySensorEntityDescription, ...] 
+] = { + DeviceType.COOKTOP: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.DISH_WASHER: ( + BINARY_SENSOR_DESC[ThinQProperty.DOOR_STATE], + BINARY_SENSOR_DESC[ThinQProperty.RINSE_REFILL], + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + BINARY_SENSOR_DESC[ThinQProperty.MACHINE_CLEAN_REMINDER], + BINARY_SENSOR_DESC[ThinQProperty.SIGNAL_LEVEL], + BINARY_SENSOR_DESC[ThinQProperty.CLEAN_LIGHT_REMINDER], + ), + DeviceType.DRYER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.HOOD: (BINARY_SENSOR_DESC[ThinQProperty.HOOD_OPERATION_MODE],), + DeviceType.OVEN: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.REFRIGERATOR: ( + BINARY_SENSOR_DESC[ThinQProperty.DOOR_STATE], + BINARY_SENSOR_DESC[ThinQProperty.ECO_FRIENDLY_MODE], + BINARY_SENSOR_DESC[ThinQProperty.POWER_SAVE_ENABLED], + BINARY_SENSOR_DESC[ThinQProperty.SABBATH_MODE], + ), + DeviceType.KIMCHI_REFRIGERATOR: ( + BINARY_SENSOR_DESC[ThinQProperty.ONE_TOUCH_FILTER], + ), + DeviceType.STYLER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHCOMBO_MAIN: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHCOMBO_MINI: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHTOWER_DRYER: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHTOWER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHTOWER_WASHER: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WATER_HEATER: ( + BINARY_SENSOR_DESC[ThinQProperty.WATER_HEATER_OPERATION_MODE], + ), + DeviceType.WINE_CELLAR: (BINARY_SENSOR_DESC[ThinQProperty.SABBATH_MODE],), +} +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for binary sensor platform.""" + entities: list[ThinQBinarySensorEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_BINARY_SENSOR_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQBinarySensorEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.READ_ONLY + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQBinarySensorEntity(ThinQEntity, BinarySensorEntity): + """Represent a thinq binary sensor platform.""" + + entity_description: ThinQBinarySensorEntityDescription + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + if (key := self.entity_description.on_key) is not None: + self._attr_is_on = self.data.value == key + else: + self._attr_is_on = self.data.is_on + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.is_on, + ) diff --git a/homeassistant/components/lg_thinq/climate.py b/homeassistant/components/lg_thinq/climate.py new file mode 100644 index 00000000000000..9ead57ab7b0c75 --- /dev/null +++ b/homeassistant/components/lg_thinq/climate.py @@ -0,0 +1,334 @@ +"""Support for climate entities.""" + +from __future__ import annotations + +from dataclasses import dataclass +import logging +from typing import Any + +from thinqconnect 
import DeviceType
+from thinqconnect.integration import ExtendedProperty
+
+from homeassistant.components.climate import (
+    ATTR_TARGET_TEMP_HIGH,
+    ATTR_TARGET_TEMP_LOW,
+    FAN_OFF,
+    ClimateEntity,
+    ClimateEntityDescription,
+    ClimateEntityFeature,
+    HVACMode,
+)
+from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.temperature import display_temp
+
+from . import ThinqConfigEntry
+from .coordinator import DeviceDataUpdateCoordinator
+from .entity import ThinQEntity
+
+
+@dataclass(frozen=True, kw_only=True)
+class ThinQClimateEntityDescription(ClimateEntityDescription):
+    """Describes ThinQ climate entity."""
+
+    min_temp: float | None = None
+    max_temp: float | None = None
+    step: float | None = None
+
+
+DEVICE_TYPE_CLIMATE_MAP: dict[DeviceType, tuple[ThinQClimateEntityDescription, ...]] = {
+    DeviceType.AIR_CONDITIONER: (
+        ThinQClimateEntityDescription(
+            key=ExtendedProperty.CLIMATE_AIR_CONDITIONER,
+            name=None,
+            translation_key=ExtendedProperty.CLIMATE_AIR_CONDITIONER,
+        ),
+    ),
+    DeviceType.SYSTEM_BOILER: (
+        ThinQClimateEntityDescription(
+            key=ExtendedProperty.CLIMATE_SYSTEM_BOILER,
+            name=None,
+            min_temp=16,
+            max_temp=30,
+            step=1,
+        ),
+    ),
+}
+
+STR_TO_HVAC: dict[str, HVACMode] = {
+    "air_dry": HVACMode.DRY,
+    "auto": HVACMode.AUTO,
+    "cool": HVACMode.COOL,
+    "fan": HVACMode.FAN_ONLY,
+    "heat": HVACMode.HEAT,
+}
+
+HVAC_TO_STR: dict[HVACMode, str] = {
+    HVACMode.AUTO: "auto",
+    HVACMode.COOL: "cool",
+    HVACMode.DRY: "air_dry",
+    HVACMode.FAN_ONLY: "fan",
+    HVACMode.HEAT: "heat",
+}
+
+THINQ_PRESET_MODE: list[str] = ["air_clean", "aroma", "energy_saving"]
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: ThinqConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up an entry for climate platform."""
+    entities: list[ThinQClimateEntity] = []
+    for coordinator in entry.runtime_data.coordinators.values():
+        if (
+            descriptions := DEVICE_TYPE_CLIMATE_MAP.get(
+                coordinator.api.device.device_type
+            )
+        ) is not None:
+            for description in descriptions:
+                entities.extend(
+                    ThinQClimateEntity(coordinator, description, property_id)
+                    for property_id in coordinator.api.get_active_idx(description.key)
+                )
+
+    if entities:
+        async_add_entities(entities)
+
+
+class ThinQClimateEntity(ThinQEntity, ClimateEntity):
+    """Represent a thinq climate platform."""
+
+    entity_description: ThinQClimateEntityDescription
+
+    def __init__(
+        self,
+        coordinator: DeviceDataUpdateCoordinator,
+        entity_description: ThinQClimateEntityDescription,
+        property_id: str,
+    ) -> None:
+        """Initialize a climate entity."""
+        super().__init__(coordinator, entity_description, property_id)
+
+        self._attr_supported_features = (
+            ClimateEntityFeature.TARGET_TEMPERATURE
+            | ClimateEntityFeature.TURN_ON
+            | ClimateEntityFeature.TURN_OFF
+        )
+        self._attr_hvac_modes = [HVACMode.OFF]
+        self._attr_hvac_mode = HVACMode.OFF
+        self._attr_preset_modes = []
+        self._attr_temperature_unit = UnitOfTemperature.CELSIUS
+        self._requested_hvac_mode: str | None = None
+
+        # Set up HVAC modes.
+        for mode in self.data.hvac_modes:
+            if mode in STR_TO_HVAC:
+                self._attr_hvac_modes.append(STR_TO_HVAC[mode])
+            elif mode in THINQ_PRESET_MODE:
+                self._attr_preset_modes.append(mode)
+                self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE
+
+        # Set up fan modes. 
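+        # Fan modes are taken from the device state; FAN_MODE support is only
+        # advertised when at least one mode is reported.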
+ self._attr_fan_modes = self.data.fan_modes + if self.fan_modes: + self._attr_supported_features |= ClimateEntityFeature.FAN_MODE + + # Supports target temperature range. + if self.data.support_temperature_range: + self._attr_supported_features |= ( + ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ) + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + # Update fan, hvac and preset mode. + if self.data.is_on: + if self.supported_features & ClimateEntityFeature.FAN_MODE: + self._attr_fan_mode = self.data.fan_mode + + hvac_mode = self._requested_hvac_mode or self.data.hvac_mode + if hvac_mode in STR_TO_HVAC: + self._attr_hvac_mode = STR_TO_HVAC.get(hvac_mode) + self._attr_preset_mode = None + elif hvac_mode in THINQ_PRESET_MODE: + self._attr_preset_mode = hvac_mode + else: + if self.supported_features & ClimateEntityFeature.FAN_MODE: + self._attr_fan_mode = FAN_OFF + + self._attr_hvac_mode = HVACMode.OFF + self._attr_preset_mode = None + + self.reset_requested_hvac_mode() + self._attr_current_humidity = self.data.humidity + self._attr_current_temperature = self.data.current_temp + + if (max_temp := self.entity_description.max_temp) is not None or ( + max_temp := self.data.max + ) is not None: + self._attr_max_temp = max_temp + if (min_temp := self.entity_description.min_temp) is not None or ( + min_temp := self.data.min + ) is not None: + self._attr_min_temp = min_temp + if (step := self.entity_description.step) is not None or ( + step := self.data.step + ) is not None: + self._attr_target_temperature_step = step + + # Update target temperatures. + if ( + self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + and self.hvac_mode == HVACMode.AUTO + ): + self._attr_target_temperature = None + self._attr_target_temperature_high = self.data.target_temp_high + self._attr_target_temperature_low = self.data.target_temp_low + else: + self._attr_target_temperature = self.data.target_temp + self._attr_target_temperature_high = None + self._attr_target_temperature_low = None + + _LOGGER.debug( + "[%s:%s] update status: %s/%s -> %s/%s, hvac:%s, unit:%s, step:%s", + self.coordinator.device_name, + self.property_id, + self.data.current_temp, + self.data.target_temp, + self.current_temperature, + self.target_temperature, + self.hvac_mode, + self.temperature_unit, + self.target_temperature_step, + ) + + def reset_requested_hvac_mode(self) -> None: + """Cancel request to set hvac mode.""" + self._requested_hvac_mode = None + + async def async_turn_on(self) -> None: + """Turn the entity on.""" + _LOGGER.debug( + "[%s:%s] async_turn_on", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_on(self.property_id)) + + async def async_turn_off(self) -> None: + """Turn the entity off.""" + _LOGGER.debug( + "[%s:%s] async_turn_off", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_off(self.property_id)) + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + if hvac_mode == HVACMode.OFF: + await self.async_turn_off() + return + + # If device is off, turn on first. + if not self.data.is_on: + await self.async_turn_on() + + # When we request hvac mode while turning on the device, the previously set + # hvac mode is displayed first and then switches to the requested hvac mode. + # To prevent this, set the requested hvac mode here so that it will be set + # immediately on the next update. 
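+        # The pending mode is consumed by _update_status and cleared on the
+        # next refresh, or reset right away if the API call below fails.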
+ self._requested_hvac_mode = HVAC_TO_STR.get(hvac_mode) + + _LOGGER.debug( + "[%s:%s] async_set_hvac_mode: %s", + self.coordinator.device_name, + self.property_id, + hvac_mode, + ) + await self.async_call_api( + self.coordinator.api.async_set_hvac_mode( + self.property_id, self._requested_hvac_mode + ), + self.reset_requested_hvac_mode, + ) + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + _LOGGER.debug( + "[%s:%s] async_set_preset_mode: %s", + self.coordinator.device_name, + self.property_id, + preset_mode, + ) + await self.async_call_api( + self.coordinator.api.async_set_hvac_mode(self.property_id, preset_mode) + ) + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set new target fan mode.""" + _LOGGER.debug( + "[%s:%s] async_set_fan_mode: %s", + self.coordinator.device_name, + self.property_id, + fan_mode, + ) + await self.async_call_api( + self.coordinator.api.async_set_fan_mode(self.property_id, fan_mode) + ) + + def _round_by_step(self, temperature: float) -> float: + """Round the value by step.""" + if ( + target_temp := display_temp( + self.coordinator.hass, + temperature, + self.coordinator.hass.config.units.temperature_unit, + self.target_temperature_step or 1, + ) + ) is not None: + return target_temp + + return temperature + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + _LOGGER.debug( + "[%s:%s] async_set_temperature: %s", + self.coordinator.device_name, + self.property_id, + kwargs, + ) + + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is not None: + if ( + target_temp := self._round_by_step(temperature) + ) != self.target_temperature: + await self.async_call_api( + self.coordinator.api.async_set_target_temperature( + self.property_id, target_temp + ) + ) + + if (temperature_low := kwargs.get(ATTR_TARGET_TEMP_LOW)) is not None: + if ( + target_temp_low := self._round_by_step(temperature_low) + ) != self.target_temperature_low: + await self.async_call_api( + self.coordinator.api.async_set_target_temperature_low( + self.property_id, target_temp_low + ) + ) + + if (temperature_high := kwargs.get(ATTR_TARGET_TEMP_HIGH)) is not None: + if ( + target_temp_high := self._round_by_step(temperature_high) + ) != self.target_temperature_high: + await self.async_call_api( + self.coordinator.api.async_set_target_temperature_high( + self.property_id, target_temp_high + ) + ) diff --git a/homeassistant/components/lg_thinq/config_flow.py b/homeassistant/components/lg_thinq/config_flow.py new file mode 100644 index 00000000000000..cdb419166880da --- /dev/null +++ b/homeassistant/components/lg_thinq/config_flow.py @@ -0,0 +1,103 @@ +"""Config flow for LG ThinQ.""" + +from __future__ import annotations + +import logging +from typing import Any +import uuid + +from thinqconnect import ThinQApi, ThinQAPIException +from thinqconnect.country import Country +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import CountrySelector, CountrySelectorConfig + +from .const import ( + CLIENT_PREFIX, + CONF_CONNECT_CLIENT_ID, + DEFAULT_COUNTRY, + DOMAIN, + THINQ_DEFAULT_NAME, + THINQ_PAT_URL, +) + +SUPPORTED_COUNTRIES = [country.value for country in Country] + +_LOGGER = logging.getLogger(__name__) + + 
+class ThinQFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle a config flow.""" + + VERSION = 1 + + def _get_default_country_code(self) -> str: + """Get the default country code based on config.""" + country = self.hass.config.country + if country is not None and country in SUPPORTED_COUNTRIES: + return country + + return DEFAULT_COUNTRY + + async def _validate_and_create_entry( + self, access_token: str, country_code: str + ) -> ConfigFlowResult: + """Create an entry for the flow.""" + connect_client_id = f"{CLIENT_PREFIX}-{uuid.uuid4()!s}" + + # To verify PAT, create an api to retrieve the device list. + await ThinQApi( + session=async_get_clientsession(self.hass), + access_token=access_token, + country_code=country_code, + client_id=connect_client_id, + ).async_get_device_list() + + # If verification is success, create entry. + return self.async_create_entry( + title=THINQ_DEFAULT_NAME, + data={ + CONF_ACCESS_TOKEN: access_token, + CONF_CONNECT_CLIENT_ID: connect_client_id, + CONF_COUNTRY: country_code, + }, + ) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + + if user_input is not None: + access_token = user_input[CONF_ACCESS_TOKEN] + country_code = user_input[CONF_COUNTRY] + + # Check if PAT is already configured. + await self.async_set_unique_id(access_token) + self._abort_if_unique_id_configured() + + try: + return await self._validate_and_create_entry(access_token, country_code) + except ThinQAPIException: + errors["base"] = "token_unauthorized" + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required(CONF_ACCESS_TOKEN): cv.string, + vol.Required( + CONF_COUNTRY, default=self._get_default_country_code() + ): CountrySelector( + CountrySelectorConfig(countries=SUPPORTED_COUNTRIES) + ), + } + ), + description_placeholders={"pat_url": THINQ_PAT_URL}, + errors=errors, + ) diff --git a/homeassistant/components/lg_thinq/const.py b/homeassistant/components/lg_thinq/const.py new file mode 100644 index 00000000000000..a65dee715db710 --- /dev/null +++ b/homeassistant/components/lg_thinq/const.py @@ -0,0 +1,20 @@ +"""Constants for LG ThinQ.""" + +from datetime import timedelta +from typing import Final + +# Config flow +DOMAIN = "lg_thinq" +COMPANY = "LGE" +DEFAULT_COUNTRY: Final = "US" +THINQ_DEFAULT_NAME: Final = "LG ThinQ" +THINQ_PAT_URL: Final = "https://connect-pat.lgthinq.com" +CLIENT_PREFIX: Final = "home-assistant" +CONF_CONNECT_CLIENT_ID: Final = "connect_client_id" + +# MQTT +MQTT_SUBSCRIPTION_INTERVAL: Final = timedelta(days=1) + +# MQTT: Message types +DEVICE_PUSH_MESSAGE: Final = "DEVICE_PUSH" +DEVICE_STATUS_MESSAGE: Final = "DEVICE_STATUS" diff --git a/homeassistant/components/lg_thinq/coordinator.py b/homeassistant/components/lg_thinq/coordinator.py new file mode 100644 index 00000000000000..0ba859b1228fcd --- /dev/null +++ b/homeassistant/components/lg_thinq/coordinator.py @@ -0,0 +1,81 @@ +"""DataUpdateCoordinator for the LG ThinQ device.""" + +from __future__ import annotations + +import logging +from typing import Any + +from thinqconnect import ThinQAPIException +from thinqconnect.integration import HABridge + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """LG 
Device's Data Update Coordinator.""" + + def __init__(self, hass: HomeAssistant, ha_bridge: HABridge) -> None: + """Initialize data coordinator.""" + super().__init__( + hass, + _LOGGER, + name=f"{DOMAIN}_{ha_bridge.device.device_id}", + ) + + self.data = {} + self.api = ha_bridge + self.device_id = ha_bridge.device.device_id + self.sub_id = ha_bridge.sub_id + + alias = ha_bridge.device.alias + + # The device name is usually set to 'alias'. + # But, if the sub_id exists, it will be set to 'alias {sub_id}'. + # e.g. alias='MyWashTower', sub_id='dryer' then 'MyWashTower dryer'. + self.device_name = f"{alias} {self.sub_id}" if self.sub_id else alias + + # The unique id is usually set to 'device_id'. + # But, if the sub_id exists, it will be set to 'device_id_{sub_id}'. + # e.g. device_id='TQSXXXX', sub_id='dryer' then 'TQSXXXX_dryer'. + self.unique_id = ( + f"{self.device_id}_{self.sub_id}" if self.sub_id else self.device_id + ) + + async def _async_update_data(self) -> dict[str, Any]: + """Request to the server to update the status from full response data.""" + try: + return await self.api.fetch_data() + except ThinQAPIException as e: + raise UpdateFailed(e) from e + + def refresh_status(self) -> None: + """Refresh current status.""" + self.async_set_updated_data(self.data) + + def handle_update_status(self, status: dict[str, Any]) -> None: + """Handle the status received from the mqtt connection.""" + data = self.api.update_status(status) + if data is not None: + self.async_set_updated_data(data) + + def handle_notification_message(self, message: str | None) -> None: + """Handle the status received from the mqtt connection.""" + data = self.api.update_notification(message) + if data is not None: + self.async_set_updated_data(data) + + +async def async_setup_device_coordinator( + hass: HomeAssistant, ha_bridge: HABridge +) -> DeviceDataUpdateCoordinator: + """Create DeviceDataUpdateCoordinator and device_api per device.""" + coordinator = DeviceDataUpdateCoordinator(hass, ha_bridge) + await coordinator.async_refresh() + + _LOGGER.debug("Setup device's coordinator: %s", coordinator.device_name) + return coordinator diff --git a/homeassistant/components/lg_thinq/entity.py b/homeassistant/components/lg_thinq/entity.py new file mode 100644 index 00000000000000..f31b535dcafbf8 --- /dev/null +++ b/homeassistant/components/lg_thinq/entity.py @@ -0,0 +1,114 @@ +"""Base class for ThinQ entities.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +import logging +from typing import Any + +from thinqconnect import ThinQAPIException +from thinqconnect.devices.const import Location +from thinqconnect.integration import PropertyState + +from homeassistant.const import UnitOfTemperature +from homeassistant.core import callback +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import COMPANY, DOMAIN +from .coordinator import DeviceDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + +EMPTY_STATE = PropertyState() + +UNIT_CONVERSION_MAP: dict[str, str] = { + "F": UnitOfTemperature.FAHRENHEIT, + "C": UnitOfTemperature.CELSIUS, +} + + +class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]): + """The base implementation of all lg thinq entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: 
DeviceDataUpdateCoordinator, + entity_description: EntityDescription, + property_id: str, + ) -> None: + """Initialize an entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + self.property_id = property_id + self.location = self.coordinator.api.get_location_for_idx(self.property_id) + + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, coordinator.unique_id)}, + manufacturer=COMPANY, + model=coordinator.api.device.model_name, + name=coordinator.device_name, + ) + self._attr_unique_id = f"{coordinator.unique_id}_{self.property_id}" + if self.location is not None and self.location not in ( + Location.MAIN, + Location.OVEN, + coordinator.sub_id, + ): + self._attr_translation_placeholders = {"location": self.location} + self._attr_translation_key = ( + f"{entity_description.translation_key}_for_location" + ) + + @property + def data(self) -> PropertyState: + """Return the state data of entity.""" + return self.coordinator.data.get(self.property_id, EMPTY_STATE) + + def _get_unit_of_measurement(self, unit: str | None) -> str | None: + """Convert thinq unit string to HA unit string.""" + if unit is None: + return None + + return UNIT_CONVERSION_MAP.get(unit) + + def _update_status(self) -> None: + """Update status itself. + + All inherited classes can update their own status in here. + """ + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_status() + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + self._handle_coordinator_update() + + async def async_call_api( + self, + target: Coroutine[Any, Any, Any], + on_fail_method: Callable[[], None] | None = None, + ) -> None: + """Call the given api and handle exception.""" + try: + await target + except ThinQAPIException as exc: + if on_fail_method: + on_fail_method() + raise ServiceValidationError( + exc.message, translation_domain=DOMAIN, translation_key=exc.code + ) from exc + except ValueError as exc: + if on_fail_method: + on_fail_method() + raise ServiceValidationError(exc) from exc diff --git a/homeassistant/components/lg_thinq/event.py b/homeassistant/components/lg_thinq/event.py new file mode 100644 index 00000000000000..b963cba37cce0d --- /dev/null +++ b/homeassistant/components/lg_thinq/event.py @@ -0,0 +1,115 @@ +"""Support for event entity.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.integration import ActiveMode, ThinQPropertyEx + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +NOTIFICATION_EVENT_DESC = EventEntityDescription( + key=ThinQPropertyEx.NOTIFICATION, + translation_key=ThinQPropertyEx.NOTIFICATION, +) +ERROR_EVENT_DESC = EventEntityDescription( + key=ThinQPropertyEx.ERROR, + translation_key=ThinQPropertyEx.ERROR, +) +ALL_EVENTS: tuple[EventEntityDescription, ...] 
= (
+    ERROR_EVENT_DESC,
+    NOTIFICATION_EVENT_DESC,
+)
+DEVICE_TYPE_EVENT_MAP: dict[DeviceType, tuple[EventEntityDescription, ...]] = {
+    DeviceType.AIR_CONDITIONER: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.AIR_PURIFIER_FAN: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.AIR_PURIFIER: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.DEHUMIDIFIER: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.DISH_WASHER: ALL_EVENTS,
+    DeviceType.DRYER: ALL_EVENTS,
+    DeviceType.HUMIDIFIER: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.KIMCHI_REFRIGERATOR: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.MICROWAVE_OVEN: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.OVEN: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.REFRIGERATOR: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.ROBOT_CLEANER: ALL_EVENTS,
+    DeviceType.STICK_CLEANER: (NOTIFICATION_EVENT_DESC,),
+    DeviceType.STYLER: ALL_EVENTS,
+    DeviceType.WASHCOMBO_MAIN: ALL_EVENTS,
+    DeviceType.WASHCOMBO_MINI: ALL_EVENTS,
+    DeviceType.WASHER: ALL_EVENTS,
+    DeviceType.WASHTOWER_DRYER: ALL_EVENTS,
+    DeviceType.WASHTOWER: ALL_EVENTS,
+    DeviceType.WASHTOWER_WASHER: ALL_EVENTS,
+    DeviceType.WINE_CELLAR: (NOTIFICATION_EVENT_DESC,),
+}
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: ThinqConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up an entry for event platform."""
+    entities: list[ThinQEventEntity] = []
+    for coordinator in entry.runtime_data.coordinators.values():
+        if (
+            descriptions := DEVICE_TYPE_EVENT_MAP.get(
+                coordinator.api.device.device_type
+            )
+        ) is not None:
+            for description in descriptions:
+                entities.extend(
+                    ThinQEventEntity(coordinator, description, property_id)
+                    for property_id in coordinator.api.get_active_idx(
+                        description.key, ActiveMode.READ_ONLY
+                    )
+                )
+
+    if entities:
+        async_add_entities(entities)
+
+
+class ThinQEventEntity(ThinQEntity, EventEntity):
+    """Represent a thinq event platform."""
+
+    def __init__(
+        self,
+        coordinator: DeviceDataUpdateCoordinator,
+        entity_description: EventEntityDescription,
+        property_id: str,
+    ) -> None:
+        """Initialize an event platform."""
+        super().__init__(coordinator, entity_description, property_id)
+
+        # For event types.
+        self._attr_event_types = self.data.options
+
+    def _update_status(self) -> None:
+        """Update status itself."""
+        super()._update_status()
+
+        _LOGGER.debug(
+            "[%s:%s] update status: %s, event_types=%s",
+            self.coordinator.device_name,
+            self.property_id,
+            self.data.value,
+            self.event_types,
+        )
+        # Handle an event. 
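+        # Only values matching a known event type are forwarded; others are ignored.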
+ if (value := self.data.value) is not None and value in self.event_types: + self._async_handle_update(value) + + def _async_handle_update(self, value: str) -> None: + """Handle the event.""" + self._trigger_event(value) + self.async_write_ha_state() diff --git a/homeassistant/components/lg_thinq/fan.py b/homeassistant/components/lg_thinq/fan.py new file mode 100644 index 00000000000000..187cc74b3eb322 --- /dev/null +++ b/homeassistant/components/lg_thinq/fan.py @@ -0,0 +1,150 @@ +"""Support for fan entities.""" + +from __future__ import annotations + +import logging +from typing import Any + +from thinqconnect import DeviceType +from thinqconnect.integration import ExtendedProperty + +from homeassistant.components.fan import ( + FanEntity, + FanEntityDescription, + FanEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.percentage import ( + ordered_list_item_to_percentage, + percentage_to_ordered_list_item, +) + +from . import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +DEVICE_TYPE_FAN_MAP: dict[DeviceType, tuple[FanEntityDescription, ...]] = { + DeviceType.CEILING_FAN: ( + FanEntityDescription( + key=ExtendedProperty.FAN, + name=None, + ), + ), +} + +FOUR_STEP_SPEEDS = ["low", "mid", "high", "turbo"] + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for fan platform.""" + entities: list[ThinQFanEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_FAN_MAP.get(coordinator.api.device.device_type) + ) is not None: + for description in descriptions: + entities.extend( + ThinQFanEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx(description.key) + ) + + if entities: + async_add_entities(entities) + + +class ThinQFanEntity(ThinQEntity, FanEntity): + """Represent a thinq fan platform.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: FanEntityDescription, + property_id: str, + ) -> None: + """Initialize fan platform.""" + super().__init__(coordinator, entity_description, property_id) + + self._ordered_named_fan_speeds = [] + self._attr_supported_features |= FanEntityFeature.SET_SPEED + + if (fan_modes := self.data.fan_modes) is not None: + self._attr_speed_count = len(fan_modes) + if self.speed_count == 4: + self._ordered_named_fan_speeds = FOUR_STEP_SPEEDS + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + # Update power on state. + self._attr_is_on = self.data.is_on + + # Update fan speed. 
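+        # Map the named speed to a percentage; report 0% while the fan is off
+        # or reports an unknown speed.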
+ if ( + self.data.is_on + and (mode := self.data.fan_mode) in self._ordered_named_fan_speeds + ): + self._attr_percentage = ordered_list_item_to_percentage( + self._ordered_named_fan_speeds, mode + ) + else: + self._attr_percentage = 0 + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s (percntage=%s)", + self.coordinator.device_name, + self.property_id, + self.data.is_on, + self.is_on, + self.percentage, + ) + + async def async_set_percentage(self, percentage: int) -> None: + """Set the speed percentage of the fan.""" + if percentage == 0: + await self.async_turn_off() + return + try: + value = percentage_to_ordered_list_item( + self._ordered_named_fan_speeds, percentage + ) + except ValueError: + _LOGGER.exception("Failed to async_set_percentage") + return + + _LOGGER.debug( + "[%s:%s] async_set_percentage. percntage=%s, value=%s", + self.coordinator.device_name, + self.property_id, + percentage, + value, + ) + await self.async_call_api( + self.coordinator.api.async_set_fan_mode(self.property_id, value) + ) + + async def async_turn_on( + self, + percentage: int | None = None, + preset_mode: str | None = None, + **kwargs: Any, + ) -> None: + """Turn on the fan.""" + _LOGGER.debug( + "[%s:%s] async_turn_on", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_on(self.property_id)) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the fan off.""" + _LOGGER.debug( + "[%s:%s] async_turn_off", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_off(self.property_id)) diff --git a/homeassistant/components/lg_thinq/icons.json b/homeassistant/components/lg_thinq/icons.json new file mode 100644 index 00000000000000..87cf04e0c1a3a7 --- /dev/null +++ b/homeassistant/components/lg_thinq/icons.json @@ -0,0 +1,407 @@ +{ + "entity": { + "switch": { + "auto_mode": { + "default": "mdi:cogs" + }, + "express_mode": { + "default": "mdi:snowflake-variant" + }, + "hot_water_mode": { + "default": "mdi:list-status" + }, + "humidity_warm_mode": { + "default": "mdi:heat-wave" + }, + "hygiene_dry_mode": { + "default": "mdi:format-list-bulleted" + }, + "mood_lamp_state": { + "default": "mdi:lamp" + }, + "operation_power": { + "default": "mdi:power" + }, + "optimal_humidity": { + "default": "mdi:water-percent" + }, + "power_save_enabled": { + "default": "mdi:hydro-power" + }, + "rapid_freeze": { + "default": "mdi:snowflake" + }, + "sleep_mode": { + "default": "mdi:format-list-bulleted" + }, + "uv_nano": { + "default": "mdi:air-filter" + }, + "warm_mode": { + "default": "mdi:heat-wave" + } + }, + "binary_sensor": { + "eco_friendly_mode": { + "default": "mdi:sprout" + }, + "power_save_enabled": { + "default": "mdi:meter-electric" + }, + "remote_control_enabled": { + "default": "mdi:remote" + }, + "remote_control_enabled_for_location": { + "default": "mdi:remote" + }, + "rinse_refill": { + "default": "mdi:tune-vertical-variant" + }, + "sabbath_mode": { + "default": "mdi:food-off-outline" + }, + "machine_clean_reminder": { + "default": "mdi:tune-vertical-variant" + }, + "signal_level": { + "default": "mdi:tune-vertical-variant" + }, + "clean_light_reminder": { + "default": "mdi:tune-vertical-variant" + }, + "operation_mode": { + "default": "mdi:power" + }, + "one_touch_filter": { + "default": "mdi:air-filter" + } + }, + "climate": { + "climate_air_conditioner": { + "state_attributes": { + "fan_mode": { + "state": { + "slow": "mdi:fan-chevron-down", + "low": "mdi:fan-speed-1", + 
"mid": "mdi:fan-speed-2", + "high": "mdi:fan-speed-3", + "power": "mdi:fan-chevron-up", + "auto": "mdi:fan-auto" + } + } + } + } + }, + "event": { + "error": { + "default": "mdi:alert-circle-outline" + }, + "notification": { + "default": "mdi:message-badge-outline" + } + }, + "number": { + "target_temperature": { + "default": "mdi:thermometer" + }, + "target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "light_status": { + "default": "mdi:television-ambient-light" + }, + "fan_speed": { + "default": "mdi:wind-power-outline" + }, + "lamp_brightness": { + "default": "mdi:alarm-light-outline" + }, + "wind_temperature": { + "default": "mdi:thermometer" + }, + "relative_hour_to_start": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_start_for_location": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_start_wm": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_start_wm_for_location": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop_for_location": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop_wm": { + "default": "mdi:timer-edit-outline" + }, + "relative_hour_to_stop_wm_for_location": { + "default": "mdi:timer-edit-outline" + }, + "sleep_timer_relative_hour_to_stop": { + "default": "mdi:bed-clock" + }, + "sleep_timer_relative_hour_to_stop_for_location": { + "default": "mdi:bed-clock" + } + }, + "select": { + "wind_strength": { + "default": "mdi:wind-power-outline" + }, + "monitoring_enabled": { + "default": "mdi:monitor-eye" + }, + "current_job_mode": { + "default": "mdi:format-list-bulleted" + }, + "operation_mode": { + "default": "mdi:gesture-tap-button" + }, + "operation_mode_for_location": { + "default": "mdi:gesture-tap-button" + }, + "air_clean_operation_mode": { + "default": "mdi:air-filter" + }, + "cook_mode": { + "default": "mdi:chef-hat" + }, + "cook_mode_for_location": { + "default": "mdi:chef-hat" + }, + "light_brightness": { + "default": "mdi:list-status" + }, + "wind_angle": { + "default": "mdi:rotate-360" + }, + "display_light": { + "default": "mdi:brightness-6" + }, + "fresh_air_filter": { + "default": "mdi:air-filter" + }, + "hygiene_dry_mode": { + "default": "mdi:format-list-bulleted" + } + }, + "sensor": { + "odor_level": { + "default": "mdi:scent" + }, + "current_temperature": { + "default": "mdi:thermometer" + }, + "temperature": { + "default": "mdi:thermometer" + }, + "total_pollution_level": { + "default": "mdi:air-filter" + }, + "monitoring_enabled": { + "default": "mdi:monitor-eye" + }, + "growth_mode": { + "default": "mdi:sprout-outline" + }, + "growth_mode_for_location": { + "default": "mdi:sprout-outline" + }, + "wind_volume": { + "default": "mdi:wind-power-outline" + }, + "wind_volume_for_location": { + "default": "mdi:wind-power-outline" + }, + "brightness": { + "default": "mdi:tune-vertical-variant" + }, + "brightness_for_location": { + "default": "mdi:tune-vertical-variant" + }, + "duration": { + "default": "mdi:tune-vertical-variant" + }, + "duration_for_location": { + "default": "mdi:tune-vertical-variant" + }, + "day_target_temperature": { + "default": "mdi:thermometer" + }, + "day_target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "night_target_temperature": { + "default": "mdi:thermometer" + }, + "night_target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "temperature_state": { + "default": "mdi:thermometer" + }, + 
"temperature_state_for_location": { + "default": "mdi:thermometer" + }, + "current_state": { + "default": "mdi:list-status" + }, + "current_state_for_location": { + "default": "mdi:list-status" + }, + "fresh_air_filter": { + "default": "mdi:air-filter" + }, + "filter_lifetime": { + "default": "mdi:air-filter" + }, + "used_time": { + "default": "mdi:air-filter" + }, + "current_job_mode": { + "default": "mdi:dots-circle" + }, + "current_job_mode_stick_cleaner": { + "default": "mdi:dots-circle" + }, + "personalization_mode": { + "default": "mdi:dots-circle" + }, + "current_dish_washing_course": { + "default": "mdi:format-list-checks" + }, + "rinse_level": { + "default": "mdi:tune-vertical-variant" + }, + "softening_level": { + "default": "mdi:tune-vertical-variant" + }, + "cock_state": { + "default": "mdi:air-filter" + }, + "sterilizing_state": { + "default": "mdi:water-alert-outline" + }, + "water_type": { + "default": "mdi:water" + }, + "target_temperature": { + "default": "mdi:thermometer" + }, + "target_temperature_for_location": { + "default": "mdi:thermometer" + }, + "elapsed_day_state": { + "default": "mdi:calendar-range-outline" + }, + "elapsed_day_total": { + "default": "mdi:calendar-range-outline" + }, + "recipe_name": { + "default": "mdi:information-box-outline" + }, + "wort_info": { + "default": "mdi:information-box-outline" + }, + "yeast_info": { + "default": "mdi:information-box-outline" + }, + "hop_oil_info": { + "default": "mdi:information-box-outline" + }, + "flavor_info": { + "default": "mdi:information-box-outline" + }, + "beer_remain": { + "default": "mdi:glass-mug-variant" + }, + "battery_level": { + "default": "mdi:battery-medium" + }, + "relative_to_start": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_start_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_start_wm": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_start_wm_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop_wm": { + "default": "mdi:clock-time-three-outline" + }, + "relative_to_stop_wm_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "sleep_timer_relative_to_stop": { + "default": "mdi:bed-clock" + }, + "sleep_timer_relative_to_stop_for_location": { + "default": "mdi:bed-clock" + }, + "absolute_to_start": { + "default": "mdi:clock-time-three-outline" + }, + "absolute_to_start_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "absolute_to_stop": { + "default": "mdi:clock-time-three-outline" + }, + "absolute_to_stop_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "remain": { + "default": "mdi:timer-sand" + }, + "remain_for_location": { + "default": "mdi:timer-sand" + }, + "running": { + "default": "mdi:timer-play-outline" + }, + "running_for_location": { + "default": "mdi:timer-play-outline" + }, + "total": { + "default": "mdi:timer-play-outline" + }, + "total_for_location": { + "default": "mdi:timer-play-outline" + }, + "target": { + "default": "mdi:clock-time-three-outline" + }, + "target_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "light_start": { + "default": "mdi:clock-time-three-outline" + }, + "light_start_for_location": { + "default": "mdi:clock-time-three-outline" + }, + "power_level": { + "default": "mdi:radiator" + }, + "power_level_for_location": { + 
"default": "mdi:radiator" + } + } + } +} diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json new file mode 100644 index 00000000000000..665a5a9e179537 --- /dev/null +++ b/homeassistant/components/lg_thinq/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "lg_thinq", + "name": "LG ThinQ", + "codeowners": ["@LG-ThinQ-Integration"], + "config_flow": true, + "dependencies": [], + "documentation": "https://www.home-assistant.io/integrations/lg_thinq/", + "iot_class": "cloud_push", + "loggers": ["thinqconnect"], + "requirements": ["thinqconnect==1.0.0"] +} diff --git a/homeassistant/components/lg_thinq/mqtt.py b/homeassistant/components/lg_thinq/mqtt.py new file mode 100644 index 00000000000000..30d1302e458a9e --- /dev/null +++ b/homeassistant/components/lg_thinq/mqtt.py @@ -0,0 +1,186 @@ +"""Support for LG ThinQ Connect API.""" + +from __future__ import annotations + +import asyncio +from datetime import datetime +import json +import logging +from typing import Any + +from thinqconnect import ( + DeviceType, + ThinQApi, + ThinQAPIErrorCodes, + ThinQAPIException, + ThinQMQTTClient, +) + +from homeassistant.core import Event, HomeAssistant + +from .const import DEVICE_PUSH_MESSAGE, DEVICE_STATUS_MESSAGE +from .coordinator import DeviceDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class ThinQMQTT: + """A class that implements MQTT connection.""" + + def __init__( + self, + hass: HomeAssistant, + thinq_api: ThinQApi, + client_id: str, + coordinators: dict[str, DeviceDataUpdateCoordinator], + ) -> None: + """Initialize a mqtt.""" + self.hass = hass + self.thinq_api = thinq_api + self.client_id = client_id + self.coordinators = coordinators + self.client: ThinQMQTTClient | None = None + + async def async_connect(self) -> bool: + """Create a mqtt client and then try to connect.""" + try: + self.client = await ThinQMQTTClient( + self.thinq_api, self.client_id, self.on_message_received + ) + if self.client is None: + return False + + # Connect to server and create certificate. + return await self.client.async_prepare_mqtt() + except (ThinQAPIException, TypeError, ValueError): + _LOGGER.exception("Failed to connect") + return False + + async def async_disconnect(self, event: Event | None = None) -> None: + """Unregister client and disconnects handlers.""" + await self.async_end_subscribes() + + if self.client is not None: + try: + await self.client.async_disconnect() + except (ThinQAPIException, TypeError, ValueError): + _LOGGER.exception("Failed to disconnect") + + def _get_failed_device_count( + self, results: list[dict | BaseException | None] + ) -> int: + """Check if there exists errors while performing tasks and then return count.""" + # Note that result code '1207' means 'Already subscribed push' + # and is not actually fail. 
+ return sum(
+ isinstance(result, (TypeError, ValueError))
+ or (
+ isinstance(result, ThinQAPIException)
+ and result.code != ThinQAPIErrorCodes.ALREADY_SUBSCRIBED_PUSH
+ )
+ for result in results
+ )
+
+ async def async_refresh_subscribe(self, now: datetime | None = None) -> None:
+ """Update event subscribes."""
+ _LOGGER.debug("async_refresh_subscribe: now=%s", now)
+
+ tasks = [
+ self.hass.async_create_task(
+ self.thinq_api.async_post_event_subscribe(coordinator.device_id)
+ )
+ for coordinator in self.coordinators.values()
+ ]
+ if tasks:
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+ if (count := self._get_failed_device_count(results)) > 0:
+ _LOGGER.error("Failed to refresh subscription on %s devices", count)
+
+ async def async_start_subscribes(self) -> None:
+ """Start push/event subscribes."""
+ _LOGGER.debug("async_start_subscribes")
+
+ if self.client is None:
+ _LOGGER.error("Failed to start subscription: No client")
+ return
+
+ tasks = [
+ self.hass.async_create_task(
+ self.thinq_api.async_post_push_subscribe(coordinator.device_id)
+ )
+ for coordinator in self.coordinators.values()
+ ]
+ tasks.extend(
+ self.hass.async_create_task(
+ self.thinq_api.async_post_event_subscribe(coordinator.device_id)
+ )
+ for coordinator in self.coordinators.values()
+ )
+ if tasks:
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+ if (count := self._get_failed_device_count(results)) > 0:
+ _LOGGER.error("Failed to start subscription on %s devices", count)
+
+ await self.client.async_connect_mqtt()
+
+ async def async_end_subscribes(self) -> None:
+ """Stop push/event subscribes."""
+ _LOGGER.debug("async_end_subscribes")
+
+ tasks = [
+ self.hass.async_create_task(
+ self.thinq_api.async_delete_push_subscribe(coordinator.device_id)
+ )
+ for coordinator in self.coordinators.values()
+ ]
+ tasks.extend(
+ self.hass.async_create_task(
+ self.thinq_api.async_delete_event_subscribe(coordinator.device_id)
+ )
+ for coordinator in self.coordinators.values()
+ )
+ if tasks:
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+ if (count := self._get_failed_device_count(results)) > 0:
+ _LOGGER.error("Failed to end subscription on %s devices", count)
+
+ def on_message_received(
+ self,
+ topic: str,
+ payload: bytes,
+ dup: bool,
+ qos: Any,
+ retain: bool,
+ **kwargs: dict,
+ ) -> None:
+ """Handle a received message that matches the topic."""
+ decoded = payload.decode()
+ try:
+ message = json.loads(decoded)
+ except ValueError:
+ _LOGGER.error("Failed to parse message: payload=%s", decoded)
+ return
+
+ asyncio.run_coroutine_threadsafe(
+ self.async_handle_device_event(message), self.hass.loop
+ ).result()
+
+ async def async_handle_device_event(self, message: dict) -> None:
+ """Handle received mqtt message."""
+ _LOGGER.debug("async_handle_device_event: message=%s", message)
+ unique_id = (
+ f"{message["deviceId"]}_{list(message["report"].keys())[0]}"
+ if message["deviceType"] == DeviceType.WASHTOWER
+ else message["deviceId"]
+ )
+ coordinator = self.coordinators.get(unique_id)
+ if coordinator is None:
+ _LOGGER.error("Failed to handle device event: No device")
+ return
+
+ push_type = message.get("pushType")
+
+ if push_type == DEVICE_STATUS_MESSAGE:
+ coordinator.handle_update_status(message.get("report", {}))
+ elif push_type == DEVICE_PUSH_MESSAGE:
+ coordinator.handle_notification_message(message.get("pushCode"))
diff --git a/homeassistant/components/lg_thinq/number.py b/homeassistant/components/lg_thinq/number.py
new file
mode 100644 index 00000000000000..634c1a8fe84315 --- /dev/null +++ b/homeassistant/components/lg_thinq/number.py @@ -0,0 +1,224 @@ +"""Support for number entities.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode, TimerProperty + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import PERCENTAGE, UnitOfTemperature, UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .entity import ThinQEntity + +NUMBER_DESC: dict[ThinQProperty, NumberEntityDescription] = { + ThinQProperty.FAN_SPEED: NumberEntityDescription( + key=ThinQProperty.FAN_SPEED, + translation_key=ThinQProperty.FAN_SPEED, + ), + ThinQProperty.LAMP_BRIGHTNESS: NumberEntityDescription( + key=ThinQProperty.LAMP_BRIGHTNESS, + translation_key=ThinQProperty.LAMP_BRIGHTNESS, + ), + ThinQProperty.LIGHT_STATUS: NumberEntityDescription( + key=ThinQProperty.LIGHT_STATUS, + native_unit_of_measurement=PERCENTAGE, + translation_key=ThinQProperty.LIGHT_STATUS, + ), + ThinQProperty.TARGET_HUMIDITY: NumberEntityDescription( + key=ThinQProperty.TARGET_HUMIDITY, + device_class=NumberDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + translation_key=ThinQProperty.TARGET_HUMIDITY, + ), + ThinQProperty.TARGET_TEMPERATURE: NumberEntityDescription( + key=ThinQProperty.TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + translation_key=ThinQProperty.TARGET_TEMPERATURE, + ), + ThinQProperty.WIND_TEMPERATURE: NumberEntityDescription( + key=ThinQProperty.WIND_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + translation_key=ThinQProperty.WIND_TEMPERATURE, + ), +} +TIMER_NUMBER_DESC: dict[ThinQProperty, NumberEntityDescription] = { + ThinQProperty.RELATIVE_HOUR_TO_START: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_START, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.RELATIVE_HOUR_TO_START, + ), + TimerProperty.RELATIVE_HOUR_TO_START_WM: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_START, + native_min_value=0, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=TimerProperty.RELATIVE_HOUR_TO_START_WM, + ), + ThinQProperty.RELATIVE_HOUR_TO_STOP: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_STOP, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.RELATIVE_HOUR_TO_STOP, + ), + TimerProperty.RELATIVE_HOUR_TO_STOP_WM: NumberEntityDescription( + key=ThinQProperty.RELATIVE_HOUR_TO_STOP, + native_min_value=0, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=TimerProperty.RELATIVE_HOUR_TO_STOP_WM, + ), + ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP: NumberEntityDescription( + key=ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP, + ), +} +WASHER_NUMBERS: tuple[NumberEntityDescription, ...] 
= (
+ TIMER_NUMBER_DESC[TimerProperty.RELATIVE_HOUR_TO_START_WM],
+ TIMER_NUMBER_DESC[TimerProperty.RELATIVE_HOUR_TO_STOP_WM],
+)
+
+DEVICE_TYPE_NUMBER_MAP: dict[DeviceType, tuple[NumberEntityDescription, ...]] = {
+ DeviceType.AIR_CONDITIONER: (
+ TIMER_NUMBER_DESC[ThinQProperty.RELATIVE_HOUR_TO_START],
+ TIMER_NUMBER_DESC[ThinQProperty.RELATIVE_HOUR_TO_STOP],
+ TIMER_NUMBER_DESC[ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP],
+ ),
+ DeviceType.AIR_PURIFIER_FAN: (
+ NUMBER_DESC[ThinQProperty.WIND_TEMPERATURE],
+ TIMER_NUMBER_DESC[ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP],
+ ),
+ DeviceType.DRYER: WASHER_NUMBERS,
+ DeviceType.HOOD: (
+ NUMBER_DESC[ThinQProperty.LAMP_BRIGHTNESS],
+ NUMBER_DESC[ThinQProperty.FAN_SPEED],
+ ),
+ DeviceType.HUMIDIFIER: (
+ NUMBER_DESC[ThinQProperty.TARGET_HUMIDITY],
+ TIMER_NUMBER_DESC[ThinQProperty.SLEEP_TIMER_RELATIVE_HOUR_TO_STOP],
+ ),
+ DeviceType.MICROWAVE_OVEN: (
+ NUMBER_DESC[ThinQProperty.LAMP_BRIGHTNESS],
+ NUMBER_DESC[ThinQProperty.FAN_SPEED],
+ ),
+ DeviceType.OVEN: (NUMBER_DESC[ThinQProperty.TARGET_TEMPERATURE],),
+ DeviceType.REFRIGERATOR: (NUMBER_DESC[ThinQProperty.TARGET_TEMPERATURE],),
+ DeviceType.STYLER: (TIMER_NUMBER_DESC[TimerProperty.RELATIVE_HOUR_TO_STOP_WM],),
+ DeviceType.WASHCOMBO_MAIN: WASHER_NUMBERS,
+ DeviceType.WASHCOMBO_MINI: WASHER_NUMBERS,
+ DeviceType.WASHER: WASHER_NUMBERS,
+ DeviceType.WASHTOWER_DRYER: WASHER_NUMBERS,
+ DeviceType.WASHTOWER: WASHER_NUMBERS,
+ DeviceType.WASHTOWER_WASHER: WASHER_NUMBERS,
+ DeviceType.WATER_HEATER: (
+ NumberEntityDescription(
+ key=ThinQProperty.TARGET_TEMPERATURE,
+ native_max_value=60,
+ native_min_value=35,
+ native_step=1,
+ native_unit_of_measurement=UnitOfTemperature.CELSIUS,
+ translation_key=ThinQProperty.TARGET_TEMPERATURE,
+ ),
+ ),
+ DeviceType.WINE_CELLAR: (
+ NUMBER_DESC[ThinQProperty.LIGHT_STATUS],
+ NUMBER_DESC[ThinQProperty.TARGET_TEMPERATURE],
+ ),
+}
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+ hass: HomeAssistant,
+ entry: ThinqConfigEntry,
+ async_add_entities: AddEntitiesCallback,
+) -> None:
+ """Set up an entry for number platform."""
+ entities: list[ThinQNumberEntity] = []
+ for coordinator in entry.runtime_data.coordinators.values():
+ if (
+ descriptions := DEVICE_TYPE_NUMBER_MAP.get(
+ coordinator.api.device.device_type
+ )
+ ) is not None:
+ for description in descriptions:
+ entities.extend(
+ ThinQNumberEntity(coordinator, description, property_id)
+ for property_id in coordinator.api.get_active_idx(
+ description.key, ActiveMode.READ_WRITE
+ )
+ )
+
+ if entities:
+ async_add_entities(entities)
+
+
+class ThinQNumberEntity(ThinQEntity, NumberEntity):
+ """Represent a thinq number platform."""
+
+ _attr_mode = NumberMode.BOX
+
+ def _update_status(self) -> None:
+ """Update status itself."""
+ super()._update_status()
+
+ self._attr_native_value = self.data.value
+
+ # Update unit.
+ if (
+ unit_of_measurement := self._get_unit_of_measurement(self.data.unit)
+ ) is not None:
+ self._attr_native_unit_of_measurement = unit_of_measurement
+
+ # Update range.
+ if ( + self.entity_description.native_min_value is None + and (min_value := self.data.min) is not None + ): + self._attr_native_min_value = min_value + + if ( + self.entity_description.native_max_value is None + and (max_value := self.data.max) is not None + ): + self._attr_native_max_value = max_value + + if ( + self.entity_description.native_step is None + and (step := self.data.step) is not None + ): + self._attr_native_step = step + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s, unit:%s, min:%s, max:%s, step:%s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.native_value, + self.native_unit_of_measurement, + self.native_min_value, + self.native_max_value, + self.native_step, + ) + + async def async_set_native_value(self, value: float) -> None: + """Change to new number value.""" + if self.step.is_integer(): + value = int(value) + _LOGGER.debug( + "[%s:%s] async_set_native_value: %s", + self.coordinator.device_name, + self.property_id, + value, + ) + + await self.async_call_api(self.coordinator.api.post(self.property_id, value)) diff --git a/homeassistant/components/lg_thinq/select.py b/homeassistant/components/lg_thinq/select.py new file mode 100644 index 00000000000000..e555d616ca33c7 --- /dev/null +++ b/homeassistant/components/lg_thinq/select.py @@ -0,0 +1,207 @@ +"""Support for select entities.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +SELECT_DESC: dict[ThinQProperty, SelectEntityDescription] = { + ThinQProperty.MONITORING_ENABLED: SelectEntityDescription( + key=ThinQProperty.MONITORING_ENABLED, + translation_key=ThinQProperty.MONITORING_ENABLED, + ), + ThinQProperty.COOK_MODE: SelectEntityDescription( + key=ThinQProperty.COOK_MODE, + translation_key=ThinQProperty.COOK_MODE, + ), + ThinQProperty.DISPLAY_LIGHT: SelectEntityDescription( + key=ThinQProperty.DISPLAY_LIGHT, + translation_key=ThinQProperty.DISPLAY_LIGHT, + ), + ThinQProperty.CURRENT_JOB_MODE: SelectEntityDescription( + key=ThinQProperty.CURRENT_JOB_MODE, + translation_key=ThinQProperty.CURRENT_JOB_MODE, + ), + ThinQProperty.FRESH_AIR_FILTER: SelectEntityDescription( + key=ThinQProperty.FRESH_AIR_FILTER, + translation_key=ThinQProperty.FRESH_AIR_FILTER, + ), +} +AIR_FLOW_SELECT_DESC: dict[ThinQProperty, SelectEntityDescription] = { + ThinQProperty.WIND_STRENGTH: SelectEntityDescription( + key=ThinQProperty.WIND_STRENGTH, + translation_key=ThinQProperty.WIND_STRENGTH, + ), + ThinQProperty.WIND_ANGLE: SelectEntityDescription( + key=ThinQProperty.WIND_ANGLE, + translation_key=ThinQProperty.WIND_ANGLE, + ), +} +OPERATION_SELECT_DESC: dict[ThinQProperty, SelectEntityDescription] = { + ThinQProperty.AIR_CLEAN_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.AIR_CLEAN_OPERATION_MODE, + translation_key="air_clean_operation_mode", + ), + ThinQProperty.DISH_WASHER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.DISH_WASHER_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.DRYER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.DRYER_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.HYGIENE_DRY_MODE: SelectEntityDescription( + key=ThinQProperty.HYGIENE_DRY_MODE, + translation_key=ThinQProperty.HYGIENE_DRY_MODE, + ), + ThinQProperty.LIGHT_BRIGHTNESS: SelectEntityDescription( + key=ThinQProperty.LIGHT_BRIGHTNESS, + translation_key=ThinQProperty.LIGHT_BRIGHTNESS, + ), + ThinQProperty.OVEN_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.OVEN_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.STYLER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.STYLER_OPERATION_MODE, + translation_key="operation_mode", + ), + ThinQProperty.WASHER_OPERATION_MODE: SelectEntityDescription( + key=ThinQProperty.WASHER_OPERATION_MODE, + translation_key="operation_mode", + ), +} + +DEVICE_TYPE_SELECT_MAP: dict[DeviceType, tuple[SelectEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + SELECT_DESC[ThinQProperty.MONITORING_ENABLED], + OPERATION_SELECT_DESC[ThinQProperty.AIR_CLEAN_OPERATION_MODE], + ), + DeviceType.AIR_PURIFIER_FAN: ( + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH], + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_ANGLE], + SELECT_DESC[ThinQProperty.DISPLAY_LIGHT], + SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE], + ), + DeviceType.AIR_PURIFIER: ( + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH], + SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE], + ), + DeviceType.DEHUMIDIFIER: (AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH],), + DeviceType.DISH_WASHER: ( + OPERATION_SELECT_DESC[ThinQProperty.DISH_WASHER_OPERATION_MODE], + ), + DeviceType.DRYER: (OPERATION_SELECT_DESC[ThinQProperty.DRYER_OPERATION_MODE],), + DeviceType.HUMIDIFIER: ( + AIR_FLOW_SELECT_DESC[ThinQProperty.WIND_STRENGTH], + SELECT_DESC[ThinQProperty.DISPLAY_LIGHT], + 
SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE], + OPERATION_SELECT_DESC[ThinQProperty.HYGIENE_DRY_MODE], + ), + DeviceType.OVEN: ( + SELECT_DESC[ThinQProperty.COOK_MODE], + OPERATION_SELECT_DESC[ThinQProperty.OVEN_OPERATION_MODE], + ), + DeviceType.REFRIGERATOR: (SELECT_DESC[ThinQProperty.FRESH_AIR_FILTER],), + DeviceType.STYLER: (OPERATION_SELECT_DESC[ThinQProperty.STYLER_OPERATION_MODE],), + DeviceType.WASHCOMBO_MAIN: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHCOMBO_MINI: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHER: (OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE],), + DeviceType.WASHTOWER_DRYER: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHTOWER: ( + OPERATION_SELECT_DESC[ThinQProperty.DRYER_OPERATION_MODE], + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WASHTOWER_WASHER: ( + OPERATION_SELECT_DESC[ThinQProperty.WASHER_OPERATION_MODE], + ), + DeviceType.WATER_HEATER: (SELECT_DESC[ThinQProperty.CURRENT_JOB_MODE],), + DeviceType.WINE_CELLAR: (OPERATION_SELECT_DESC[ThinQProperty.LIGHT_BRIGHTNESS],), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for select platform.""" + entities: list[ThinQSelectEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_SELECT_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQSelectEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.WRITABLE + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQSelectEntity(ThinQEntity, SelectEntity): + """Represent a thinq select platform.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: SelectEntityDescription, + property_id: str, + ) -> None: + """Initialize a select entity.""" + super().__init__(coordinator, entity_description, property_id) + + self._attr_options = self.data.options if self.data.options is not None else [] + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + if self.data.value: + self._attr_current_option = str(self.data.value) + else: + self._attr_current_option = None + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s, options:%s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.current_option, + self.options, + ) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + _LOGGER.debug( + "[%s:%s] async_select_option: %s", + self.coordinator.device_name, + self.property_id, + option, + ) + await self.async_call_api(self.coordinator.api.post(self.property_id, option)) diff --git a/homeassistant/components/lg_thinq/sensor.py b/homeassistant/components/lg_thinq/sensor.py new file mode 100644 index 00000000000000..99b4df8176e29c --- /dev/null +++ b/homeassistant/components/lg_thinq/sensor.py @@ -0,0 +1,447 @@ +"""Support for sensor entities.""" + +from __future__ import annotations + +import logging + +from thinqconnect import DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode, ThinQPropertyEx, TimerProperty + +from 
homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + PERCENTAGE, + UnitOfTemperature, + UnitOfTime, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +AIR_QUALITY_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.PM1: SensorEntityDescription( + key=ThinQProperty.PM1, + device_class=SensorDeviceClass.PM1, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.PM2: SensorEntityDescription( + key=ThinQProperty.PM2, + device_class=SensorDeviceClass.PM25, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.PM10: SensorEntityDescription( + key=ThinQProperty.PM10, + device_class=SensorDeviceClass.PM10, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.HUMIDITY: SensorEntityDescription( + key=ThinQProperty.HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + ThinQProperty.MONITORING_ENABLED: SensorEntityDescription( + key=ThinQProperty.MONITORING_ENABLED, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.MONITORING_ENABLED, + ), + ThinQProperty.TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.TEMPERATURE, + ), + ThinQProperty.ODOR_LEVEL: SensorEntityDescription( + key=ThinQProperty.ODOR_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.ODOR_LEVEL, + ), + ThinQProperty.TOTAL_POLLUTION_LEVEL: SensorEntityDescription( + key=ThinQProperty.TOTAL_POLLUTION_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.TOTAL_POLLUTION_LEVEL, + ), +} +BATTERY_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.BATTERY_PERCENT: SensorEntityDescription( + key=ThinQProperty.BATTERY_PERCENT, + translation_key=ThinQProperty.BATTERY_LEVEL, + ), +} +DISH_WASHING_COURSE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_DISH_WASHING_COURSE: SensorEntityDescription( + key=ThinQProperty.CURRENT_DISH_WASHING_COURSE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.CURRENT_DISH_WASHING_COURSE, + ) +} +FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.FILTER_LIFETIME: SensorEntityDescription( + key=ThinQProperty.FILTER_LIFETIME, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.FILTER_LIFETIME, + ), +} +HUMIDITY_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_HUMIDITY: SensorEntityDescription( + key=ThinQProperty.CURRENT_HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ) +} +JOB_MODE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_JOB_MODE: SensorEntityDescription( + 
key=ThinQProperty.CURRENT_JOB_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.CURRENT_JOB_MODE, + ), + ThinQPropertyEx.CURRENT_JOB_MODE_STICK_CLEANER: SensorEntityDescription( + key=ThinQProperty.CURRENT_JOB_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQPropertyEx.CURRENT_JOB_MODE_STICK_CLEANER, + ), + ThinQProperty.PERSONALIZATION_MODE: SensorEntityDescription( + key=ThinQProperty.PERSONALIZATION_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.PERSONALIZATION_MODE, + ), +} +LIGHT_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.BRIGHTNESS: SensorEntityDescription( + key=ThinQProperty.BRIGHTNESS, + translation_key=ThinQProperty.BRIGHTNESS, + ), + ThinQProperty.DURATION: SensorEntityDescription( + key=ThinQProperty.DURATION, + native_unit_of_measurement=UnitOfTime.HOURS, + translation_key=ThinQProperty.DURATION, + ), +} +POWER_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.POWER_LEVEL: SensorEntityDescription( + key=ThinQProperty.POWER_LEVEL, + translation_key=ThinQProperty.POWER_LEVEL, + ) +} +PREFERENCE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.RINSE_LEVEL: SensorEntityDescription( + key=ThinQProperty.RINSE_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.RINSE_LEVEL, + ), + ThinQProperty.SOFTENING_LEVEL: SensorEntityDescription( + key=ThinQProperty.SOFTENING_LEVEL, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.SOFTENING_LEVEL, + ), +} +RECIPE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.RECIPE_NAME: SensorEntityDescription( + key=ThinQProperty.RECIPE_NAME, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.RECIPE_NAME, + ), + ThinQProperty.WORT_INFO: SensorEntityDescription( + key=ThinQProperty.WORT_INFO, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.WORT_INFO, + ), + ThinQProperty.YEAST_INFO: SensorEntityDescription( + key=ThinQProperty.YEAST_INFO, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.YEAST_INFO, + ), + ThinQProperty.HOP_OIL_INFO: SensorEntityDescription( + key=ThinQProperty.HOP_OIL_INFO, + translation_key=ThinQProperty.HOP_OIL_INFO, + ), + ThinQProperty.FLAVOR_INFO: SensorEntityDescription( + key=ThinQProperty.FLAVOR_INFO, + translation_key=ThinQProperty.FLAVOR_INFO, + ), + ThinQProperty.BEER_REMAIN: SensorEntityDescription( + key=ThinQProperty.BEER_REMAIN, + native_unit_of_measurement=PERCENTAGE, + translation_key=ThinQProperty.BEER_REMAIN, + ), +} +REFRIGERATION_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.FRESH_AIR_FILTER: SensorEntityDescription( + key=ThinQProperty.FRESH_AIR_FILTER, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.FRESH_AIR_FILTER, + ), +} +RUN_STATE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.CURRENT_STATE: SensorEntityDescription( + key=ThinQProperty.CURRENT_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.CURRENT_STATE, + ), + ThinQProperty.COCK_STATE: SensorEntityDescription( + key=ThinQProperty.COCK_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.COCK_STATE, + ), + ThinQProperty.STERILIZING_STATE: SensorEntityDescription( + key=ThinQProperty.STERILIZING_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.STERILIZING_STATE, + ), + 
ThinQProperty.GROWTH_MODE: SensorEntityDescription( + key=ThinQProperty.GROWTH_MODE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.GROWTH_MODE, + ), + ThinQProperty.WIND_VOLUME: SensorEntityDescription( + key=ThinQProperty.WIND_VOLUME, + device_class=SensorDeviceClass.WIND_SPEED, + translation_key=ThinQProperty.WIND_VOLUME, + ), +} +TEMPERATURE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.TARGET_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + translation_key=ThinQProperty.TARGET_TEMPERATURE, + ), + ThinQProperty.DAY_TARGET_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.DAY_TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.DAY_TARGET_TEMPERATURE, + ), + ThinQProperty.NIGHT_TARGET_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.NIGHT_TARGET_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.NIGHT_TARGET_TEMPERATURE, + ), + ThinQProperty.TEMPERATURE_STATE: SensorEntityDescription( + key=ThinQProperty.TEMPERATURE_STATE, + device_class=SensorDeviceClass.ENUM, + translation_key=ThinQProperty.TEMPERATURE_STATE, + ), + ThinQProperty.CURRENT_TEMPERATURE: SensorEntityDescription( + key=ThinQProperty.CURRENT_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=ThinQProperty.CURRENT_TEMPERATURE, + ), +} +WATER_FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.USED_TIME: SensorEntityDescription( + key=ThinQProperty.USED_TIME, + native_unit_of_measurement=UnitOfTime.MONTHS, + translation_key=ThinQProperty.USED_TIME, + ), +} +WATER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { + ThinQProperty.WATER_TYPE: SensorEntityDescription( + key=ThinQProperty.WATER_TYPE, + translation_key=ThinQProperty.WATER_TYPE, + ), +} + +WASHER_SENSORS: tuple[SensorEntityDescription, ...] 
= ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], +) +DEVICE_TYPE_SENSOR_MAP: dict[DeviceType, tuple[SensorEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.ODOR_LEVEL], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + FILTER_INFO_SENSOR_DESC[ThinQProperty.FILTER_LIFETIME], + ), + DeviceType.AIR_PURIFIER_FAN: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TEMPERATURE], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.MONITORING_ENABLED], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.ODOR_LEVEL], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + ), + DeviceType.AIR_PURIFIER: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.MONITORING_ENABLED], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.ODOR_LEVEL], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + JOB_MODE_SENSOR_DESC[ThinQProperty.CURRENT_JOB_MODE], + JOB_MODE_SENSOR_DESC[ThinQProperty.PERSONALIZATION_MODE], + ), + DeviceType.COOKTOP: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + POWER_SENSOR_DESC[ThinQProperty.POWER_LEVEL], + ), + DeviceType.DEHUMIDIFIER: ( + JOB_MODE_SENSOR_DESC[ThinQProperty.CURRENT_JOB_MODE], + HUMIDITY_SENSOR_DESC[ThinQProperty.CURRENT_HUMIDITY], + ), + DeviceType.DISH_WASHER: ( + DISH_WASHING_COURSE_SENSOR_DESC[ThinQProperty.CURRENT_DISH_WASHING_COURSE], + PREFERENCE_SENSOR_DESC[ThinQProperty.RINSE_LEVEL], + PREFERENCE_SENSOR_DESC[ThinQProperty.SOFTENING_LEVEL], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + ), + DeviceType.DRYER: WASHER_SENSORS, + DeviceType.HOME_BREW: ( + RECIPE_SENSOR_DESC[ThinQProperty.RECIPE_NAME], + RECIPE_SENSOR_DESC[ThinQProperty.WORT_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.YEAST_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.HOP_OIL_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.FLAVOR_INFO], + RECIPE_SENSOR_DESC[ThinQProperty.BEER_REMAIN], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + ), + DeviceType.HUMIDIFIER: ( + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM1], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM2], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.PM10], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.HUMIDITY], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TEMPERATURE], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.MONITORING_ENABLED], + AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL], + ), + DeviceType.KIMCHI_REFRIGERATOR: ( + REFRIGERATION_SENSOR_DESC[ThinQProperty.FRESH_AIR_FILTER], + SensorEntityDescription( + key=ThinQProperty.TARGET_TEMPERATURE, + translation_key=ThinQProperty.TARGET_TEMPERATURE, + ), + ), + DeviceType.MICROWAVE_OVEN: (RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE],), + DeviceType.OVEN: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + TEMPERATURE_SENSOR_DESC[ThinQProperty.TARGET_TEMPERATURE], + ), + DeviceType.PLANT_CULTIVATOR: ( + LIGHT_SENSOR_DESC[ThinQProperty.BRIGHTNESS], + LIGHT_SENSOR_DESC[ThinQProperty.DURATION], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + 
RUN_STATE_SENSOR_DESC[ThinQProperty.GROWTH_MODE], + RUN_STATE_SENSOR_DESC[ThinQProperty.WIND_VOLUME], + TEMPERATURE_SENSOR_DESC[ThinQProperty.DAY_TARGET_TEMPERATURE], + TEMPERATURE_SENSOR_DESC[ThinQProperty.NIGHT_TARGET_TEMPERATURE], + TEMPERATURE_SENSOR_DESC[ThinQProperty.TEMPERATURE_STATE], + ), + DeviceType.REFRIGERATOR: ( + REFRIGERATION_SENSOR_DESC[ThinQProperty.FRESH_AIR_FILTER], + WATER_FILTER_INFO_SENSOR_DESC[ThinQProperty.USED_TIME], + ), + DeviceType.ROBOT_CLEANER: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + JOB_MODE_SENSOR_DESC[ThinQProperty.CURRENT_JOB_MODE], + ), + DeviceType.STICK_CLEANER: ( + BATTERY_SENSOR_DESC[ThinQProperty.BATTERY_PERCENT], + JOB_MODE_SENSOR_DESC[ThinQPropertyEx.CURRENT_JOB_MODE_STICK_CLEANER], + RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], + ), + DeviceType.STYLER: WASHER_SENSORS, + DeviceType.WASHCOMBO_MAIN: WASHER_SENSORS, + DeviceType.WASHCOMBO_MINI: WASHER_SENSORS, + DeviceType.WASHER: WASHER_SENSORS, + DeviceType.WASHTOWER_DRYER: WASHER_SENSORS, + DeviceType.WASHTOWER: WASHER_SENSORS, + DeviceType.WASHTOWER_WASHER: WASHER_SENSORS, + DeviceType.WATER_HEATER: ( + TEMPERATURE_SENSOR_DESC[ThinQProperty.CURRENT_TEMPERATURE], + ), + DeviceType.WATER_PURIFIER: ( + RUN_STATE_SENSOR_DESC[ThinQProperty.COCK_STATE], + RUN_STATE_SENSOR_DESC[ThinQProperty.STERILIZING_STATE], + WATER_INFO_SENSOR_DESC[ThinQProperty.WATER_TYPE], + ), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for sensor platform.""" + entities: list[ThinQSensorEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_SENSOR_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQSensorEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, + ( + ActiveMode.READABLE + if ( + coordinator.api.device.device_type == DeviceType.COOKTOP + or isinstance(description.key, TimerProperty) + ) + else ActiveMode.READ_ONLY + ), + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQSensorEntity(ThinQEntity, SensorEntity): + """Represent a thinq sensor platform.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: SensorEntityDescription, + property_id: str, + ) -> None: + """Initialize a sensor entity.""" + super().__init__(coordinator, entity_description, property_id) + + if entity_description.device_class == SensorDeviceClass.ENUM: + self._attr_options = self.data.options + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + self._attr_native_value = self.data.value + + if (data_unit := self._get_unit_of_measurement(self.data.unit)) is not None: + # For different from description's unit + self._attr_native_unit_of_measurement = data_unit + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s, options:%s, unit:%s", + self.coordinator.device_name, + self.property_id, + self.data.value, + self.native_value, + self.options, + self.native_unit_of_measurement, + ) diff --git a/homeassistant/components/lg_thinq/strings.json b/homeassistant/components/lg_thinq/strings.json new file mode 100644 index 00000000000000..277e3db3df0188 --- /dev/null +++ b/homeassistant/components/lg_thinq/strings.json @@ -0,0 +1,992 @@ +{ + "config": { + "abort": { + 
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" + }, + "error": { + "token_unauthorized": "The token is invalid or unauthorized." + }, + "step": { + "user": { + "title": "Connect to ThinQ", + "description": "Please enter a ThinQ [PAT(Personal Access Token)]({pat_url}) created with your LG ThinQ account.", + "data": { + "access_token": "Personal Access Token", + "country": "Country" + } + } + } + }, + "entity": { + "switch": { + "auto_mode": { + "name": "Auto mode" + }, + "express_mode": { + "name": "Ice plus" + }, + "hot_water_mode": { + "name": "Hot water" + }, + "humidity_warm_mode": { + "name": "Warm mist" + }, + "hygiene_dry_mode": { + "name": "Drying mode" + }, + "mood_lamp_state": { + "name": "Mood light" + }, + "operation_power": { + "name": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]" + }, + "optimal_humidity": { + "name": "Ventilation" + }, + "power_save_enabled": { + "name": "Energy saving" + }, + "rapid_freeze": { + "name": "Quick freeze" + }, + "sleep_mode": { + "name": "Sleep mode" + }, + "uv_nano": { + "name": "UVnano" + }, + "warm_mode": { + "name": "Heating" + } + }, + "binary_sensor": { + "eco_friendly_mode": { + "name": "Eco friendly" + }, + "power_save_enabled": { + "name": "Power saving mode" + }, + "remote_control_enabled": { + "name": "Remote start" + }, + "remote_control_enabled_for_location": { + "name": "{location} remote start" + }, + "rinse_refill": { + "name": "Rinse refill needed" + }, + "sabbath_mode": { + "name": "Sabbath" + }, + "machine_clean_reminder": { + "name": "Machine clean reminder" + }, + "signal_level": { + "name": "Chime sound" + }, + "clean_light_reminder": { + "name": "Clean indicator light" + }, + "operation_mode": { + "name": "[%key:component::binary_sensor::entity_component::power::name%]" + }, + "one_touch_filter": { + "name": "Fresh air filter" + } + }, + "climate": { + "climate_air_conditioner": { + "state_attributes": { + "fan_mode": { + "state": { + "slow": "Slow", + "low": "Low", + "mid": "Medium", + "high": "High", + "power": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]" + } + }, + "preset_mode": { + "state": { + "air_clean": "Air purify", + "aroma": "Aroma", + "energy_saving": "Energy saving" + } + } + } + } + }, + "event": { + "error": { + "name": "Error", + "state_attributes": { + "event_type": { + "state": { + "block_error": "Cleaning has stopped. 
Check for obstacles",
+ "brush_error": "Moving brush has a problem",
+ "bubble_error": "Bubble error",
+ "child_lock_active_error": "Child lock",
+ "cliff_error": "Fall prevention sensor has an error",
+ "clutch_error": "Clutch error",
+ "compressor_error": "Compressor error",
+ "dispensing_error": "Dispenser error",
+ "door_close_error": "Door closed error",
+ "door_lock_error": "Door lock error",
+ "door_open_error": "Door open",
+ "door_sensor_error": "Door sensor error",
+ "drainmotor_error": "Drain error",
+ "dust_full_error": "Dust bin is full and needs to be emptied",
+ "empty_water_alert_error": "Empty water",
+ "fan_motor_error": "Fan lock error",
+ "filter_clogging_error": "Filter error",
+ "frozen_error": "Freezing detection error",
+ "heater_circuit_error": "Heater circuit failure",
+ "high_power_supply_error": "Power supply error",
+ "high_temperature_detection_error": "High-temperature error",
+ "inner_lid_open_error": "Lid open error",
+ "ir_sensor_error": "IR sensor error",
+ "le_error": "LE error",
+ "le2_error": "LE2 error",
+ "left_wheel_error": "Left wheel has a problem",
+ "locked_motor_error": "Driver motor error",
+ "mop_error": "Cannot operate properly without the mop attached",
+ "motor_error": "Motor trouble",
+ "motor_lock_error": "Motor lock error",
+ "move_error": "The wheels are not touching the floor",
+ "need_water_drain": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::empty_water_alert_error%]",
+ "need_water_replenishment": "Fill water",
+ "no_battery_error": "Robot cleaner's battery is low",
+ "no_dust_bin_error": "Dust bin is not installed",
+ "no_filter_error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::filter_clogging_error%]",
+ "out_of_balance_error": "Out of balance load",
+ "overfill_error": "Overfill error",
+ "part_malfunction_error": "AIE error",
+ "power_code_connection_error": "Power cord connection error",
+ "power_fail_error": "Power failure",
+ "right_wheel_error": "Right wheel has a problem",
+ "stack_error": "Stacking error",
+ "steam_heat_error": "Steam heater error",
+ "suction_blocked_error": "Suction motor is clogged",
+ "temperature_sensor_error": "Thermistor error",
+ "time_to_run_the_tub_clean_cycle_error": "Tub clean recommendation",
+ "timeout_error": "Timeout error",
+ "turbidity_sensor_error": "Turbidity sensor error",
+ "unable_to_lock_error": "Door lock error",
+ "unbalanced_load_error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::out_of_balance_error%]",
+ "unknown_error": "Product requires attention",
+ "vibration_sensor_error": "Vibration sensor error",
+ "water_drain_error": "Water drain error",
+ "water_leakage_error": "Water leakage problem",
+ "water_leaks_error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::water_leakage_error%]",
+ "water_level_sensor_error": "Water sensor error",
+ "water_supply_error": "Water supply error"
+ }
+ }
+ }
+ },
+ "notification": {
+ "name": "Notification",
+ "state_attributes": {
+ "event_type": {
+ "state": {
+ "charging_is_complete": "Charging is completed",
+ "cleaning_is_complete": "Cycle is finished",
+ "cleaning_is_completed": "Cleaning is completed",
+ "cleaning_is_failed": "Cleaning has failed",
+ "cooking_is_complete": "Turned off",
+ "door_is_open": "The door is open",
+ "drying_failed": "An error has occurred in the dryer",
+ "drying_is_complete": "Drying is completed",
+ "error_during_cleaning": "Cleaning
stopped due to an error",
+ "error_during_washing": "An error has occurred in the washing machine",
+ "error_has_occurred": "An error has occurred",
+ "frozen_is_complete": "Ice plus is done",
+ "homeguard_is_stopped": "Home guard has stopped",
+ "lack_of_water": "There is no water in the water tank",
+ "motion_is_detected": "A photograph is sent as movement is detected during home guard",
+ "need_to_check_location": "Location check is required",
+ "pollution_is_high": "Air status is rapidly becoming bad",
+ "preheating_is_complete": "Preheating is done",
+ "rinse_is_not_enough": "Add rinse aid for better drying performance",
+ "salt_refill_is_needed": "Add salt for better softening performance",
+ "scheduled_cleaning_starts": "Scheduled cleaning starts",
+ "styling_is_complete": "Styling is completed",
+ "time_to_change_filter": "It is time to replace the filter",
+ "time_to_change_water_filter": "You need to replace the water filter",
+ "time_to_clean": "Self-cleaning is needed",
+ "time_to_clean_filter": "It is time to clean the filter",
+ "timer_is_complete": "Timer has been completed",
+ "washing_is_complete": "Washing is completed",
+ "water_is_full": "Water is full",
+ "water_leak_has_occurred": "The dishwasher has detected a water leak"
+ }
+ }
+ }
+ },
+ "number": {
+ "target_temperature": {
+ "name": "[%key:component::sensor::entity_component::temperature::name%]"
+ },
+ "target_temperature_for_location": {
+ "name": "{location} temperature"
+ },
+ "light_status": {
+ "name": "Light"
+ },
+ "fan_speed": {
+ "name": "Fan"
+ },
+ "lamp_brightness": {
+ "name": "[%key:component::lg_thinq::entity::number::light_status::name%]"
+ },
+ "wind_temperature": {
+ "name": "Wind temperature"
+ },
+ "relative_hour_to_start": {
+ "name": "Schedule turn-on"
+ },
+ "relative_hour_to_start_for_location": {
+ "name": "{location} schedule turn-on"
+ },
+ "relative_hour_to_start_wm": {
+ "name": "Delay starts in"
+ },
+ "relative_hour_to_start_wm_for_location": {
+ "name": "{location} delay starts in"
+ },
+ "relative_hour_to_stop": {
+ "name": "Schedule turn-off"
+ },
+ "relative_hour_to_stop_for_location": {
+ "name": "{location} schedule turn-off"
+ },
+ "relative_hour_to_stop_wm": {
+ "name": "Delay ends in"
+ },
+ "relative_hour_to_stop_wm_for_location": {
+ "name": "{location} delay ends in"
+ },
+ "sleep_timer_relative_hour_to_stop": {
+ "name": "Sleep timer"
+ },
+ "sleep_timer_relative_hour_to_stop_for_location": {
+ "name": "{location} sleep timer"
+ },
+ "target_humidity": {
+ "name": "Target humidity"
+ }
+ },
+ "sensor": {
+ "odor_level": {
+ "name": "Odor",
+ "state": {
+ "invalid": "Invalid",
+ "weak": "Weak",
+ "normal": "Normal",
+ "strong": "Strong",
+ "very_strong": "Very strong"
+ }
+ },
+ "current_temperature": {
+ "name": "Current temperature"
+ },
+ "temperature": {
+ "name": "Temperature"
+ },
+ "total_pollution_level": {
+ "name": "Overall air quality",
+ "state": {
+ "invalid": "Invalid",
+ "good": "Good",
+ "normal": "Moderate",
+ "bad": "Unhealthy",
+ "very_bad": "Poor"
+ }
+ },
+ "monitoring_enabled": {
+ "name": "Air quality sensor",
+ "state": {
+ "on_working": "Turns on with product",
+ "always": "Always on"
+ }
+ },
+ "growth_mode": {
+ "name": "Mode",
+ "state": {
+ "standard": "Auto",
+ "ext_leaf": "Vegetables",
+ "ext_herb": "Herbs",
+ "ext_flower": "Flowers",
+ "ext_expert": "Custom growing mode"
+ }
+ },
+ "growth_mode_for_location": {
+ "name": "{location} mode",
+ "state": {
+ "standard": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]",
+ "ext_leaf": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_leaf%]", + "ext_herb": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_herb%]", + "ext_flower": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_flower%]", + "ext_expert": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::ext_expert%]" + } + }, + "wind_volume_for_location": { + "name": "{location} wind speed" + }, + "brightness": { + "name": "Lighting intensity" + }, + "brightness_for_location": { + "name": "{location} lighting intensity" + }, + "duration": { + "name": "Lighting duration" + }, + "duration_for_location": { + "name": "{location} lighting duration" + }, + "day_target_temperature": { + "name": "Day growth temperature" + }, + "day_target_temperature_for_location": { + "name": "{location} day growth temperature" + }, + "night_target_temperature": { + "name": "Night growth temperature" + }, + "night_target_temperature_for_location": { + "name": "{location} night growth temperature" + }, + "temperature_state": { + "name": "[%key:component::sensor::entity_component::temperature::name%]", + "state": { + "high": "High", + "normal": "Good", + "low": "Low" + } + }, + "temperature_state_for_location": { + "name": "[%key:component::lg_thinq::entity::number::target_temperature_for_location::name%]", + "state": { + "high": "[%key:component::lg_thinq::entity::sensor::temperature_state::state::high%]", + "normal": "[%key:component::lg_thinq::entity::sensor::temperature_state::state::normal%]", + "low": "[%key:component::lg_thinq::entity::sensor::temperature_state::state::low%]" + } + }, + "current_state": { + "name": "Current status", + "state": { + "add_drain": "Filling", + "as_pop_up": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "cancel": "Cancel", + "carbonation": "Carbonation", + "change_condition": "Settings Change", + "charging": "Charging", + "charging_complete": "Charging completed", + "checking_turbidity": "Detecting soil level", + "cleaning": "Cleaning", + "cleaning_is_done": "Cleaning is done", + "complete": "Done", + "cook": "Cooking", + "cook_complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "cooking_in_progress": "[%key:component::lg_thinq::entity::sensor::current_state::state::cook%]", + "cool_down": "Cool down", + "cooling": "Cooling", + "detecting": "Detecting", + "detergent_amount": "Providing the info about the amount of detergent", + "diagnosis": "Smart diagnosis is in progress", + "dispensing": "Auto dispensing", + "display_loadsize": "Load size", + "done": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "drying": "Drying", + "during_aging": "Aging", + "during_fermentation": "Fermentation", + "end": "Finished", + "end_cooling": "[%key:component::lg_thinq::entity::sensor::current_state::state::drying%]", + "error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "extracting_capsule": "Capsule brewing", + "extraction_mode": "Storing", + "firmware": "Updating firmware", + "fota": "Updating", + "frozen_prevent_initial": "Freeze protection standby", + "frozen_prevent_running": "Freeze protection in progress", + "frozen_prevent_pause": "Freeze protection paused", + "homing": "Moving", + "initial": "[%key:common::state::standby%]", + "initializing": "[%key:common::state::standby%]", + "lock": "Control lock", + "macrosector": "Remote is in use", + 
"melting": "Wort dissolving", + "monitoring_detecting": "HomeGuard is active", + "monitoring_moving": "Going to the starting point", + "monitoring_positioning": "Setting homeguard start point", + "night_dry": "Night dry", + "oven_setting": "Cooktop connected", + "pause": "[%key:common::state::paused%]", + "paused": "[%key:common::state::paused%]", + "power_fail": "Power fail", + "power_on": "[%key:common::state::on%]", + "power_off": "[%key:common::state::off%]", + "preference": "Setting", + "preheat": "Preheating", + "preheat_complete": "[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "preheating": "[%key:component::lg_thinq::entity::sensor::current_state::state::preheat%]", + "preheating_is_done": "[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "prepareing_fermentation": "Preparing now", + "presteam": "Ready to steam", + "prewash": "Prewashing", + "proofing": "Proofing", + "refreshing": "Refreshing", + "reservation": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "reserved": "Delay set", + "rinse_hold": "Waiting to rinse", + "rinsing": "Rinsing", + "running": "Running", + "running_end": "Complete", + "setdate": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "shoes_module": "Drying shoes", + "sleep": "In sleep mode", + "smart_grid_run": "Running smart grid", + "soaking": "Soak", + "softening": "Softener", + "spinning": "Spinning", + "stay": "Refresh", + "standby": "[%key:common::state::standby%]", + "steam": "Refresh", + "steam_softening": "Steam softening", + "sterilize": "Sterilize", + "temperature_stabilization": "Temperature adjusting", + "working": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning%]", + "wrinkle_care": "Wrinkle care" + } + }, + "current_state_for_location": { + "name": "{location} current status", + "state": { + "add_drain": "[%key:component::lg_thinq::entity::sensor::current_state::state::add_drain%]", + "as_pop_up": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]", + "carbonation": "[%key:component::lg_thinq::entity::sensor::current_state::state::carbonation%]", + "change_condition": "[%key:component::lg_thinq::entity::sensor::current_state::state::change_condition%]", + "charging": "[%key:component::lg_thinq::entity::sensor::current_state::state::charging%]", + "charging_complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::charging_complete%]", + "checking_turbidity": "[%key:component::lg_thinq::entity::sensor::current_state::state::checking_turbidity%]", + "cleaning": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning%]", + "cleaning_is_done": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning_is_done%]", + "complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "cook": "[%key:component::lg_thinq::entity::sensor::current_state::state::cook%]", + "cook_complete": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "cooking_in_progress": "[%key:component::lg_thinq::entity::sensor::current_state::state::cook%]", + "cool_down": "[%key:component::lg_thinq::entity::sensor::current_state::state::cool_down%]", + "cooling": 
"[%key:component::lg_thinq::entity::sensor::current_state::state::cooling%]", + "detecting": "[%key:component::lg_thinq::entity::sensor::current_state::state::detecting%]", + "detergent_amount": "[%key:component::lg_thinq::entity::sensor::current_state::state::detergent_amount%]", + "diagnosis": "[%key:component::lg_thinq::entity::sensor::current_state::state::diagnosis%]", + "dispensing": "[%key:component::lg_thinq::entity::sensor::current_state::state::dispensing%]", + "display_loadsize": "[%key:component::lg_thinq::entity::sensor::current_state::state::display_loadsize%]", + "done": "[%key:component::lg_thinq::entity::sensor::current_state::state::complete%]", + "drying": "[%key:component::lg_thinq::entity::sensor::current_state::state::drying%]", + "during_aging": "[%key:component::lg_thinq::entity::sensor::current_state::state::during_aging%]", + "during_fermentation": "[%key:component::lg_thinq::entity::sensor::current_state::state::during_fermentation%]", + "end": "[%key:component::lg_thinq::entity::sensor::current_state::state::end%]", + "end_cooling": "[%key:component::lg_thinq::entity::sensor::current_state::state::drying%]", + "error": "[%key:component::lg_thinq::entity::event::error::state_attributes::event_type::state::unknown_error%]", + "extracting_capsule": "[%key:component::lg_thinq::entity::sensor::current_state::state::extracting_capsule%]", + "extraction_mode": "[%key:component::lg_thinq::entity::sensor::current_state::state::extraction_mode%]", + "firmware": "[%key:component::lg_thinq::entity::sensor::current_state::state::firmware%]", + "fota": "[%key:component::lg_thinq::entity::sensor::current_state::state::fota%]", + "frozen_prevent_initial": "[%key:component::lg_thinq::entity::sensor::current_state::state::frozen_prevent_initial%]", + "frozen_prevent_running": "[%key:component::lg_thinq::entity::sensor::current_state::state::frozen_prevent_running%]", + "frozen_prevent_pause": "[%key:component::lg_thinq::entity::sensor::current_state::state::frozen_prevent_pause%]", + "homing": "[%key:component::lg_thinq::entity::sensor::current_state::state::homing%]", + "initial": "[%key:common::state::standby%]", + "initializing": "[%key:common::state::standby%]", + "lock": "[%key:component::lg_thinq::entity::sensor::current_state::state::lock%]", + "macrosector": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "melting": "[%key:component::lg_thinq::entity::sensor::current_state::state::melting%]", + "monitoring_detecting": "[%key:component::lg_thinq::entity::sensor::current_state::state::monitoring_detecting%]", + "monitoring_moving": "[%key:component::lg_thinq::entity::sensor::current_state::state::monitoring_moving%]", + "monitoring_positioning": "[%key:component::lg_thinq::entity::sensor::current_state::state::monitoring_positioning%]", + "night_dry": "[%key:component::lg_thinq::entity::sensor::current_state::state::night_dry%]", + "oven_setting": "[%key:component::lg_thinq::entity::sensor::current_state::state::oven_setting%]", + "pause": "[%key:common::state::paused%]", + "paused": "[%key:common::state::paused%]", + "power_fail": "[%key:component::lg_thinq::entity::sensor::current_state::state::power_fail%]", + "power_on": "[%key:common::state::on%]", + "power_off": "[%key:common::state::off%]", + "preference": "[%key:component::lg_thinq::entity::sensor::current_state::state::preference%]", + "preheat": "[%key:component::lg_thinq::entity::sensor::current_state::state::preheat%]", + "preheat_complete": 
"[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "preheating": "[%key:component::lg_thinq::entity::sensor::current_state::state::preheat%]", + "preheating_is_done": "[%key:component::lg_thinq::entity::event::notification::state_attributes::event_type::state::preheating_is_complete%]", + "prepareing_fermentation": "[%key:component::lg_thinq::entity::sensor::current_state::state::prepareing_fermentation%]", + "presteam": "[%key:component::lg_thinq::entity::sensor::current_state::state::presteam%]", + "prewash": "[%key:component::lg_thinq::entity::sensor::current_state::state::prewash%]", + "proofing": "[%key:component::lg_thinq::entity::sensor::current_state::state::proofing%]", + "refreshing": "[%key:component::lg_thinq::entity::sensor::current_state::state::refreshing%]", + "reservation": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "reserved": "[%key:component::lg_thinq::entity::sensor::current_state::state::reserved%]", + "rinse_hold": "[%key:component::lg_thinq::entity::sensor::current_state::state::rinse_hold%]", + "rinsing": "[%key:component::lg_thinq::entity::sensor::current_state::state::rinsing%]", + "running": "[%key:component::lg_thinq::entity::sensor::current_state::state::running%]", + "running_end": "[%key:component::lg_thinq::entity::sensor::current_state::state::running_end%]", + "setdate": "[%key:component::lg_thinq::entity::sensor::current_state::state::macrosector%]", + "shoes_module": "[%key:component::lg_thinq::entity::sensor::current_state::state::shoes_module%]", + "sleep": "[%key:component::lg_thinq::entity::sensor::current_state::state::sleep%]", + "smart_grid_run": "[%key:component::lg_thinq::entity::sensor::current_state::state::smart_grid_run%]", + "soaking": "[%key:component::lg_thinq::entity::sensor::current_state::state::soaking%]", + "softening": "[%key:component::lg_thinq::entity::sensor::current_state::state::softening%]", + "spinning": "[%key:component::lg_thinq::entity::sensor::current_state::state::spinning%]", + "stay": "[%key:component::lg_thinq::entity::sensor::current_state::state::stay%]", + "standby": "[%key:common::state::standby%]", + "steam": "[%key:component::lg_thinq::entity::sensor::current_state::state::steam%]", + "steam_softening": "[%key:component::lg_thinq::entity::sensor::current_state::state::steam_softening%]", + "sterilize": "[%key:component::lg_thinq::entity::sensor::current_state::state::sterilize%]", + "temperature_stabilization": "[%key:component::lg_thinq::entity::sensor::current_state::state::temperature_stabilization%]", + "working": "[%key:component::lg_thinq::entity::sensor::current_state::state::cleaning%]", + "wrinkle_care": "[%key:component::lg_thinq::entity::sensor::current_state::state::wrinkle_care%]" + } + }, + "fresh_air_filter": { + "name": "[%key:component::lg_thinq::entity::binary_sensor::one_touch_filter::name%]", + "state": { + "off": "[%key:common::state::off%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "power": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "replace": "Replace filter", + "smart_power": "Smart safe storage", + "smart_off": "[%key:common::state::off%]", + "smart_on": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::smart_power%]" + } + }, + "filter_lifetime": { + "name": "Filter remaining" + }, + "used_time": { + "name": "Water filter used" + }, + "current_job_mode": { + "name": 
"Operating mode", + "state": { + "air_clean": "Purify", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "clothes_dry": "Laundry", + "edge": "Edge cleaning", + "heat_pump": "Heat pump", + "high": "Power", + "intensive_dry": "Spot", + "macro": "Custom mode", + "mop": "Mop", + "normal": "Normal", + "off": "[%key:common::state::off%]", + "quiet_humidity": "Silent", + "rapid_humidity": "Jet", + "sector_base": "Cell by cell", + "select": "My space", + "smart_humidity": "Smart", + "spot": "Spiral spot mode", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "vacation": "Vacation", + "zigzag": "Zigzag" + } + }, + "current_job_mode_stick_cleaner": { + "name": "Operating mode", + "state": { + "auto": "Low power", + "high": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "mop": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::mop%]", + "normal": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::normal%]", + "off": "[%key:common::state::off%]", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]" + } + }, + "personalization_mode": { + "name": "Personal mode", + "state": { + "auto_inside": "[%key:component::lg_thinq::entity::switch::auto_mode::name%]", + "sleep": "Sleep mode", + "baby": "Baby care mode", + "sick_house": "New Home mode", + "auto_outside": "Interlocking mode", + "pet": "Pet mode", + "cooking": "Cooking mode", + "smoke": "Smoke mode", + "exercise": "Exercise mode", + "others": "Others" + } + }, + "current_dish_washing_course": { + "name": "Current cycle", + "state": { + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "heavy": "Intensive", + "delicate": "Delicate", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "normal": "Normal", + "rinse": "Rinse", + "refresh": "Refresh", + "express": "Express", + "machine_clean": "Machine clean", + "short_mode": "Short mode", + "download_cycle": "Download cycle", + "quick": "Quick", + "steam": "Steam care", + "spray": "Spray", + "eco": "Eco" + } + }, + "rinse_level": { + "name": "Rinse aid dispenser level", + "state": { + "rinselevel_0": "0", + "rinselevel_1": "1", + "rinselevel_2": "2", + "rinselevel_3": "3", + "rinselevel_4": "4" + } + }, + "softening_level": { + "name": "Softening level", + "state": { + "softeninglevel_0": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_0%]", + "softeninglevel_1": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_1%]", + "softeninglevel_2": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_2%]", + "softeninglevel_3": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_3%]", + "softeninglevel_4": "[%key:component::lg_thinq::entity::sensor::rinse_level::state::rinselevel_4%]" + } + }, + "cock_state": { + "name": "[%key:component::lg_thinq::entity::switch::uv_nano::name%]", + "state": { + "cleaning": "In progress", + "normal": "[%key:common::state::standby%]" + } + }, + "sterilizing_state": { + "name": "High-temp sterilization", + "state": { + "off": "[%key:common::state::off%]", + "on": "Sterilizing", + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]" + } + }, + "water_type": { + "name": "Type" + }, + "target_temperature": { + "name": "[%key:component::sensor::entity_component::temperature::name%]", + "state": { + "kimchi": 
"Kimchi", + "off": "[%key:common::state::off%]", + "freezer": "Freezer", + "fridge": "Fridge", + "storage": "Storage", + "meat_fish": "Meat/Fish", + "rice_grain": "Rice/Grain", + "vegetable_fruit": "Vege/Fruit", + "temperature_number": "Number" + } + }, + "target_temperature_for_location": { + "name": "[%key:component::lg_thinq::entity::number::target_temperature_for_location::name%]", + "state": { + "kimchi": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::kimchi%]", + "off": "[%key:common::state::off%]", + "freezer": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::freezer%]", + "fridge": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::fridge%]", + "storage": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::storage%]", + "meat_fish": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::meat_fish%]", + "rice_grain": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::rice_grain%]", + "vegetable_fruit": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::vegetable_fruit%]", + "temperature_number": "[%key:component::lg_thinq::entity::sensor::target_temperature::state::temperature_number%]" + } + }, + "elapsed_day_state": { + "name": "Brewing period" + }, + "elapsed_day_total": { + "name": "Brewing duration" + }, + "recipe_name": { + "name": "Homebrew recipe", + "state": { + "ipa": "IPA", + "pale_ale": "Pale ale", + "stout": "Stout", + "wheat": "Wheat", + "pilsner": "Pilsner", + "red_ale": "Red ale", + "my_recipe": "My recipe" + } + }, + "wort_info": { + "name": "Wort", + "state": { + "hoppy": "Hoppy", + "deep_gold": "DeepGold", + "wheat": "Wheat", + "dark": "Dark" + } + }, + "yeast_info": { + "name": "Yeast", + "state": { + "american_ale": "American ale", + "english_ale": "English ale", + "lager": "Lager", + "weizen": "Weizen" + } + }, + "hop_oil_info": { + "name": "Hops" + }, + "flavor_info": { + "name": "Flavor" + }, + "beer_remain": { + "name": "Recipe progress" + }, + "battery_level": { + "name": "Battery", + "state": { + "high": "Full", + "mid": "Medium", + "low": "Low", + "warning": "Empty" + } + }, + "relative_to_start": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start::name%]" + }, + "relative_to_start_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_for_location::name%]" + }, + "relative_to_start_wm": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_wm::name%]" + }, + "relative_to_start_wm_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_wm_for_location::name%]" + }, + "relative_to_stop": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop::name%]" + }, + "relative_to_stop_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_for_location::name%]" + }, + "relative_to_stop_wm": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_wm::name%]" + }, + "relative_to_stop_wm_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_wm_for_location::name%]" + }, + "sleep_timer_relative_to_stop": { + "name": "[%key:component::lg_thinq::entity::number::sleep_timer_relative_hour_to_stop::name%]" + }, + "sleep_timer_relative_to_stop_for_location": { + "name": "[%key:component::lg_thinq::entity::number::sleep_timer_relative_hour_to_stop_for_location::name%]" + }, + 
"absolute_to_start": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start::name%]" + }, + "absolute_to_start_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_start_for_location::name%]" + }, + "absolute_to_stop": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop::name%]" + }, + "absolute_to_stop_for_location": { + "name": "[%key:component::lg_thinq::entity::number::relative_hour_to_stop_for_location::name%]" + }, + "remain": { + "name": "Remaining time" + }, + "remain_for_location": { + "name": "{location} remaining time" + }, + "running": { + "name": "Running time" + }, + "running_for_location": { + "name": "{location} running time" + }, + "total": { + "name": "Total time" + }, + "total_for_location": { + "name": "{location} total time" + }, + "target": { + "name": "Cook time" + }, + "target_for_location": { + "name": "{location} cook time" + }, + "light_start": { + "name": "Lights on time" + }, + "light_start_for_location": { + "name": "{location} lights on time" + }, + "power_level": { + "name": "Power level" + }, + "power_level_for_location": { + "name": "{location} power level" + } + }, + "select": { + "wind_strength": { + "name": "Speed", + "state": { + "slow": "[%key:component::lg_thinq::entity::climate::climate_air_conditioner::state_attributes::fan_mode::state::slow%]", + "low": "Low", + "mid": "Medium", + "high": "High", + "power": "Turbo", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "wind_1": "Step 1", + "wind_2": "Step 2", + "wind_3": "Step 3", + "wind_4": "Step 4", + "wind_5": "Step 5", + "wind_6": "Step 6", + "wind_7": "Step 7", + "wind_8": "Step 8", + "wind_9": "Step 9", + "wind_10": "Step 10" + } + }, + "monitoring_enabled": { + "name": "[%key:component::lg_thinq::entity::sensor::monitoring_enabled::name%]", + "state": { + "on_working": "[%key:component::lg_thinq::entity::sensor::monitoring_enabled::state::on_working%]", + "always": "[%key:component::lg_thinq::entity::sensor::monitoring_enabled::state::always%]" + } + }, + "current_job_mode": { + "name": "Operating mode", + "state": { + "air_clean": "Purifying", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "baby_care": "[%key:component::lg_thinq::entity::sensor::personalization_mode::state::baby%]", + "circulator": "Booster", + "clean": "Single", + "direct_clean": "Direct mode", + "dual_clean": "Dual", + "fast": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "heat_pump": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::heat_pump%]", + "humidify": "Mist", + "humidify_and_air_clean": "Mist & purifying", + "humidity": "Humid", + "nature_clean": "Natural mode", + "pet_clean": "[%key:component::lg_thinq::entity::sensor::personalization_mode::state::pet%]", + "silent": "Silent", + "sleep": "Sleep", + "smart": "Smart mode", + "space_clean": "Diffusion mode", + "spot_clean": "Wide mode", + "turbo": "[%key:component::lg_thinq::entity::select::wind_strength::state::power%]", + "up_feature": "Additional mode", + "vacation": "Vacation" + } + }, + "operation_mode": { + "name": "Operation", + "state": { + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]", + "power_off": "Power off", + "preheating": "Preheating", + "start": "[%key:common::action::start%]", + "stop": 
"[%key:common::action::stop%]", + "wake_up": "Sleep mode off" + } + }, + "operation_mode_for_location": { + "name": "{location} operation", + "state": { + "cancel": "[%key:component::lg_thinq::entity::sensor::current_state::state::cancel%]", + "power_off": "[%key:component::lg_thinq::entity::select::operation_mode::state::power_off%]", + "preheating": "[%key:component::lg_thinq::entity::select::operation_mode::state::preheating%]", + "start": "[%key:common::action::start%]", + "stop": "[%key:common::action::stop%]", + "wake_up": "[%key:component::lg_thinq::entity::select::operation_mode::state::wake_up%]" + } + }, + "air_clean_operation_mode": { + "name": "[%key:component::lg_thinq::entity::climate::climate_air_conditioner::state_attributes::preset_mode::state::air_clean%]", + "state": { + "start": "[%key:common::action::start%]", + "stop": "[%key:common::action::stop%]" + } + }, + "cook_mode": { + "name": "Cook mode", + "state": { + "bake": "Bake", + "convection_bake": "Convection bake", + "convection_roast": "Convection roast", + "roast": "Roast", + "crisp_convection": "Crisp convection" + } + }, + "cook_mode_for_location": { + "name": "{location} cook mode", + "state": { + "bake": "[%key:component::lg_thinq::entity::select::cook_mode::state::bake%]", + "convection_bake": "[%key:component::lg_thinq::entity::select::cook_mode::state::convection_bake%]", + "convection_roast": "[%key:component::lg_thinq::entity::select::cook_mode::state::convection_roast%]", + "roast": "[%key:component::lg_thinq::entity::select::cook_mode::state::roast%]", + "crisp_convection": "[%key:component::lg_thinq::entity::select::cook_mode::state::crisp_convection%]" + } + }, + "light_brightness": { + "name": "Light" + }, + "wind_angle": { + "name": "Rotation", + "state": { + "off": "[%key:common::state::off%]", + "angle_45": "45°", + "angle_60": "60°", + "angle_90": "90°", + "angle_140": "140°" + } + }, + "display_light": { + "name": "Display brightness", + "state": { + "off": "[%key:common::state::off%]", + "level_1": "Brightness 1", + "level_2": "Brightness 2", + "level_3": "Brightness 3" + } + }, + "fresh_air_filter": { + "name": "[%key:component::lg_thinq::entity::binary_sensor::one_touch_filter::name%]", + "state": { + "off": "[%key:common::state::off%]", + "auto": "[%key:component::lg_thinq::entity::sensor::growth_mode::state::standard%]", + "power": "[%key:component::lg_thinq::entity::sensor::current_job_mode::state::high%]", + "replace": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::replace%]", + "smart_power": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::smart_power%]", + "smart_off": "[%key:common::state::off%]", + "smart_on": "[%key:component::lg_thinq::entity::sensor::fresh_air_filter::state::smart_power%]" + } + }, + "hygiene_dry_mode": { + "name": "[%key:component::lg_thinq::entity::switch::hygiene_dry_mode::name%]", + "state": { + "off": "[%key:common::state::off%]", + "fast": "Fast", + "silent": "Silent", + "normal": "[%key:component::lg_thinq::entity::sensor::current_dish_washing_course::state::delicate%]" + } + } + } + } +} diff --git a/homeassistant/components/lg_thinq/switch.py b/homeassistant/components/lg_thinq/switch.py new file mode 100644 index 00000000000000..25fd7eb8b64f8b --- /dev/null +++ b/homeassistant/components/lg_thinq/switch.py @@ -0,0 +1,228 @@ +"""Support for switch entities.""" + +from __future__ import annotations + +from dataclasses import dataclass +import logging +from typing import Any + +from thinqconnect import DeviceType 
+from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration import ActiveMode + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .entity import ThinQEntity + + +@dataclass(frozen=True, kw_only=True) +class ThinQSwitchEntityDescription(SwitchEntityDescription): + """Describes ThinQ switch entity.""" + + on_key: str | None = None + off_key: str | None = None + + +DEVICE_TYPE_SWITCH_MAP: dict[DeviceType, tuple[ThinQSwitchEntityDescription, ...]] = { + DeviceType.AIR_CONDITIONER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.POWER_SAVE_ENABLED, + translation_key=ThinQProperty.POWER_SAVE_ENABLED, + on_key="true", + off_key="false", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.AIR_PURIFIER_FAN: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.AIR_FAN_OPERATION_MODE, translation_key="operation_power" + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.UV_NANO, + translation_key=ThinQProperty.UV_NANO, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.WARM_MODE, + translation_key=ThinQProperty.WARM_MODE, + on_key="warm_on", + off_key="warm_off", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.AIR_PURIFIER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.AIR_PURIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ), + DeviceType.DEHUMIDIFIER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.DEHUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ), + DeviceType.HUMIDIFIER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.HUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.WARM_MODE, + translation_key="humidity_warm_mode", + on_key="warm_on", + off_key="warm_off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.MOOD_LAMP_STATE, + translation_key=ThinQProperty.MOOD_LAMP_STATE, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.AUTO_MODE, + translation_key=ThinQProperty.AUTO_MODE, + on_key="auto_on", + off_key="auto_off", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.SLEEP_MODE, + translation_key=ThinQProperty.SLEEP_MODE, + on_key="sleep_on", + off_key="sleep_off", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.REFRIGERATOR: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.EXPRESS_MODE, + translation_key=ThinQProperty.EXPRESS_MODE, + on_key="true", + off_key="false", + entity_category=EntityCategory.CONFIG, + ), + ThinQSwitchEntityDescription( + key=ThinQProperty.RAPID_FREEZE, + translation_key=ThinQProperty.RAPID_FREEZE, + on_key="true", + off_key="false", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.SYSTEM_BOILER: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.HOT_WATER_MODE, + translation_key=ThinQProperty.HOT_WATER_MODE, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ), + DeviceType.WINE_CELLAR: ( + ThinQSwitchEntityDescription( + key=ThinQProperty.OPTIMAL_HUMIDITY, + 
translation_key=ThinQProperty.OPTIMAL_HUMIDITY, + on_key="on", + off_key="off", + entity_category=EntityCategory.CONFIG, + ), + ), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for switch platform.""" + entities: list[ThinQSwitchEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_SWITCH_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQSwitchEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx( + description.key, ActiveMode.READ_WRITE + ) + ) + + if entities: + async_add_entities(entities) + + +class ThinQSwitchEntity(ThinQEntity, SwitchEntity): + """Represent a thinq switch platform.""" + + entity_description: ThinQSwitchEntityDescription + _attr_device_class = SwitchDeviceClass.SWITCH + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + if (key := self.entity_description.on_key) is not None: + self._attr_is_on = self.data.value == key + else: + self._attr_is_on = self.data.is_on + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s", + self.coordinator.device_name, + self.property_id, + self.data.is_on, + self.is_on, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch.""" + _LOGGER.debug( + "[%s:%s] async_turn_on id: %s", + self.coordinator.device_name, + self.name, + self.property_id, + ) + if (on_command := self.entity_description.on_key) is not None: + await self.async_call_api( + self.coordinator.api.post(self.property_id, on_command) + ) + else: + await self.async_call_api( + self.coordinator.api.async_turn_on(self.property_id) + ) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch.""" + _LOGGER.debug( + "[%s:%s] async_turn_off id: %s", + self.coordinator.device_name, + self.name, + self.property_id, + ) + if (off_command := self.entity_description.off_key) is not None: + await self.async_call_api( + self.coordinator.api.post(self.property_id, off_command) + ) + else: + await self.async_call_api( + self.coordinator.api.async_turn_off(self.property_id) + ) diff --git a/homeassistant/components/lg_thinq/vacuum.py b/homeassistant/components/lg_thinq/vacuum.py new file mode 100644 index 00000000000000..138b9ba55bf0bd --- /dev/null +++ b/homeassistant/components/lg_thinq/vacuum.py @@ -0,0 +1,172 @@ +"""Support for vacuum entities.""" + +from __future__ import annotations + +from enum import StrEnum +import logging + +from thinqconnect import DeviceType +from thinqconnect.integration import ExtendedProperty + +from homeassistant.components.vacuum import ( + STATE_CLEANING, + STATE_DOCKED, + STATE_ERROR, + STATE_RETURNING, + StateVacuumEntity, + StateVacuumEntityDescription, + VacuumEntityFeature, +) +from homeassistant.const import STATE_IDLE, STATE_PAUSED +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ThinqConfigEntry +from .entity import ThinQEntity + +DEVICE_TYPE_VACUUM_MAP: dict[DeviceType, tuple[StateVacuumEntityDescription, ...]] = { + DeviceType.ROBOT_CLEANER: ( + StateVacuumEntityDescription( + key=ExtendedProperty.VACUUM, + name=None, + ), + ), +} + + +class State(StrEnum): + """State of device.""" + + HOMING = "homing" + PAUSE = "pause" + RESUME = "resume" + SLEEP = "sleep" + START = "start" + WAKE_UP = "wake_up" + + +ROBOT_STATUS_TO_HA = { + "charging": STATE_DOCKED, + "diagnosis": STATE_IDLE, + "homing": STATE_RETURNING, + "initializing": STATE_IDLE, + "macrosector": STATE_IDLE, + "monitoring_detecting": STATE_IDLE, + "monitoring_moving": STATE_IDLE, + "monitoring_positioning": STATE_IDLE, + "pause": STATE_PAUSED, + "reservation": STATE_IDLE, + "setdate": STATE_IDLE, + "sleep": STATE_IDLE, + "standby": STATE_IDLE, + "working": STATE_CLEANING, + "error": STATE_ERROR, +} +ROBOT_BATT_TO_HA = { + "moveless": 5, + "dock_level": 5, + "low": 30, + "mid": 50, + "high": 90, + "full": 100, + "over_charge": 100, +} +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for vacuum platform.""" + entities: list[ThinQStateVacuumEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + descriptions := DEVICE_TYPE_VACUUM_MAP.get( + coordinator.api.device.device_type + ) + ) is not None: + for description in descriptions: + entities.extend( + ThinQStateVacuumEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx(description.key) + ) + + if entities: + async_add_entities(entities) + + +class ThinQStateVacuumEntity(ThinQEntity, StateVacuumEntity): + """Represent a thinq vacuum platform.""" + + _attr_supported_features = ( + VacuumEntityFeature.SEND_COMMAND + | VacuumEntityFeature.STATE + | VacuumEntityFeature.BATTERY + | VacuumEntityFeature.START + | VacuumEntityFeature.PAUSE + | VacuumEntityFeature.RETURN_HOME + ) + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + # Update state. + self._attr_state = ROBOT_STATUS_TO_HA[self.data.current_state] + + # Update battery. 
+ if (level := self.data.battery) is not None: + self._attr_battery_level = ( + level if isinstance(level, int) else ROBOT_BATT_TO_HA.get(level, 0) + ) + + _LOGGER.debug( + "[%s:%s] update status: %s -> %s (battery_level=%s)", + self.coordinator.device_name, + self.property_id, + self.data.current_state, + self.state, + self.battery_level, + ) + + async def async_start(self, **kwargs) -> None: + """Start the device.""" + if self.data.current_state == State.SLEEP: + value = State.WAKE_UP + elif self._attr_state == STATE_PAUSED: + value = State.RESUME + else: + value = State.START + + _LOGGER.debug( + "[%s:%s] async_start", self.coordinator.device_name, self.property_id + ) + await self.async_call_api( + self.coordinator.api.async_set_clean_operation_mode(self.property_id, value) + ) + + async def async_pause(self, **kwargs) -> None: + """Pause the device.""" + _LOGGER.debug( + "[%s:%s] async_pause", self.coordinator.device_name, self.property_id + ) + await self.async_call_api( + self.coordinator.api.async_set_clean_operation_mode( + self.property_id, State.PAUSE + ) + ) + + async def async_return_to_base(self, **kwargs) -> None: + """Return device to dock.""" + _LOGGER.debug( + "[%s:%s] async_return_to_base", + self.coordinator.device_name, + self.property_id, + ) + await self.async_call_api( + self.coordinator.api.async_set_clean_operation_mode( + self.property_id, State.HOMING + ) + ) diff --git a/homeassistant/components/lidarr/__init__.py b/homeassistant/components/lidarr/__init__.py index 907c89eb737350..a421a881b69ac2 100644 --- a/homeassistant/components/lidarr/__init__.py +++ b/homeassistant/components/lidarr/__init__.py @@ -16,6 +16,7 @@ from .const import DEFAULT_NAME, DOMAIN from .coordinator import ( + AlbumsDataUpdateCoordinator, DiskSpaceDataUpdateCoordinator, QueueDataUpdateCoordinator, StatusDataUpdateCoordinator, @@ -35,6 +36,7 @@ class LidarrData: queue: QueueDataUpdateCoordinator status: StatusDataUpdateCoordinator wanted: WantedDataUpdateCoordinator + albums: AlbumsDataUpdateCoordinator async def async_setup_entry(hass: HomeAssistant, entry: LidarrConfigEntry) -> bool: @@ -54,6 +56,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LidarrConfigEntry) -> bo queue=QueueDataUpdateCoordinator(hass, host_configuration, lidarr), status=StatusDataUpdateCoordinator(hass, host_configuration, lidarr), wanted=WantedDataUpdateCoordinator(hass, host_configuration, lidarr), + albums=AlbumsDataUpdateCoordinator(hass, host_configuration, lidarr), ) for field in fields(data): coordinator = getattr(data, field.name) diff --git a/homeassistant/components/lidarr/config_flow.py b/homeassistant/components/lidarr/config_flow.py index bc7a40c976ee7a..dfbfff2cdfd8d7 100644 --- a/homeassistant/components/lidarr/config_flow.py +++ b/homeassistant/components/lidarr/config_flow.py @@ -10,12 +10,11 @@ from aiopyarr.lidarr_client import LidarrClient import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . 
import LidarrConfigEntry from .const import DEFAULT_NAME, DOMAIN @@ -24,16 +23,10 @@ class LidarrConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the flow.""" - self.entry: LidarrConfigEntry | None = None - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -52,10 +45,7 @@ async def async_step_user( """Handle a flow initiated by the user.""" errors = {} - if user_input is None: - user_input = dict(self.entry.data) if self.entry else None - - else: + if user_input is not None: try: if result := await validate_input(self.hass, user_input): user_input[CONF_API_KEY] = result[1] @@ -70,17 +60,18 @@ async def async_step_user( except exceptions.ArrException: errors = {"base": "unknown"} if not errors: - if self.entry: - self.hass.config_entries.async_update_entry( - self.entry, data=user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - - return self.async_abort(reason="reauth_successful") return self.async_create_entry(title=DEFAULT_NAME, data=user_input) - user_input = user_input or {} + if user_input is None: + user_input = {} + if self.source == SOURCE_REAUTH: + user_input = dict(self._get_reauth_entry().data) + return self.async_show_form( step_id="user", data_schema=vol.Schema( diff --git a/homeassistant/components/lidarr/coordinator.py b/homeassistant/components/lidarr/coordinator.py index 2f18e4f0ebb79b..1010f708748318 100644 --- a/homeassistant/components/lidarr/coordinator.py +++ b/homeassistant/components/lidarr/coordinator.py @@ -17,7 +17,7 @@ from .const import DEFAULT_MAX_RECORDS, DOMAIN, LOGGER -T = TypeVar("T", bound=list[LidarrRootFolder] | LidarrQueue | str | LidarrAlbum) +T = TypeVar("T", bound=list[LidarrRootFolder] | LidarrQueue | str | LidarrAlbum | int) class LidarrDataUpdateCoordinator(DataUpdateCoordinator[T], Generic[T], ABC): @@ -96,3 +96,11 @@ async def _fetch_data(self) -> LidarrAlbum: LidarrAlbum, await self.api_client.async_get_wanted(page_size=DEFAULT_MAX_RECORDS), ) + + +class AlbumsDataUpdateCoordinator(LidarrDataUpdateCoordinator[int]): + """Albums update coordinator.""" + + async def _fetch_data(self) -> int: + """Fetch the album data.""" + return len(cast(list[LidarrAlbum], await self.api_client.async_get_albums())) diff --git a/homeassistant/components/lidarr/sensor.py b/homeassistant/components/lidarr/sensor.py index e7ea1027ff07d3..b02361e65ca12c 100644 --- a/homeassistant/components/lidarr/sensor.py +++ b/homeassistant/components/lidarr/sensor.py @@ -85,7 +85,7 @@ class LidarrSensorEntityDescription( "queue": LidarrSensorEntityDescription[LidarrQueue]( key="queue", translation_key="queue", - native_unit_of_measurement="Albums", + native_unit_of_measurement="albums", value_fn=lambda data, _: data.totalRecords, state_class=SensorStateClass.TOTAL, attributes_fn=lambda data: {i.title: queue_str(i) for i in data.records}, @@ -93,7 +93,7 @@ class LidarrSensorEntityDescription( "wanted": LidarrSensorEntityDescription[LidarrQueue]( key="wanted", translation_key="wanted", - native_unit_of_measurement="Albums", + native_unit_of_measurement="albums", value_fn=lambda data, _: data.totalRecords, state_class=SensorStateClass.TOTAL, 
entity_registry_enabled_default=False, @@ -101,6 +101,14 @@ class LidarrSensorEntityDescription( album.title: album.artist.artistName for album in data.records }, ), + "albums": LidarrSensorEntityDescription[int]( + key="albums", + translation_key="albums", + native_unit_of_measurement="albums", + value_fn=lambda data, _: data, + state_class=SensorStateClass.TOTAL, + entity_registry_enabled_default=False, + ), } diff --git a/homeassistant/components/lidarr/strings.json b/homeassistant/components/lidarr/strings.json index bbe4b19db25d3a..68e9c39531929f 100644 --- a/homeassistant/components/lidarr/strings.json +++ b/homeassistant/components/lidarr/strings.json @@ -39,6 +39,9 @@ }, "wanted": { "name": "Wanted" + }, + "albums": { + "name": "Albums" } } } diff --git a/homeassistant/components/lifx/strings.json b/homeassistant/components/lifx/strings.json index 68f9e31aabd36f..19d86e57f09876 100644 --- a/homeassistant/components/lifx/strings.json +++ b/homeassistant/components/lifx/strings.json @@ -26,7 +26,8 @@ "abort": { "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "entity": { diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 0bdabf26ff4cac..37ee6fe88fdead 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -408,7 +408,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: def preprocess_data(data: dict[str, Any]) -> VolDictType: """Preprocess the service data.""" base: VolDictType = { - entity_field: data.pop(entity_field) + entity_field: data.pop(entity_field) # type: ignore[arg-type] for entity_field in cv.ENTITY_SERVICE_FIELDS if entity_field in data } diff --git a/homeassistant/components/linear_garage_door/config_flow.py b/homeassistant/components/linear_garage_door/config_flow.py index d1dda97c51370f..2cfd0af6a8f543 100644 --- a/homeassistant/components/linear_garage_door/config_flow.py +++ b/homeassistant/components/linear_garage_door/config_flow.py @@ -11,7 +11,7 @@ from linear_garage_door.errors import InvalidLoginError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -69,7 +69,6 @@ class LinearGarageDoorConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" self.data: dict[str, Sequence[Collection[str]]] = {} - self._reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -93,14 +92,14 @@ async def async_step_user( self.data = info # Check if we are reauthenticating - if self._reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data=self._reauth_entry.data - | {"email": self.data["email"], "password": self.data["password"]}, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + 
data_updates={ + CONF_EMAIL: self.data["email"], + CONF_PASSWORD: self.data["password"], + }, ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return await self.async_step_site() @@ -150,9 +149,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Reauth in case of a password change or other error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/linkplay/__init__.py b/homeassistant/components/linkplay/__init__.py index 808f2f93ce2a7d..918e52a755df94 100644 --- a/homeassistant/components/linkplay/__init__.py +++ b/homeassistant/components/linkplay/__init__.py @@ -4,6 +4,7 @@ from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge +from linkplay.controller import LinkPlayController from linkplay.discovery import linkplay_factory_httpapi_bridge from linkplay.exceptions import LinkPlayRequestException @@ -12,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import PLATFORMS +from .const import CONTROLLER, CONTROLLER_KEY, DOMAIN, PLATFORMS from .utils import async_get_client_session @@ -32,6 +33,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) -> session: ClientSession = await async_get_client_session(hass) bridge: LinkPlayBridge | None = None + # try create a bridge try: bridge = await linkplay_factory_httpapi_bridge(entry.data[CONF_HOST], session) except LinkPlayRequestException as exception: @@ -39,6 +41,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) -> f"Failed to connect to LinkPlay device at {entry.data[CONF_HOST]}" ) from exception + # setup the controller and discover multirooms + controller: LinkPlayController | None = None + hass.data.setdefault(DOMAIN, {}) + if CONTROLLER not in hass.data[DOMAIN]: + controller = LinkPlayController(session) + hass.data[DOMAIN][CONTROLLER_KEY] = controller + else: + controller = hass.data[DOMAIN][CONTROLLER_KEY] + + await controller.add_bridge(bridge) + await controller.discover_multirooms() + + # forward to platforms entry.runtime_data = LinkPlayData(bridge=bridge) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/linkplay/const.py b/homeassistant/components/linkplay/const.py index f531e311f46eb8..a776365e38f57c 100644 --- a/homeassistant/components/linkplay/const.py +++ b/homeassistant/components/linkplay/const.py @@ -1,7 +1,12 @@ """LinkPlay constants.""" +from linkplay.controller import LinkPlayController + from homeassistant.const import Platform +from homeassistant.util.hass_dict import HassKey DOMAIN = "linkplay" +CONTROLLER = "controller" +CONTROLLER_KEY: HassKey[LinkPlayController] = HassKey(CONTROLLER) PLATFORMS = [Platform.MEDIA_PLAYER] DATA_SESSION = "session" diff --git a/homeassistant/components/linkplay/diagnostics.py b/homeassistant/components/linkplay/diagnostics.py new file mode 100644 index 00000000000000..cfc1346aff4ad2 --- /dev/null +++ b/homeassistant/components/linkplay/diagnostics.py @@ -0,0 +1,17 @@ +"""Diagnostics support for Linkplay.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import LinkPlayConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: LinkPlayConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = entry.runtime_data + return {"device_info": data.bridge.to_dict()} diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index dd1e08eda49b07..e74d22b8207e88 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.0.15"], + "requirements": ["python-linkplay==0.0.20"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 8654600ac73299..c29c29785228fd 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -8,7 +8,8 @@ from linkplay.bridge import LinkPlayBridge from linkplay.consts import EqualizerMode, LoopMode, PlayingMode, PlayingStatus -from linkplay.exceptions import LinkPlayException, LinkPlayRequestException +from linkplay.controller import LinkPlayController, LinkPlayMultiroom +from linkplay.exceptions import LinkPlayRequestException import voluptuous as vol from homeassistant.components import media_source @@ -22,18 +23,20 @@ RepeatMode, async_process_play_media_url, ) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_validation as cv, device_registry as dr, entity_platform, + entity_registry as er, ) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utcnow -from . import LinkPlayConfigEntry -from .const import DOMAIN +from . 
import LinkPlayConfigEntry, LinkPlayData +from .const import CONTROLLER_KEY, DOMAIN from .utils import MANUFACTURER_GENERIC, get_info_from_project _LOGGER = logging.getLogger(__name__) @@ -45,6 +48,7 @@ } SOURCE_MAP: dict[PlayingMode, str] = { + PlayingMode.NETWORK: "Wifi", PlayingMode.LINE_IN: "Line In", PlayingMode.BLUETOOTH: "Bluetooth", PlayingMode.OPTICAL: "Optical", @@ -65,6 +69,8 @@ PlayingMode.FM: "FM Radio", PlayingMode.RCA: "RCA", PlayingMode.UDISK: "USB", + PlayingMode.SPOTIFY: "Spotify", + PlayingMode.TIDAL: "Tidal", PlayingMode.FOLLOWER: "Follower", } @@ -197,9 +203,8 @@ async def async_update(self) -> None: try: await self._bridge.player.update_status() self._update_properties() - except LinkPlayException: + except LinkPlayRequestException: self._attr_available = False - raise @exception_wrap async def async_select_source(self, source: str) -> None: @@ -288,7 +293,82 @@ async def async_play_media( @exception_wrap async def async_play_preset(self, preset_number: int) -> None: """Play preset number.""" - await self._bridge.player.play_preset(preset_number) + try: + await self._bridge.player.play_preset(preset_number) + except ValueError as err: + raise HomeAssistantError(err) from err + + @exception_wrap + async def async_media_seek(self, position: float) -> None: + """Seek to a position.""" + await self._bridge.player.seek(round(position)) + + @exception_wrap + async def async_join_players(self, group_members: list[str]) -> None: + """Join `group_members` as a player group with the current player.""" + + controller: LinkPlayController = self.hass.data[DOMAIN][CONTROLLER_KEY] + multiroom = self._bridge.multiroom + if multiroom is None: + multiroom = LinkPlayMultiroom(self._bridge) + + for group_member in group_members: + bridge = self._get_linkplay_bridge(group_member) + if bridge: + await multiroom.add_follower(bridge) + + await controller.discover_multirooms() + + def _get_linkplay_bridge(self, entity_id: str) -> LinkPlayBridge: + """Get linkplay bridge from entity_id.""" + + entity_registry = er.async_get(self.hass) + + # Check for valid linkplay media_player entity + entity_entry = entity_registry.async_get(entity_id) + + if ( + entity_entry is None + or entity_entry.domain != Platform.MEDIA_PLAYER + or entity_entry.platform != DOMAIN + or entity_entry.config_entry_id is None + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_grouping_entity", + translation_placeholders={"entity_id": entity_id}, + ) + + config_entry = self.hass.config_entries.async_get_entry( + entity_entry.config_entry_id + ) + assert config_entry + + # Return bridge + data: LinkPlayData = config_entry.runtime_data + return data.bridge + + @property + def group_members(self) -> list[str]: + """List of players which are grouped together.""" + multiroom = self._bridge.multiroom + if multiroom is not None: + return [multiroom.leader.device.uuid] + [ + follower.device.uuid for follower in multiroom.followers + ] + + return [] + + @exception_wrap + async def async_unjoin_player(self) -> None: + """Remove this player from any group.""" + controller: LinkPlayController = self.hass.data[DOMAIN][CONTROLLER_KEY] + + multiroom = self._bridge.multiroom + if multiroom is not None: + await multiroom.remove_follower(self._bridge) + + await controller.discover_multirooms() def _update_properties(self) -> None: """Update the properties of the media player.""" @@ -308,9 +388,9 @@ def _update_properties(self) -> None: ) self._attr_source = 
SOURCE_MAP.get(self._bridge.player.play_mode, "other") - self._attr_media_position = self._bridge.player.current_position / 1000 + self._attr_media_position = self._bridge.player.current_position_in_seconds self._attr_media_position_updated_at = utcnow() - self._attr_media_duration = self._bridge.player.total_length / 1000 + self._attr_media_duration = self._bridge.player.total_length_in_seconds self._attr_media_artist = self._bridge.player.artist self._attr_media_title = self._bridge.player.title self._attr_media_album_name = self._bridge.player.album diff --git a/homeassistant/components/linkplay/services.yaml b/homeassistant/components/linkplay/services.yaml index 20bc47be7a774a..0d7335a28c85ce 100644 --- a/homeassistant/components/linkplay/services.yaml +++ b/homeassistant/components/linkplay/services.yaml @@ -11,5 +11,4 @@ play_preset: selector: number: min: 1 - max: 10 mode: box diff --git a/homeassistant/components/linkplay/strings.json b/homeassistant/components/linkplay/strings.json index 12870816af78bd..f3495b293e02d1 100644 --- a/homeassistant/components/linkplay/strings.json +++ b/homeassistant/components/linkplay/strings.json @@ -34,5 +34,10 @@ } } } + }, + "exceptions": { + "invalid_grouping_entity": { + "message": "Entity with ID {entity_id} can't be added to the LinkPlay multiroom. Is the entity a LinkPlay media player?" + } } } diff --git a/homeassistant/components/litejet/config_flow.py b/homeassistant/components/litejet/config_flow.py index 19ddf0122c4b3f..9aa0b19c5060a8 100644 --- a/homeassistant/components/litejet/config_flow.py +++ b/homeassistant/components/litejet/config_flow.py @@ -24,10 +24,6 @@ class LiteJetOptionsFlow(OptionsFlow): """Handle LiteJet options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize LiteJet options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -57,9 +53,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Create a LiteJet config entry based upon user input.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: port = user_input[CONF_PORT] @@ -87,4 +80,4 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> LiteJetOptionsFlow: """Get the options flow for this handler.""" - return LiteJetOptionsFlow(config_entry) + return LiteJetOptionsFlow() diff --git a/homeassistant/components/litejet/manifest.json b/homeassistant/components/litejet/manifest.json index 3cff83707f5adf..1df907029a9249 100644 --- a/homeassistant/components/litejet/manifest.json +++ b/homeassistant/components/litejet/manifest.json @@ -8,5 +8,6 @@ "iot_class": "local_push", "loggers": ["pylitejet"], "quality_scale": "platinum", - "requirements": ["pylitejet==0.6.3"] + "requirements": ["pylitejet==0.6.3"], + "single_config_entry": true } diff --git a/homeassistant/components/litejet/strings.json b/homeassistant/components/litejet/strings.json index 398f1a1e5aa824..c55df54c931a81 100644 --- a/homeassistant/components/litejet/strings.json +++ b/homeassistant/components/litejet/strings.json @@ -9,9 +9,6 @@ } } }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" - }, "error": { "open_failed": "Cannot open the specified serial port."
} diff --git a/homeassistant/components/litterrobot/config_flow.py b/homeassistant/components/litterrobot/config_flow.py index 633c6a5a5a2ac6..90f1fcba56d25e 100644 --- a/homeassistant/components/litterrobot/config_flow.py +++ b/homeassistant/components/litterrobot/config_flow.py @@ -43,16 +43,11 @@ async def async_step_reauth_confirm( """Handle user's reauth credentials.""" errors = {} if user_input: - entry_id = self.context["entry_id"] - if entry := self.hass.config_entries.async_get_entry(entry_id): - user_input = user_input | {CONF_USERNAME: self.username} - if not (error := await self._async_validate_input(user_input)): - self.hass.config_entries.async_update_entry( - entry, - data=entry.data | user_input, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + user_input = user_input | {CONF_USERNAME: self.username} + if not (error := await self._async_validate_input(user_input)): + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) errors["base"] = error return self.async_show_form( diff --git a/homeassistant/components/litterrobot/vacuum.py b/homeassistant/components/litterrobot/vacuum.py index a1ed2ea600deb7..f5553bf5d49b77 100644 --- a/homeassistant/components/litterrobot/vacuum.py +++ b/homeassistant/components/litterrobot/vacuum.py @@ -18,7 +18,6 @@ StateVacuumEntityDescription, VacuumEntityFeature, ) -from homeassistant.const import STATE_OFF from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -39,7 +38,7 @@ LitterBoxStatus.DRAWER_FULL_2: STATE_DOCKED, LitterBoxStatus.READY: STATE_DOCKED, LitterBoxStatus.CAT_SENSOR_INTERRUPTED: STATE_PAUSED, - LitterBoxStatus.OFF: STATE_OFF, + LitterBoxStatus.OFF: STATE_DOCKED, } LITTER_BOX_ENTITY = StateVacuumEntityDescription( diff --git a/homeassistant/components/local_calendar/strings.json b/homeassistant/components/local_calendar/strings.json index 387cfdcf092ce2..2b61fc9ab3e3f6 100644 --- a/homeassistant/components/local_calendar/strings.json +++ b/homeassistant/components/local_calendar/strings.json @@ -13,6 +13,9 @@ "description": "You can import events in iCal format (.ics file)." 
} }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, "error": { "invalid_ics_file": "Invalid .ics file" } diff --git a/homeassistant/components/local_file/__init__.py b/homeassistant/components/local_file/__init__.py index 4ad752bbc54887..70144cd070477d 100644 --- a/homeassistant/components/local_file/__init__.py +++ b/homeassistant/components/local_file/__init__.py @@ -1 +1,37 @@ """The local_file component.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_FILE_PATH, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError + +from .const import DOMAIN +from .util import check_file_path_access + +PLATFORMS = [Platform.CAMERA] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Local file from a config entry.""" + file_path: str = entry.options[CONF_FILE_PATH] + if not await hass.async_add_executor_job(check_file_path_access, file_path): + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="not_readable_path", + translation_placeholders={"file_path": file_path}, + ) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Local file config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/local_file/camera.py b/homeassistant/components/local_file/camera.py index 74d887b613fac7..db421bbce1d98b 100644 --- a/homeassistant/components/local_file/camera.py +++ b/homeassistant/components/local_file/camera.py @@ -4,7 +4,6 @@ import logging import mimetypes -import os import voluptuous as vol @@ -12,14 +11,21 @@ PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, Camera, ) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_FILE_PATH, CONF_NAME -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import PlatformNotReady, ServiceValidationError -from homeassistant.helpers import config_validation as cv, entity_platform +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + issue_registry as ir, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import slugify -from .const import DEFAULT_NAME, SERVICE_UPDATE_FILE_PATH +from .const import DEFAULT_NAME, DOMAIN, SERVICE_UPDATE_FILE_PATH +from .util import check_file_path_access _LOGGER = logging.getLogger(__name__) @@ -31,21 +37,12 @@ ) -def check_file_path_access(file_path: str) -> bool: - """Check that filepath given is readable.""" - if not os.access(file_path, os.R_OK): - return False - return True - - -async def async_setup_platform( +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: - """Set up the 
Camera that works with local files.""" - file_path: str = config[CONF_FILE_PATH] + """Set up the Camera for local file from a config entry.""" platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( @@ -56,19 +53,76 @@ async def async_setup_platform( "update_file_path", ) + async_add_entities( + [ + LocalFile( + entry.options[CONF_NAME], + entry.options[CONF_FILE_PATH], + entry.entry_id, + ) + ] + ) + + +async def async_setup_platform( + hass: HomeAssistant, + config: ConfigType, + async_add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Set up the Camera that works with local files.""" + file_path: str = config[CONF_FILE_PATH] + file_path_slug = slugify(file_path) + if not await hass.async_add_executor_job(check_file_path_access, file_path): - raise PlatformNotReady(f"File path {file_path} is not readable") + ir.async_create_issue( + hass, + DOMAIN, + f"no_access_path_{file_path_slug}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + learn_more_url="https://www.home-assistant.io/integrations/local_file/", + severity=ir.IssueSeverity.WARNING, + translation_key="no_access_path", + translation_placeholders={ + "file_path": file_path_slug, + }, + ) + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + learn_more_url="https://www.home-assistant.io/integrations/local_file/", + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Local file", + }, + ) - async_add_entities([LocalFile(config[CONF_NAME], file_path)]) + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config, + ) + ) class LocalFile(Camera): """Representation of a local file camera.""" - def __init__(self, name: str, file_path: str) -> None: + def __init__(self, name: str, file_path: str, unique_id: str) -> None: """Initialize Local File Camera component.""" super().__init__() self._attr_name = name + self._attr_unique_id = unique_id self._file_path = file_path # Set content type of local file content, _ = mimetypes.guess_type(file_path) diff --git a/homeassistant/components/local_file/config_flow.py b/homeassistant/components/local_file/config_flow.py new file mode 100644 index 00000000000000..36a41c03543f57 --- /dev/null +++ b/homeassistant/components/local_file/config_flow.py @@ -0,0 +1,77 @@ +"""Config flow for Local file.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, cast + +import voluptuous as vol + +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowError, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import TextSelector + +from .const import DEFAULT_NAME, DOMAIN +from .util import check_file_path_access + + +async def validate_options( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate options selected.""" + file_path: str = user_input[CONF_FILE_PATH] + if not await handler.parent_handler.hass.async_add_executor_job( + check_file_path_access, file_path + ): + raise SchemaFlowError("not_readable_path") + + handler.parent_handler._async_abort_entries_match( # noqa: SLF001 + {CONF_FILE_PATH: 
user_input[CONF_FILE_PATH]} + ) + + return user_input + + +DATA_SCHEMA_OPTIONS = vol.Schema( + { + vol.Required(CONF_FILE_PATH): TextSelector(), + } +) +DATA_SCHEMA_SETUP = vol.Schema( + { + vol.Optional(CONF_NAME, default=DEFAULT_NAME): TextSelector(), + } +).extend(DATA_SCHEMA_OPTIONS.schema) + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=DATA_SCHEMA_SETUP, + validate_user_input=validate_options, + ), + "import": SchemaFlowFormStep( + schema=DATA_SCHEMA_SETUP, + validate_user_input=validate_options, + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + DATA_SCHEMA_OPTIONS, + validate_user_input=validate_options, + ) +} + + +class LocalFileConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Local file.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return cast(str, options[CONF_NAME]) diff --git a/homeassistant/components/local_file/manifest.json b/homeassistant/components/local_file/manifest.json index 46268ff2a77f3b..0e6e64d17e5cf9 100644 --- a/homeassistant/components/local_file/manifest.json +++ b/homeassistant/components/local_file/manifest.json @@ -2,6 +2,7 @@ "domain": "local_file", "name": "Local File", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_file", "iot_class": "local_polling" } diff --git a/homeassistant/components/local_file/strings.json b/homeassistant/components/local_file/strings.json index 801d85ce1e0b4a..abf31a6f94e79e 100644 --- a/homeassistant/components/local_file/strings.json +++ b/homeassistant/components/local_file/strings.json @@ -1,4 +1,42 @@ { + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "error": { + "not_readable_path": "The provided path to the file cannot be read" + }, + "step": { + "user": { + "data": { + "name": "[%key:common::config_flow::data::name%]", + "file_path": "File path" + }, + "data_description": { + "name": "Name for the created entity.", + "file_path": "The full path to the image file to be displayed. Be sure the path of the file is in the allowed paths; you can read more about this in the documentation." + } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "error": { + "not_readable_path": "[%key:component::local_file::config::error::not_readable_path%]" + }, + "step": { + "init": { + "data": { + "file_path": "[%key:component::local_file::config::step::user::data::file_path%]" + }, + "data_description": { + "file_path": "[%key:component::local_file::config::step::user::data_description::file_path%]" + } + } + } + }, "services": { "update_file_path": { "name": "Updates file path", @@ -6,7 +44,7 @@ "fields": { "file_path": { "name": "File path", - "description": "The full path to the new image file to be displayed." + "description": "[%key:component::local_file::config::step::user::data_description::file_path%]" } } } @@ -15,5 +53,11 @@ "file_path_not_accessible": { "message": "Path {file_path} is not accessible" } + }, + "issues": { + "no_access_path": { + "title": "Incorrect file path", + "description": "While trying to import your configuration, the provided file path {file_path} could not be read.\nPlease update your configuration to a correct file path and restart to fix this issue."
+ } } } diff --git a/homeassistant/components/local_file/util.py b/homeassistant/components/local_file/util.py new file mode 100644 index 00000000000000..9e25bb88678d29 --- /dev/null +++ b/homeassistant/components/local_file/util.py @@ -0,0 +1,10 @@ +"""Utils for local file.""" + +import os + + +def check_file_path_access(file_path: str) -> bool: + """Check that filepath given is readable.""" + if not os.access(file_path, os.R_OK): + return False + return True diff --git a/homeassistant/components/local_ip/config_flow.py b/homeassistant/components/local_ip/config_flow.py index 3a4612d84aa790..6bf9f865489ed0 100644 --- a/homeassistant/components/local_ip/config_flow.py +++ b/homeassistant/components/local_ip/config_flow.py @@ -16,9 +16,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form(step_id="user") diff --git a/homeassistant/components/local_ip/manifest.json b/homeassistant/components/local_ip/manifest.json index 11d86ea0230acc..6a68ed59628966 100644 --- a/homeassistant/components/local_ip/manifest.json +++ b/homeassistant/components/local_ip/manifest.json @@ -5,5 +5,6 @@ "config_flow": true, "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/local_ip", - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true } diff --git a/homeassistant/components/local_ip/strings.json b/homeassistant/components/local_ip/strings.json index a4d9138d88e563..7f7508aa9b3a0d 100644 --- a/homeassistant/components/local_ip/strings.json +++ b/homeassistant/components/local_ip/strings.json @@ -6,9 +6,6 @@ "title": "[%key:component::local_ip::title%]", "description": "[%key:common::config_flow::description::confirm_setup%]" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } } } diff --git a/homeassistant/components/logbook/__init__.py b/homeassistant/components/logbook/__init__.py index 239a52ff7a14d1..2e2ffddac8833c 100644 --- a/homeassistant/components/logbook/__init__.py +++ b/homeassistant/components/logbook/__init__.py @@ -55,7 +55,7 @@ LOG_MESSAGE_SCHEMA = vol.Schema( { vol.Required(ATTR_NAME): cv.string, - vol.Required(ATTR_MESSAGE): cv.template, + vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_DOMAIN): cv.slug, vol.Optional(ATTR_ENTITY_ID): cv.entity_id, } @@ -112,7 +112,6 @@ def log_message(service: ServiceCall) -> None: # away so we use the "logbook" domain domain = DOMAIN - message = message.async_render(parse_result=False) async_log_entry(hass, name, message, domain, entity_id, service.context) frontend.async_register_built_in_panel( diff --git a/homeassistant/components/london_underground/coordinator.py b/homeassistant/components/london_underground/coordinator.py index cf14ad14b43546..29d1e8e2f54a35 100644 --- a/homeassistant/components/london_underground/coordinator.py +++ b/homeassistant/components/london_underground/coordinator.py @@ -24,6 +24,7 @@ def __init__(self, hass: HomeAssistant, data: TubeData) -> None: super().__init__( hass, _LOGGER, + config_entry=None, name=DOMAIN, update_interval=SCAN_INTERVAL, ) diff --git a/homeassistant/components/luftdaten/__init__.py b/homeassistant/components/luftdaten/__init__.py index 9079b0567311e2..37f0f27d2d82f7 100644 --- a/homeassistant/components/luftdaten/__init__.py +++ 
b/homeassistant/components/luftdaten/__init__.py @@ -52,6 +52,7 @@ async def async_update() -> dict[str, float | int]: coordinator: DataUpdateCoordinator[dict[str, Any]] = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{DOMAIN}_{sensor_community.sensor_id}", update_interval=DEFAULT_SCAN_INTERVAL, update_method=async_update, diff --git a/homeassistant/components/luftdaten/strings.json b/homeassistant/components/luftdaten/strings.json index b7d0a90b511d02..ea842f18ebd375 100644 --- a/homeassistant/components/luftdaten/strings.json +++ b/homeassistant/components/luftdaten/strings.json @@ -8,6 +8,9 @@ } } }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "invalid_sensor": "Sensor not available or invalid", diff --git a/homeassistant/components/lupusec/alarm_control_panel.py b/homeassistant/components/lupusec/alarm_control_panel.py index 73aba775a2a19f..4b3d12ad74338a 100644 --- a/homeassistant/components/lupusec/alarm_control_panel.py +++ b/homeassistant/components/lupusec/alarm_control_panel.py @@ -9,14 +9,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -64,16 +59,16 @@ def __init__( ) @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if self._device.is_standby: - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED elif self._device.is_away: - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif self._device.is_home: - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif self._device.is_alarm_triggered: - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED else: state = None return state diff --git a/homeassistant/components/lutron/config_flow.py b/homeassistant/components/lutron/config_flow.py index e14d56fde575c5..6a48e0d4b674df 100644 --- a/homeassistant/components/lutron/config_flow.py +++ b/homeassistant/components/lutron/config_flow.py @@ -26,11 +26,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """First step in the config flow.""" - - # Check if a configuration entry already exists - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: diff --git a/homeassistant/components/lutron/manifest.json b/homeassistant/components/lutron/manifest.json index d9432f77bba39b..82bdfad4774e95 100644 --- a/homeassistant/components/lutron/manifest.json +++ b/homeassistant/components/lutron/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/lutron", "iot_class": "local_polling", "loggers": ["pylutron"], - "requirements": ["pylutron==0.2.15"] + "requirements": ["pylutron==0.2.16"], + "single_config_entry": true } diff --git a/homeassistant/components/lutron/scene.py b/homeassistant/components/lutron/scene.py index 
b66ca08a587e0f..9e8070713a946c 100644 --- a/homeassistant/components/lutron/scene.py +++ b/homeassistant/components/lutron/scene.py @@ -51,4 +51,4 @@ def __init__( def activate(self, **kwargs: Any) -> None: """Activate the scene.""" - self._lutron_device.press() + self._lutron_device.tap() diff --git a/homeassistant/components/lutron/strings.json b/homeassistant/components/lutron/strings.json index 770a453eb9e9f9..b73e0bd15ed0fd 100644 --- a/homeassistant/components/lutron/strings.json +++ b/homeassistant/components/lutron/strings.json @@ -17,9 +17,6 @@ "description": "Please enter the main repeater login information", "title": "Main repeater setup" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "entity": { diff --git a/homeassistant/components/lutron_caseta/manifest.json b/homeassistant/components/lutron_caseta/manifest.json index 776e771b9d3d0e..e96778f0a31a74 100644 --- a/homeassistant/components/lutron_caseta/manifest.json +++ b/homeassistant/components/lutron_caseta/manifest.json @@ -11,6 +11,12 @@ "loggers": ["pylutron_caseta"], "requirements": ["pylutron-caseta==0.21.1"], "zeroconf": [ + { + "type": "_lutron._tcp.local.", + "properties": { + "SYSTYPE": "hwqs*" + } + }, { "type": "_lutron._tcp.local.", "properties": { diff --git a/homeassistant/components/lyric/__init__.py b/homeassistant/components/lyric/__init__.py index b338605a6eac8b..f99adf269999ae 100644 --- a/homeassistant/components/lyric/__init__.py +++ b/homeassistant/components/lyric/__init__.py @@ -95,6 +95,7 @@ async def async_update_data(force_refresh_token: bool = False) -> Lyric: coordinator = DataUpdateCoordinator[Lyric]( hass, _LOGGER, + config_entry=entry, # Name of the data. For logging purposes. name="lyric_coordinator", update_method=async_update_data, diff --git a/homeassistant/components/lyric/api.py b/homeassistant/components/lyric/api.py index c9a424bf8ab185..7399e013b96280 100644 --- a/homeassistant/components/lyric/api.py +++ b/homeassistant/components/lyric/api.py @@ -36,8 +36,7 @@ def __init__( async def async_get_access_token(self): """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] diff --git a/homeassistant/components/lyric/strings.json b/homeassistant/components/lyric/strings.json index 739ad7fad68655..83c65359643144 100644 --- a/homeassistant/components/lyric/strings.json +++ b/homeassistant/components/lyric/strings.json @@ -16,7 +16,8 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/madvr/config_flow.py b/homeassistant/components/madvr/config_flow.py index ea587d11e48481..60f7b8fc481574 100644 --- a/homeassistant/components/madvr/config_flow.py +++ b/homeassistant/components/madvr/config_flow.py @@ -10,7 +10,6 @@ from homeassistant.config_entries import ( SOURCE_RECONFIGURE, - ConfigEntry, ConfigFlow, ConfigFlowResult, ) @@ 
-37,8 +36,6 @@ class MadVRConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -47,16 +44,9 @@ async def async_step_user( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfiguration of the device.""" - self.entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - return await self._handle_config_step(user_input, step_id="reconfigure_confirm") + return await self._handle_config_step(user_input, step_id="reconfigure") async def _handle_config_step( self, user_input: dict[str, Any] | None = None, step_id: str = "user" @@ -80,23 +70,16 @@ async def _handle_config_step( else: _LOGGER.debug("MAC address found: %s", mac) # abort if the detected mac differs from the one in the entry + await self.async_set_unique_id(mac) if self.source == SOURCE_RECONFIGURE: - existing_mac = self.entry.unique_id - if existing_mac != mac: - _LOGGER.debug( - "MAC address changed from %s to %s", existing_mac, mac - ) - # abort - return self.async_abort(reason="set_up_new_device") + self._abort_if_unique_id_mismatch(reason="set_up_new_device") _LOGGER.debug("Reconfiguration done") return self.async_update_reload_and_abort( - entry=self.entry, + entry=self._get_reconfigure_entry(), data={**user_input, CONF_HOST: host, CONF_PORT: port}, - reason="reconfigure_successful", ) # abort if already configured with same mac - await self.async_set_unique_id(mac) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) _LOGGER.debug("Configuration successful") diff --git a/homeassistant/components/madvr/strings.json b/homeassistant/components/madvr/strings.json index 9c7594c68d0435..06851efa2c8d4f 100644 --- a/homeassistant/components/madvr/strings.json +++ b/homeassistant/components/madvr/strings.json @@ -13,7 +13,7 @@ "port": "The port your madVR Envy is listening on. In 99% of cases, leave this as the default." 
} }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure madVR Envy", "description": "Your device needs to be on in order to reconfigure the integation.", "data": { diff --git a/homeassistant/components/manual/alarm_control_panel.py b/homeassistant/components/manual/alarm_control_panel.py index c1910d0dfa1a56..244f38e090232f 100644 --- a/homeassistant/components/manual/alarm_control_panel.py +++ b/homeassistant/components/manual/alarm_control_panel.py @@ -11,6 +11,7 @@ PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.const import ( @@ -21,15 +22,6 @@ CONF_NAME, CONF_TRIGGER_TIME, CONF_UNIQUE_ID, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError @@ -47,6 +39,16 @@ CONF_CODE_TEMPLATE = "code_template" CONF_CODE_ARM_REQUIRED = "code_arm_required" +CONF_ALARM_ARMED_AWAY = "armed_away" +CONF_ALARM_ARMED_CUSTOM_BYPASS = "armed_custom_bypass" +CONF_ALARM_ARMED_HOME = "armed_home" +CONF_ALARM_ARMED_NIGHT = "armed_night" +CONF_ALARM_ARMED_VACATION = "armed_vacation" +CONF_ALARM_ARMING = "arming" +CONF_ALARM_DISARMED = "disarmed" +CONF_ALARM_PENDING = "pending" +CONF_ALARM_TRIGGERED = "triggered" + DEFAULT_ALARM_NAME = "HA Alarm" DEFAULT_DELAY_TIME = datetime.timedelta(seconds=60) DEFAULT_ARMING_TIME = datetime.timedelta(seconds=60) @@ -54,39 +56,46 @@ DEFAULT_DISARM_AFTER_TRIGGER = False SUPPORTED_STATES = [ - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED, ] SUPPORTED_PRETRIGGER_STATES = [ - state for state in SUPPORTED_STATES if state != STATE_ALARM_TRIGGERED + state for state in SUPPORTED_STATES if state != AlarmControlPanelState.TRIGGERED ] SUPPORTED_ARMING_STATES = [ state for state in SUPPORTED_STATES - if state not in (STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED) + if state + not in ( + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.TRIGGERED, + ) ] SUPPORTED_ARMING_STATE_TO_FEATURE = { - STATE_ALARM_ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, - STATE_ALARM_ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, - STATE_ALARM_ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, - STATE_ALARM_ARMED_VACATION: AlarmControlPanelEntityFeature.ARM_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, + AlarmControlPanelState.ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, + AlarmControlPanelState.ARMED_VACATION: AlarmControlPanelEntityFeature.ARM_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, } ATTR_PREVIOUS_STATE = "previous_state" ATTR_NEXT_STATE = "next_state" -def 
_state_validator(config: dict[str, Any]) -> dict[str, Any]: +def _state_validator( + config: dict[AlarmControlPanelState | str, Any], +) -> dict[str, Any]: """Validate the state.""" + state: AlarmControlPanelState for state in SUPPORTED_PRETRIGGER_STATES: if CONF_DELAY_TIME not in config[state]: config[state] = config[state] | {CONF_DELAY_TIME: config[CONF_DELAY_TIME]} @@ -142,26 +151,26 @@ def _state_schema(state: str) -> vol.Schema: vol.Optional( CONF_ARMING_STATES, default=SUPPORTED_ARMING_STATES ): vol.All(cv.ensure_list, [vol.In(SUPPORTED_ARMING_STATES)]), - vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( - STATE_ALARM_ARMED_AWAY + vol.Optional(CONF_ALARM_ARMED_AWAY, default={}): _state_schema( + AlarmControlPanelState.ARMED_AWAY ), - vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( - STATE_ALARM_ARMED_HOME + vol.Optional(CONF_ALARM_ARMED_HOME, default={}): _state_schema( + AlarmControlPanelState.ARMED_HOME ), - vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( - STATE_ALARM_ARMED_NIGHT + vol.Optional(CONF_ALARM_ARMED_NIGHT, default={}): _state_schema( + AlarmControlPanelState.ARMED_NIGHT ), - vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( - STATE_ALARM_ARMED_VACATION + vol.Optional(CONF_ALARM_ARMED_VACATION, default={}): _state_schema( + AlarmControlPanelState.ARMED_VACATION ), - vol.Optional( - STATE_ALARM_ARMED_CUSTOM_BYPASS, default={} - ): _state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS), - vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( - STATE_ALARM_DISARMED + vol.Optional(CONF_ALARM_ARMED_CUSTOM_BYPASS, default={}): _state_schema( + AlarmControlPanelState.ARMED_CUSTOM_BYPASS + ), + vol.Optional(CONF_ALARM_DISARMED, default={}): _state_schema( + AlarmControlPanelState.DISARMED ), - vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( - STATE_ALARM_TRIGGERED + vol.Optional(CONF_ALARM_TRIGGERED, default={}): _state_schema( + AlarmControlPanelState.TRIGGERED ), }, ), @@ -217,25 +226,25 @@ def __init__( config: dict[str, Any], ) -> None: """Init the manual alarm panel.""" - self._state = STATE_ALARM_DISARMED + self._state: AlarmControlPanelState = AlarmControlPanelState.DISARMED self._hass = hass self._attr_name = name self._attr_unique_id = unique_id self._code = code_template or code or None self._attr_code_arm_required = code_arm_required self._disarm_after_trigger = disarm_after_trigger - self._previous_state = self._state + self._previous_state: AlarmControlPanelState = self._state self._state_ts: datetime.datetime = dt_util.utcnow() - self._delay_time_by_state = { + self._delay_time_by_state: dict[AlarmControlPanelState, Any] = { state: config[state][CONF_DELAY_TIME] for state in SUPPORTED_PRETRIGGER_STATES } - self._trigger_time_by_state = { + self._trigger_time_by_state: dict[AlarmControlPanelState, Any] = { state: config[state][CONF_TRIGGER_TIME] for state in SUPPORTED_PRETRIGGER_STATES } - self._arming_time_by_state = { + self._arming_time_by_state: dict[AlarmControlPanelState, Any] = { state: config[state][CONF_ARMING_TIME] for state in SUPPORTED_ARMING_STATES } @@ -246,11 +255,11 @@ def __init__( ] @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" - if self._state == STATE_ALARM_TRIGGERED: + if self._state == AlarmControlPanelState.TRIGGERED: if self._within_pending_time(self._state): - return STATE_ALARM_PENDING + return AlarmControlPanelState.PENDING trigger_time: datetime.timedelta = 
self._trigger_time_by_state[ self._previous_state ] @@ -258,39 +267,42 @@ def state(self) -> str: self._state_ts + self._pending_time(self._state) + trigger_time ) < dt_util.utcnow(): if self._disarm_after_trigger: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED self._state = self._previous_state return self._state if self._state in SUPPORTED_ARMING_STATES and self._within_arming_time( self._state ): - return STATE_ALARM_ARMING + return AlarmControlPanelState.ARMING return self._state @property - def _active_state(self) -> str: + def _active_state(self) -> AlarmControlPanelState: """Get the current state.""" - if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): + if self.state in ( + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + ): return self._previous_state return self._state - def _arming_time(self, state: str) -> datetime.timedelta: + def _arming_time(self, state: AlarmControlPanelState) -> datetime.timedelta: """Get the arming time.""" arming_time: datetime.timedelta = self._arming_time_by_state[state] return arming_time - def _pending_time(self, state: str) -> datetime.timedelta: + def _pending_time(self, state: AlarmControlPanelState) -> datetime.timedelta: """Get the pending time.""" delay_time: datetime.timedelta = self._delay_time_by_state[self._previous_state] return delay_time - def _within_arming_time(self, state: str) -> bool: + def _within_arming_time(self, state: AlarmControlPanelState) -> bool: """Get if the action is in the arming time window.""" return self._state_ts + self._arming_time(state) > dt_util.utcnow() - def _within_pending_time(self, state: str) -> bool: + def _within_pending_time(self, state: AlarmControlPanelState) -> bool: """Get if the action is in the pending time window.""" return self._state_ts + self._pending_time(state) > dt_util.utcnow() @@ -305,35 +317,35 @@ def code_format(self) -> CodeFormat | None: async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._async_validate_code(code, STATE_ALARM_DISARMED) - self._state = STATE_ALARM_DISARMED + self._async_validate_code(code, AlarmControlPanelState.DISARMED) + self._state = AlarmControlPanelState.DISARMED self._state_ts = dt_util.utcnow() self.async_write_ha_state() async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_HOME) - self._async_update_state(STATE_ALARM_ARMED_HOME) + self._async_validate_code(code, AlarmControlPanelState.ARMED_HOME) + self._async_update_state(AlarmControlPanelState.ARMED_HOME) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_AWAY) - self._async_update_state(STATE_ALARM_ARMED_AWAY) + self._async_validate_code(code, AlarmControlPanelState.ARMED_AWAY) + self._async_update_state(AlarmControlPanelState.ARMED_AWAY) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_NIGHT) - self._async_update_state(STATE_ALARM_ARMED_NIGHT) + self._async_validate_code(code, AlarmControlPanelState.ARMED_NIGHT) + self._async_update_state(AlarmControlPanelState.ARMED_NIGHT) async def async_alarm_arm_vacation(self, code: str | None = None) -> None: """Send arm vacation command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_VACATION) - self._async_update_state(STATE_ALARM_ARMED_VACATION) + 
self._async_validate_code(code, AlarmControlPanelState.ARMED_VACATION) + self._async_update_state(AlarmControlPanelState.ARMED_VACATION) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Send arm custom bypass command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_CUSTOM_BYPASS) - self._async_update_state(STATE_ALARM_ARMED_CUSTOM_BYPASS) + self._async_validate_code(code, AlarmControlPanelState.ARMED_CUSTOM_BYPASS) + self._async_update_state(AlarmControlPanelState.ARMED_CUSTOM_BYPASS) async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command. @@ -343,9 +355,9 @@ async def async_alarm_trigger(self, code: str | None = None) -> None: """ if not self._trigger_time_by_state[self._active_state]: return - self._async_update_state(STATE_ALARM_TRIGGERED) + self._async_update_state(AlarmControlPanelState.TRIGGERED) - def _async_update_state(self, state: str) -> None: + def _async_update_state(self, state: AlarmControlPanelState) -> None: """Update the state.""" if self._state == state: return @@ -358,7 +370,7 @@ def _async_update_state(self, state: str) -> None: def _async_set_state_update_events(self) -> None: state = self._state - if state == STATE_ALARM_TRIGGERED: + if state == AlarmControlPanelState.TRIGGERED: pending_time = self._pending_time(state) async_track_point_in_time( self._hass, self.async_scheduled_update, self._state_ts + pending_time @@ -382,7 +394,7 @@ def _async_set_state_update_events(self) -> None: def _async_validate_code(self, code: str | None, state: str) -> None: """Validate given code.""" if ( - state != STATE_ALARM_DISARMED and not self.code_arm_required + state != AlarmControlPanelState.DISARMED and not self.code_arm_required ) or self._code is None: return @@ -405,10 +417,13 @@ def _async_validate_code(self, code: str | None, state: str) -> None: @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): + if self.state in ( + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + ): prev_state: str | None = self._previous_state state: str | None = self._state - elif self.state == STATE_ALARM_TRIGGERED: + elif self.state == AlarmControlPanelState.TRIGGERED: prev_state = self._previous_state state = None else: @@ -429,9 +444,9 @@ async def async_added_to_hass(self) -> None: if next_state := state.attributes.get(ATTR_NEXT_STATE): # If in arming or pending state we record the transition, # not the current state - self._state = next_state + self._state = AlarmControlPanelState(next_state) else: - self._state = state.state + self._state = AlarmControlPanelState(state.state) if prev_state := state.attributes.get(ATTR_PREVIOUS_STATE): self._previous_state = prev_state diff --git a/homeassistant/components/manual_mqtt/alarm_control_panel.py b/homeassistant/components/manual_mqtt/alarm_control_panel.py index 8d447bbc8ac3a8..768690e8ec5a54 100644 --- a/homeassistant/components/manual_mqtt/alarm_control_panel.py +++ b/homeassistant/components/manual_mqtt/alarm_control_panel.py @@ -12,6 +12,7 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.const import ( @@ -22,14 +23,6 @@ CONF_PENDING_TIME, CONF_PLATFORM, CONF_TRIGGER_TIME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - 
STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -54,6 +47,15 @@ CONF_PAYLOAD_ARM_VACATION = "payload_arm_vacation" CONF_PAYLOAD_ARM_CUSTOM_BYPASS = "payload_arm_custom_bypass" +CONF_ALARM_ARMED_AWAY = "armed_away" +CONF_ALARM_ARMED_CUSTOM_BYPASS = "armed_custom_bypass" +CONF_ALARM_ARMED_HOME = "armed_home" +CONF_ALARM_ARMED_NIGHT = "armed_night" +CONF_ALARM_ARMED_VACATION = "armed_vacation" +CONF_ALARM_DISARMED = "disarmed" +CONF_ALARM_PENDING = "pending" +CONF_ALARM_TRIGGERED = "triggered" + DEFAULT_ALARM_NAME = "HA Alarm" DEFAULT_DELAY_TIME = datetime.timedelta(seconds=0) DEFAULT_PENDING_TIME = datetime.timedelta(seconds=60) @@ -67,21 +69,21 @@ DEFAULT_DISARM = "DISARM" SUPPORTED_STATES = [ - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.TRIGGERED, ] SUPPORTED_PRETRIGGER_STATES = [ - state for state in SUPPORTED_STATES if state != STATE_ALARM_TRIGGERED + state for state in SUPPORTED_STATES if state != AlarmControlPanelState.TRIGGERED ] SUPPORTED_PENDING_STATES = [ - state for state in SUPPORTED_STATES if state != STATE_ALARM_DISARMED + state for state in SUPPORTED_STATES if state != AlarmControlPanelState.DISARMED ] ATTR_PRE_PENDING_STATE = "pre_pending_state" @@ -143,26 +145,26 @@ def _state_schema(state): vol.Optional( CONF_DISARM_AFTER_TRIGGER, default=DEFAULT_DISARM_AFTER_TRIGGER ): cv.boolean, - vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( - STATE_ALARM_ARMED_AWAY + vol.Optional(CONF_ALARM_ARMED_AWAY, default={}): _state_schema( + AlarmControlPanelState.ARMED_AWAY ), - vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( - STATE_ALARM_ARMED_HOME + vol.Optional(CONF_ALARM_ARMED_HOME, default={}): _state_schema( + AlarmControlPanelState.ARMED_HOME ), - vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( - STATE_ALARM_ARMED_NIGHT + vol.Optional(CONF_ALARM_ARMED_NIGHT, default={}): _state_schema( + AlarmControlPanelState.ARMED_NIGHT ), - vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( - STATE_ALARM_ARMED_VACATION + vol.Optional(CONF_ALARM_ARMED_VACATION, default={}): _state_schema( + AlarmControlPanelState.ARMED_VACATION ), - vol.Optional( - STATE_ALARM_ARMED_CUSTOM_BYPASS, default={} - ): _state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS), - vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( - STATE_ALARM_DISARMED + vol.Optional(CONF_ALARM_ARMED_CUSTOM_BYPASS, default={}): _state_schema( + AlarmControlPanelState.ARMED_CUSTOM_BYPASS + ), + vol.Optional(CONF_ALARM_DISARMED, default={}): _state_schema( + AlarmControlPanelState.DISARMED ), - vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( - STATE_ALARM_TRIGGERED + vol.Optional(CONF_ALARM_TRIGGERED, default={}): _state_schema( + AlarmControlPanelState.TRIGGERED ), vol.Required(mqtt.CONF_COMMAND_TOPIC): mqtt.valid_publish_topic, vol.Required(mqtt.CONF_STATE_TOPIC): mqtt.valid_subscribe_topic, @@ -268,7 +270,7 @@ def __init__( config, ): """Init the manual MQTT 
alarm panel.""" - self._state = STATE_ALARM_DISARMED + self._state = AlarmControlPanelState.DISARMED self._hass = hass self._attr_name = name if code_template: @@ -304,38 +306,38 @@ def __init__( self._payload_arm_custom_bypass = payload_arm_custom_bypass @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" - if self._state == STATE_ALARM_TRIGGERED: + if self._state == AlarmControlPanelState.TRIGGERED: if self._within_pending_time(self._state): - return STATE_ALARM_PENDING + return AlarmControlPanelState.PENDING trigger_time = self._trigger_time_by_state[self._previous_state] if ( self._state_ts + self._pending_time(self._state) + trigger_time ) < dt_util.utcnow(): if self._disarm_after_trigger: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED self._state = self._previous_state return self._state if self._state in SUPPORTED_PENDING_STATES and self._within_pending_time( self._state ): - return STATE_ALARM_PENDING + return AlarmControlPanelState.PENDING return self._state @property def _active_state(self): """Get the current state.""" - if self.state == STATE_ALARM_PENDING: + if self.state == AlarmControlPanelState.PENDING: return self._previous_state return self._state def _pending_time(self, state): """Get the pending time.""" pending_time = self._pending_time_by_state[state] - if state == STATE_ALARM_TRIGGERED: + if state == AlarmControlPanelState.TRIGGERED: pending_time += self._delay_time_by_state[self._previous_state] return pending_time @@ -354,35 +356,35 @@ def code_format(self) -> CodeFormat | None: async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._async_validate_code(code, STATE_ALARM_DISARMED) - self._state = STATE_ALARM_DISARMED + self._async_validate_code(code, AlarmControlPanelState.DISARMED) + self._state = AlarmControlPanelState.DISARMED self._state_ts = dt_util.utcnow() self.async_write_ha_state() async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_HOME) - self._async_update_state(STATE_ALARM_ARMED_HOME) + self._async_validate_code(code, AlarmControlPanelState.ARMED_HOME) + self._async_update_state(AlarmControlPanelState.ARMED_HOME) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_AWAY) - self._async_update_state(STATE_ALARM_ARMED_AWAY) + self._async_validate_code(code, AlarmControlPanelState.ARMED_AWAY) + self._async_update_state(AlarmControlPanelState.ARMED_AWAY) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_NIGHT) - self._async_update_state(STATE_ALARM_ARMED_NIGHT) + self._async_validate_code(code, AlarmControlPanelState.ARMED_NIGHT) + self._async_update_state(AlarmControlPanelState.ARMED_NIGHT) async def async_alarm_arm_vacation(self, code: str | None = None) -> None: """Send arm vacation command.""" - self._async_validate_code(code, STATE_ALARM_ARMED_VACATION) - self._async_update_state(STATE_ALARM_ARMED_VACATION) + self._async_validate_code(code, AlarmControlPanelState.ARMED_VACATION) + self._async_update_state(AlarmControlPanelState.ARMED_VACATION) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Send arm custom bypass command.""" - self._async_validate_code(code, 
STATE_ALARM_ARMED_CUSTOM_BYPASS) - self._async_update_state(STATE_ALARM_ARMED_CUSTOM_BYPASS) + self._async_validate_code(code, AlarmControlPanelState.ARMED_CUSTOM_BYPASS) + self._async_update_state(AlarmControlPanelState.ARMED_CUSTOM_BYPASS) async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command. @@ -392,7 +394,7 @@ async def async_alarm_trigger(self, code: str | None = None) -> None: """ if not self._trigger_time_by_state[self._active_state]: return - self._async_update_state(STATE_ALARM_TRIGGERED) + self._async_update_state(AlarmControlPanelState.TRIGGERED) def _async_update_state(self, state: str) -> None: """Update the state.""" @@ -405,7 +407,7 @@ def _async_update_state(self, state: str) -> None: self.async_write_ha_state() pending_time = self._pending_time(state) - if state == STATE_ALARM_TRIGGERED: + if state == AlarmControlPanelState.TRIGGERED: async_track_point_in_time( self._hass, self.async_scheduled_update, self._state_ts + pending_time ) @@ -424,7 +426,7 @@ def _async_update_state(self, state: str) -> None: def _async_validate_code(self, code, state): """Validate given code.""" if ( - state != STATE_ALARM_DISARMED and not self.code_arm_required + state != AlarmControlPanelState.DISARMED and not self.code_arm_required ) or self._code is None: return @@ -443,7 +445,7 @@ def _async_validate_code(self, code, state): @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - if self.state != STATE_ALARM_PENDING: + if self.state != AlarmControlPanelState.PENDING: return {} return { ATTR_PRE_PENDING_STATE: self._previous_state, diff --git a/homeassistant/components/map/__init__.py b/homeassistant/components/map/__init__.py deleted file mode 100644 index 25095e92b930a5..00000000000000 --- a/homeassistant/components/map/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Support for showing device locations.""" - -from homeassistant.components import onboarding -from homeassistant.components.lovelace import _create_map_dashboard -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.storage import Store -from homeassistant.helpers.typing import ConfigType - -DOMAIN = "map" - -CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) - -STORAGE_KEY = DOMAIN -STORAGE_VERSION_MAJOR = 1 - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Create a map panel.""" - - if DOMAIN in config: - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "map", - }, - ) - - store: Store[dict[str, bool]] = Store( - hass, - STORAGE_VERSION_MAJOR, - STORAGE_KEY, - ) - data = await store.async_load() - if data: - return True - - if onboarding.async_is_onboarded(hass): - await _create_map_dashboard(hass) - - await store.async_save({"migrated": True}) - - return True diff --git a/homeassistant/components/map/manifest.json b/homeassistant/components/map/manifest.json deleted file mode 100644 index 6a0333c862a053..00000000000000 --- a/homeassistant/components/map/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "domain": "map", - "name": "Map", - 
"codeowners": [], - "dependencies": ["frontend", "lovelace"], - "documentation": "https://www.home-assistant.io/integrations/map", - "integration_type": "system", - "quality_scale": "internal" -} diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py index 5e1af5fae92dbf..7c0985570f7e0b 100644 --- a/homeassistant/components/mastodon/config_flow.py +++ b/homeassistant/components/mastodon/config_flow.py @@ -6,6 +6,7 @@ from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError import voluptuous as vol +from yarl import URL from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import ( @@ -42,6 +43,11 @@ ) +def base_url_from_url(url: str) -> str: + """Return the base url from a url.""" + return str(URL(url).origin()) + + class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" @@ -105,6 +111,8 @@ async def async_step_user( """Handle a flow initialized by the user.""" errors: dict[str, str] | None = None if user_input: + user_input[CONF_BASE_URL] = base_url_from_url(user_input[CONF_BASE_URL]) + instance, account, errors = await self.hass.async_add_executor_job( self.check_connection, user_input[CONF_BASE_URL], @@ -130,7 +138,7 @@ async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResu LOGGER.debug("Importing Mastodon from configuration.yaml") - base_url = str(import_data.get(CONF_BASE_URL, DEFAULT_URL)) + base_url = base_url_from_url(str(import_data.get(CONF_BASE_URL, DEFAULT_URL))) client_id = str(import_data.get(CONF_CLIENT_ID)) client_secret = str(import_data.get(CONF_CLIENT_SECRET)) access_token = str(import_data.get(CONF_ACCESS_TOKEN)) diff --git a/homeassistant/components/matrix/manifest.json b/homeassistant/components/matrix/manifest.json index 520bd0550ccecf..43c151c7c23688 100644 --- a/homeassistant/components/matrix/manifest.json +++ b/homeassistant/components/matrix/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/matrix", "iot_class": "cloud_push", "loggers": ["matrix_client"], - "requirements": ["matrix-nio==0.25.2", "Pillow==10.4.0"] + "requirements": ["matrix-nio==0.25.2", "Pillow==11.0.0"] } diff --git a/homeassistant/components/matter/climate.py b/homeassistant/components/matter/climate.py index f41fa3baaba979..cdbe1e3624530e 100644 --- a/homeassistant/components/matter/climate.py +++ b/homeassistant/components/matter/climate.py @@ -188,6 +188,7 @@ class MatterClimate(MatterEntity, ClimateEntity): _attr_hvac_mode: HVACMode = HVACMode.OFF _feature_map: int | None = None _enable_turn_on_off_backwards_compatibility = False + _platform_translation_key = "thermostat" async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" @@ -427,7 +428,7 @@ def _get_temperature_in_degrees( platform=Platform.CLIMATE, entity_description=ClimateEntityDescription( key="MatterThermostat", - translation_key="thermostat", + name=None, ), entity_class=MatterClimate, required_attributes=(clusters.Thermostat.Attributes.LocalTemperature,), diff --git a/homeassistant/components/matter/config_flow.py b/homeassistant/components/matter/config_flow.py index ae71b7a17116f4..6f7505eb61ff43 100644 --- a/homeassistant/components/matter/config_flow.py +++ b/homeassistant/components/matter/config_flow.py @@ -14,8 +14,6 @@ AddonInfo, AddonManager, AddonState, - HassioServiceInfo, - is_hassio, ) from homeassistant.components.onboarding import async_is_onboarded 
from homeassistant.components.zeroconf import ZeroconfServiceInfo @@ -25,6 +23,8 @@ from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.hassio import is_hassio +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .addon import get_addon_manager from .const import ( diff --git a/homeassistant/components/matter/cover.py b/homeassistant/components/matter/cover.py index c32b7bc9e1a589..ba9c3afbdee2e6 100644 --- a/homeassistant/components/matter/cover.py +++ b/homeassistant/components/matter/cover.py @@ -201,7 +201,8 @@ def _update_from_device(self) -> None: MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCover", translation_key="cover" + key="MatterCover", + name=None, ), entity_class=MatterCover, required_attributes=( @@ -216,7 +217,7 @@ def _update_from_device(self) -> None: MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCoverPositionAwareLift", translation_key="cover" + key="MatterCoverPositionAwareLift", name=None ), entity_class=MatterCover, required_attributes=( @@ -231,7 +232,7 @@ def _update_from_device(self) -> None: MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCoverPositionAwareTilt", translation_key="cover" + key="MatterCoverPositionAwareTilt", name=None ), entity_class=MatterCover, required_attributes=( @@ -246,7 +247,7 @@ def _update_from_device(self) -> None: MatterDiscoverySchema( platform=Platform.COVER, entity_description=CoverEntityDescription( - key="MatterCoverPositionAwareLiftAndTilt", translation_key="cover" + key="MatterCoverPositionAwareLiftAndTilt", name=None ), entity_class=MatterCover, required_attributes=( diff --git a/homeassistant/components/matter/discovery.py b/homeassistant/components/matter/discovery.py index 342522787ab705..5b07f9a069fed5 100644 --- a/homeassistant/components/matter/discovery.py +++ b/homeassistant/components/matter/discovery.py @@ -24,6 +24,7 @@ from .sensor import DISCOVERY_SCHEMAS as SENSOR_SCHEMAS from .switch import DISCOVERY_SCHEMAS as SWITCH_SCHEMAS from .update import DISCOVERY_SCHEMAS as UPDATE_SCHEMAS +from .vacuum import DISCOVERY_SCHEMAS as VACUUM_SCHEMAS from .valve import DISCOVERY_SCHEMAS as VALVE_SCHEMAS DISCOVERY_SCHEMAS: dict[Platform, list[MatterDiscoverySchema]] = { @@ -40,6 +41,7 @@ Platform.SENSOR: SENSOR_SCHEMAS, Platform.SWITCH: SWITCH_SCHEMAS, Platform.UPDATE: UPDATE_SCHEMAS, + Platform.VACUUM: VACUUM_SCHEMAS, Platform.VALVE: VALVE_SCHEMAS, } SUPPORTED_PLATFORMS = tuple(DISCOVERY_SCHEMAS) diff --git a/homeassistant/components/matter/entity.py b/homeassistant/components/matter/entity.py index 1a454bb7357c8e..7c378fe465eee0 100644 --- a/homeassistant/components/matter/entity.py +++ b/homeassistant/components/matter/entity.py @@ -45,6 +45,7 @@ class MatterEntity(Entity): _attr_has_entity_name = True _attr_should_poll = False _name_postfix: str | None = None + _platform_translation_key: str | None = None def __init__( self, @@ -83,6 +84,8 @@ def __init__( and ep.has_attribute(None, entity_info.primary_attribute) ): self._name_postfix = str(self._endpoint.endpoint_id) + if self._platform_translation_key and not self.translation_key: + self._attr_translation_key = self._platform_translation_key # prefer the label attribute for the entity name # Matter has a way for users and/or vendors to specify a 
name for an endpoint diff --git a/homeassistant/components/matter/fan.py b/homeassistant/components/matter/fan.py index 458a57538ebff4..51c2fb0c882a5b 100644 --- a/homeassistant/components/matter/fan.py +++ b/homeassistant/components/matter/fan.py @@ -60,6 +60,7 @@ class MatterFan(MatterEntity, FanEntity): _last_known_percentage: int = 0 _enable_turn_on_off_backwards_compatibility = False _feature_map: int | None = None + _platform_translation_key = "fan" async def async_turn_on( self, @@ -329,7 +330,8 @@ def _calculate_features( MatterDiscoverySchema( platform=Platform.FAN, entity_description=FanEntityDescription( - key="MatterFan", name=None, translation_key="fan" + key="MatterFan", + name=None, ), entity_class=MatterFan, # FanEntityFeature diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 72d06f4b9f18fb..6d184bcc01f710 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -89,6 +89,7 @@ class MatterLight(MatterEntity, LightEntity): _supports_color = False _supports_color_temperature = False _transitions_disabled = False + _platform_translation_key = "light" async def _set_xy_color( self, xy_color: tuple[float, float], transition: float = 0.0 @@ -443,7 +444,8 @@ def _check_transition_blocklist(self) -> None: MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterLight", translation_key="light" + key="MatterLight", + name=None, ), entity_class=MatterLight, required_attributes=(clusters.OnOff.Attributes.OnOff,), @@ -470,7 +472,8 @@ def _check_transition_blocklist(self) -> None: MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterHSColorLightFallback", translation_key="light" + key="MatterHSColorLightFallback", + name=None, ), entity_class=MatterLight, required_attributes=( @@ -490,7 +493,8 @@ def _check_transition_blocklist(self) -> None: MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterXYColorLightFallback", translation_key="light" + key="MatterXYColorLightFallback", + name=None, ), entity_class=MatterLight, required_attributes=( @@ -510,7 +514,8 @@ def _check_transition_blocklist(self) -> None: MatterDiscoverySchema( platform=Platform.LIGHT, entity_description=LightEntityDescription( - key="MatterColorTemperatureLightFallback", translation_key="light" + key="MatterColorTemperatureLightFallback", + name=None, ), entity_class=MatterLight, required_attributes=( diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index 8adaecd67ad78e..c5e10554fe7042 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -40,6 +40,7 @@ class MatterLock(MatterEntity, LockEntity): _feature_map: int | None = None _optimistic_timer: asyncio.TimerHandle | None = None + _platform_translation_key = "lock" @property def code_format(self) -> str | None: @@ -200,7 +201,8 @@ def _calculate_features( MatterDiscoverySchema( platform=Platform.LOCK, entity_description=LockEntityDescription( - key="MatterLock", translation_key="lock" + key="MatterLock", + name=None, ), entity_class=MatterLock, required_attributes=(clusters.DoorLock.Attributes.LockState,), diff --git a/homeassistant/components/matter/manifest.json b/homeassistant/components/matter/manifest.json index 295b0a23735ab7..4573fe17401893 100644 --- a/homeassistant/components/matter/manifest.json +++ 
b/homeassistant/components/matter/manifest.json @@ -1,6 +1,7 @@ { "domain": "matter", "name": "Matter (BETA)", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/matter"], "config_flow": true, "dependencies": ["websocket_api"], diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py index 1bba18b2c5bb7b..1a2fc36c014553 100644 --- a/homeassistant/components/matter/select.py +++ b/homeassistant/components/matter/select.py @@ -162,23 +162,11 @@ def _update_from_device(self) -> None: clusters.RefrigeratorAndTemperatureControlledCabinetMode.Attributes.SupportedModes, ), ), - MatterDiscoverySchema( - platform=Platform.SELECT, - entity_description=MatterSelectEntityDescription( - key="MatterRvcRunMode", - translation_key="mode", - ), - entity_class=MatterModeSelectEntity, - required_attributes=( - clusters.RvcRunMode.Attributes.CurrentMode, - clusters.RvcRunMode.Attributes.SupportedModes, - ), - ), MatterDiscoverySchema( platform=Platform.SELECT, entity_description=MatterSelectEntityDescription( key="MatterRvcCleanMode", - translation_key="mode", + translation_key="clean_mode", ), entity_class=MatterModeSelectEntity, required_attributes=( diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json index b4ef5b793404dd..69fa68765b37ae 100644 --- a/homeassistant/components/matter/strings.json +++ b/homeassistant/components/matter/strings.json @@ -36,6 +36,7 @@ "addon_start_failed": "Failed to start the Matter Server add-on.", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "not_matter_addon": "Discovered add-on is not the official Matter Server add-on.", "reconfiguration_successful": "Successfully reconfigured the Matter integration." 
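The Matter cover, fan, light and lock hunks above (and the switch, valve and new vacuum hunks further down) replace the per-schema translation_key values with name=None, while MatterEntity.__init__ now falls back to a class-level _platform_translation_key whenever the entity description does not set a key of its own. A minimal, self-contained sketch of that fallback order, using plain Python stand-ins rather than the real Home Assistant classes:

# Illustrative stand-ins only; the real logic is the MatterEntity.__init__
# change in homeassistant/components/matter/entity.py shown above.
from dataclasses import dataclass


@dataclass
class StubDescription:
    key: str
    name: str | None = None             # name=None -> entity takes the device name
    translation_key: str | None = None  # an explicit key still wins


class StubMatterEntity:
    _platform_translation_key: str | None = None

    def __init__(self, description: StubDescription) -> None:
        self.translation_key = description.translation_key
        # Fallback mirrors the diff: only applied when the description sets no key.
        if self._platform_translation_key and not self.translation_key:
            self.translation_key = self._platform_translation_key


class StubFan(StubMatterEntity):
    _platform_translation_key = "fan"


# Usage: the platform default kicks in, an explicit key overrides it.
assert StubFan(StubDescription(key="MatterFan", name=None)).translation_key == "fan"
assert StubFan(StubDescription(key="Custom", translation_key="x")).translation_key == "x"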
}, @@ -173,6 +174,9 @@ } }, "select": { + "clean_mode": { + "name": "Clean mode" + }, "mode": { "name": "Mode" }, @@ -251,6 +255,11 @@ "name": "Power" } }, + "vacuum": { + "vacuum": { + "name": "[%key:component::vacuum::title%]" + } + }, "valve": { "valve": { "name": "[%key:component::valve::title%]" diff --git a/homeassistant/components/matter/switch.py b/homeassistant/components/matter/switch.py index 953897fdaa67d0..75269de953c828 100644 --- a/homeassistant/components/matter/switch.py +++ b/homeassistant/components/matter/switch.py @@ -35,6 +35,8 @@ async def async_setup_entry( class MatterSwitch(MatterEntity, SwitchEntity): """Representation of a Matter switch.""" + _platform_translation_key = "switch" + async def async_turn_on(self, **kwargs: Any) -> None: """Turn switch on.""" await self.matter_client.send_device_command( @@ -66,7 +68,7 @@ def _update_from_device(self) -> None: entity_description=SwitchEntityDescription( key="MatterPlug", device_class=SwitchDeviceClass.OUTLET, - translation_key="switch", + name=None, ), entity_class=MatterSwitch, required_attributes=(clusters.OnOff.Attributes.OnOff,), @@ -106,7 +108,7 @@ def _update_from_device(self) -> None: entity_description=SwitchEntityDescription( key="MatterSwitch", device_class=SwitchDeviceClass.OUTLET, - translation_key="switch", + name=None, ), entity_class=MatterSwitch, required_attributes=(clusters.OnOff.Attributes.OnOff,), diff --git a/homeassistant/components/matter/update.py b/homeassistant/components/matter/update.py index 736664e01019ba..f31dd7b3aa3557 100644 --- a/homeassistant/components/matter/update.py +++ b/homeassistant/components/matter/update.py @@ -100,21 +100,23 @@ def _update_from_device(self) -> None: == clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kIdle ): self._attr_in_progress = False + self._attr_update_percentage = None return update_progress: int = self.get_matter_attribute_value( clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateStateProgress ) + self._attr_in_progress = True if ( update_state == clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kDownloading and update_progress is not None and update_progress > 0 ): - self._attr_in_progress = update_progress + self._attr_update_percentage = update_progress else: - self._attr_in_progress = True + self._attr_update_percentage = None async def async_update(self) -> None: """Call when the entity needs to be updated.""" diff --git a/homeassistant/components/matter/vacuum.py b/homeassistant/components/matter/vacuum.py new file mode 100644 index 00000000000000..2ecd7128df60aa --- /dev/null +++ b/homeassistant/components/matter/vacuum.py @@ -0,0 +1,226 @@ +"""Matter vacuum platform.""" + +from __future__ import annotations + +from enum import IntEnum +from typing import TYPE_CHECKING, Any + +from chip.clusters import Objects as clusters +from matter_server.client.models import device_types + +from homeassistant.components.vacuum import ( + STATE_CLEANING, + STATE_DOCKED, + STATE_ERROR, + STATE_RETURNING, + StateVacuumEntity, + StateVacuumEntityDescription, + VacuumEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_IDLE, Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .entity import MatterEntity +from .helpers import get_matter +from .models import MatterDiscoverySchema + + +class OperationalState(IntEnum): + """Operational State of the vacuum cleaner. 
+ + Combination of generic OperationalState and RvcOperationalState. + """ + + NO_ERROR = 0x00 + UNABLE_TO_START_OR_RESUME = 0x01 + UNABLE_TO_COMPLETE_OPERATION = 0x02 + COMMAND_INVALID_IN_STATE = 0x03 + SEEKING_CHARGER = 0x40 + CHARGING = 0x41 + DOCKED = 0x42 + + +class ModeTag(IntEnum): + """Enum with available ModeTag values.""" + + IDLE = 0x4000 # 16384 decimal + CLEANING = 0x4001 # 16385 decimal + MAPPING = 0x4002 # 16386 decimal + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Matter vacuum platform from Config Entry.""" + matter = get_matter(hass) + matter.register_platform_handler(Platform.VACUUM, async_add_entities) + + +class MatterVacuum(MatterEntity, StateVacuumEntity): + """Representation of a Matter Vacuum cleaner entity.""" + + _last_accepted_commands: list[int] | None = None + _supported_run_modes: ( + dict[int, clusters.RvcCleanMode.Structs.ModeOptionStruct] | None + ) = None + entity_description: StateVacuumEntityDescription + _platform_translation_key = "vacuum" + + async def async_stop(self, **kwargs: Any) -> None: + """Stop the vacuum cleaner.""" + await self._send_device_command(clusters.OperationalState.Commands.Stop()) + + async def async_return_to_base(self, **kwargs: Any) -> None: + """Set the vacuum cleaner to return to the dock.""" + await self._send_device_command(clusters.RvcOperationalState.Commands.GoHome()) + + async def async_locate(self, **kwargs: Any) -> None: + """Locate the vacuum cleaner.""" + await self._send_device_command(clusters.Identify.Commands.Identify()) + + async def async_start(self) -> None: + """Start or resume the cleaning task.""" + if TYPE_CHECKING: + assert self._last_accepted_commands is not None + if ( + clusters.RvcOperationalState.Commands.Resume.command_id + in self._last_accepted_commands + ): + await self._send_device_command( + clusters.RvcOperationalState.Commands.Resume() + ) + else: + await self._send_device_command(clusters.OperationalState.Commands.Start()) + + async def async_pause(self) -> None: + """Pause the cleaning task.""" + await self._send_device_command(clusters.OperationalState.Commands.Pause()) + + async def _send_device_command( + self, + command: clusters.ClusterCommand, + ) -> None: + """Send a command to the device.""" + await self.matter_client.send_device_command( + node_id=self._endpoint.node.node_id, + endpoint_id=self._endpoint.endpoint_id, + command=command, + ) + + @callback + def _update_from_device(self) -> None: + """Update from device.""" + self._calculate_features() + # optional battery level + if VacuumEntityFeature.BATTERY & self._attr_supported_features: + self._attr_battery_level = self.get_matter_attribute_value( + clusters.PowerSource.Attributes.BatPercentRemaining + ) + # derive state from the run mode + operational state + run_mode_raw: int = self.get_matter_attribute_value( + clusters.RvcRunMode.Attributes.CurrentMode + ) + operational_state: int = self.get_matter_attribute_value( + clusters.RvcOperationalState.Attributes.OperationalState + ) + state: str | None = None + if TYPE_CHECKING: + assert self._supported_run_modes is not None + if operational_state in (OperationalState.CHARGING, OperationalState.DOCKED): + state = STATE_DOCKED + elif operational_state == OperationalState.SEEKING_CHARGER: + state = STATE_RETURNING + elif operational_state in ( + OperationalState.UNABLE_TO_COMPLETE_OPERATION, + OperationalState.UNABLE_TO_START_OR_RESUME, + ): + state = STATE_ERROR + elif 
(run_mode := self._supported_run_modes.get(run_mode_raw)) is not None: + tags = {x.value for x in run_mode.modeTags} + if ModeTag.CLEANING in tags: + state = STATE_CLEANING + elif ModeTag.IDLE in tags: + state = STATE_IDLE + self._attr_state = state + + @callback + def _calculate_features(self) -> None: + """Calculate features for HA Vacuum platform.""" + accepted_operational_commands: list[int] = self.get_matter_attribute_value( + clusters.RvcOperationalState.Attributes.AcceptedCommandList + ) + # in principle the feature set should not change, except for the accepted commands + if self._last_accepted_commands == accepted_operational_commands: + return + self._last_accepted_commands = accepted_operational_commands + supported_features: VacuumEntityFeature = VacuumEntityFeature(0) + supported_features |= VacuumEntityFeature.STATE + # optional battery attribute = battery feature + if self.get_matter_attribute_value( + clusters.PowerSource.Attributes.BatPercentRemaining + ): + supported_features |= VacuumEntityFeature.BATTERY + # optional identify cluster = locate feature (value must be not None or 0) + if self.get_matter_attribute_value(clusters.Identify.Attributes.IdentifyType): + supported_features |= VacuumEntityFeature.LOCATE + # create a map of supported run modes + run_modes: list[clusters.RvcCleanMode.Structs.ModeOptionStruct] = ( + self.get_matter_attribute_value( + clusters.RvcRunMode.Attributes.SupportedModes + ) + ) + self._supported_run_modes = {mode.mode: mode for mode in run_modes} + # map operational state commands to vacuum features + if ( + clusters.RvcOperationalState.Commands.Pause.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.PAUSE + if ( + clusters.OperationalState.Commands.Stop.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.STOP + if ( + clusters.OperationalState.Commands.Start.command_id + in accepted_operational_commands + ): + # note that start has been replaced by resume in rev2 of the spec + supported_features |= VacuumEntityFeature.START + if ( + clusters.RvcOperationalState.Commands.Resume.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.START + if ( + clusters.RvcOperationalState.Commands.GoHome.command_id + in accepted_operational_commands + ): + supported_features |= VacuumEntityFeature.RETURN_HOME + + self._attr_supported_features = supported_features + + +# Discovery schema(s) to map Matter Attributes to HA entities +DISCOVERY_SCHEMAS = [ + MatterDiscoverySchema( + platform=Platform.VACUUM, + entity_description=StateVacuumEntityDescription( + key="MatterVacuumCleaner", name=None + ), + entity_class=MatterVacuum, + required_attributes=( + clusters.RvcRunMode.Attributes.CurrentMode, + clusters.RvcOperationalState.Attributes.CurrentPhase, + ), + optional_attributes=( + clusters.RvcCleanMode.Attributes.CurrentMode, + clusters.PowerSource.Attributes.BatPercentRemaining, + ), + device_type=(device_types.RoboticVacuumCleaner,), + ), +] diff --git a/homeassistant/components/matter/valve.py b/homeassistant/components/matter/valve.py index f2e212246ca354..ccb4e89da1717d 100644 --- a/homeassistant/components/matter/valve.py +++ b/homeassistant/components/matter/valve.py @@ -40,6 +40,7 @@ class MatterValve(MatterEntity, ValveEntity): _feature_map: int | None = None entity_description: ValveEntityDescription + _platform_translation_key = "valve" async def send_device_command( self, @@ -139,7 +140,7 @@ def _calculate_features( 
entity_description=ValveEntityDescription( key="MatterValve", device_class=ValveDeviceClass.WATER, - translation_key="valve", + name=None, ), entity_class=MatterValve, required_attributes=( diff --git a/homeassistant/components/mealie/config_flow.py b/homeassistant/components/mealie/config_flow.py index 29c2591c7f8514..2f90ceaf97afc7 100644 --- a/homeassistant/components/mealie/config_flow.py +++ b/homeassistant/components/mealie/config_flow.py @@ -6,7 +6,7 @@ from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -32,7 +32,6 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): host: str | None = None verify_ssl: bool = True - entry: ConfigEntry async def check_connection( self, api_token: str @@ -89,7 +88,6 @@ async def async_step_reauth( """Perform reauth upon an API authentication error.""" self.host = entry_data[CONF_HOST] self.verify_ssl = entry_data.get(CONF_VERIFY_SSL, True) - self.entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -102,15 +100,12 @@ async def async_step_reauth_confirm( user_input[CONF_API_TOKEN], ) if not errors: - if self.entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, - CONF_API_TOKEN: user_input[CONF_API_TOKEN], - }, - ) - return self.async_abort(reason="wrong_account") + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]}, + ) return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, @@ -121,13 +116,6 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration of the integration.""" - self.entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfiguration confirmation.""" errors: dict[str, str] = {} if user_input: self.host = user_input[CONF_HOST] @@ -136,20 +124,18 @@ async def async_step_reconfigure_confirm( user_input[CONF_API_TOKEN], ) if not errors: - if self.entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, - CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL], - CONF_HOST: user_input[CONF_HOST], - CONF_API_TOKEN: user_input[CONF_API_TOKEN], - }, - reason="reconfigure_successful", - ) - return self.async_abort(reason="wrong_account") + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates={ + CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL], + CONF_HOST: user_input[CONF_HOST], + CONF_API_TOKEN: user_input[CONF_API_TOKEN], + }, + ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=USER_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/mealie/strings.json 
b/homeassistant/components/mealie/strings.json index 72f2d769dd22dc..b59399815ea03a 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -17,7 +17,7 @@ "api_token": "[%key:common::config_flow::data::api_token%]" } }, - "reconfigure_confirm": { + "reconfigure": { "description": "Please reconfigure with Mealie.", "data": { "host": "[%key:common::config_flow::data::url%]", diff --git a/homeassistant/components/meater/__init__.py b/homeassistant/components/meater/__init__.py index 08ca32029cb1e5..50eff40c0e8b7e 100644 --- a/homeassistant/components/meater/__init__.py +++ b/homeassistant/components/meater/__init__.py @@ -64,6 +64,7 @@ async def async_update_data() -> dict[str, MeaterProbe]: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, # Name of the data. For logging purposes. name="meater_api", update_method=async_update_data, diff --git a/homeassistant/components/meater/strings.json b/homeassistant/components/meater/strings.json index 279841bb14777c..20dd291902634b 100644 --- a/homeassistant/components/meater/strings.json +++ b/homeassistant/components/meater/strings.json @@ -19,7 +19,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", diff --git a/homeassistant/components/medcom_ble/__init__.py b/homeassistant/components/medcom_ble/__init__.py index 36357746b955ef..8603e1b9ce567d 100644 --- a/homeassistant/components/medcom_ble/__init__.py +++ b/homeassistant/components/medcom_ble/__init__.py @@ -53,6 +53,7 @@ async def _async_update_method(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=_async_update_method, update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL), diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 635ab5f6d40744..ebfa79d71902cb 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp==2024.09.27"], + "requirements": ["yt-dlp[default]==2024.11.04"], "single_config_entry": true } diff --git a/homeassistant/components/media_player/browse_media.py b/homeassistant/components/media_player/browse_media.py index e1c2fa37ca0940..c917164a2ee289 100644 --- a/homeassistant/components/media_player/browse_media.py +++ b/homeassistant/components/media_player/browse_media.py @@ -46,6 +46,8 @@ def async_process_play_media_url( elif media_content_id[0] != "/": return media_content_id + # https://github.com/pylint-dev/pylint/issues/3484 + # pylint: disable-next=using-constant-test if parsed.query: logging.getLogger(__name__).debug( "Not signing path for content with query param" diff --git a/homeassistant/components/media_source/__init__.py b/homeassistant/components/media_source/__init__.py index 604f9b7cc88146..3ea8f581245a24 100644 --- a/homeassistant/components/media_source/__init__.py +++ b/homeassistant/components/media_source/__init__.py @@ -18,7 +18,7 @@ from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, 
callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import report_usage from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -156,7 +156,7 @@ async def async_resolve_media( raise Unresolvable("Media Source not loaded") if target_media_player is UNDEFINED: - report( + report_usage( "calls media_source.async_resolve_media without passing an entity_id", exclude_integrations={DOMAIN}, ) diff --git a/homeassistant/components/melcloud/config_flow.py b/homeassistant/components/melcloud/config_flow.py index 72abc0fbca7d61..b604ee5016e860 100644 --- a/homeassistant/components/melcloud/config_flow.py +++ b/homeassistant/components/melcloud/config_flow.py @@ -12,7 +12,7 @@ import pymelcloud import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -25,7 +25,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - entry: ConfigEntry async def _create_entry(self, username: str, token: str) -> ConfigFlowResult: """Register new entry.""" @@ -82,7 +81,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with MELCloud.""" - self.entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -95,15 +93,9 @@ async def async_step_reauth_confirm( aquired_token, errors = await self.async_reauthenticate_client(user_input) if not errors: - self.hass.config_entries.async_update_entry( - self.entry, - data={CONF_TOKEN: aquired_token}, - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={CONF_TOKEN: aquired_token} ) - return self.async_abort(reason="reauth_successful") - return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( @@ -150,20 +142,14 @@ async def async_reauthenticate_client( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors: dict[str, str] = {} acquired_token = None + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: - user_input[CONF_USERNAME] = self.entry.data[CONF_USERNAME] + user_input[CONF_USERNAME] = reconfigure_entry.data[CONF_USERNAME] try: async with asyncio.timeout(10): acquired_token = await pymelcloud.login( @@ -194,18 +180,18 @@ async def async_step_reconfigure_confirm( if not errors: user_input[CONF_TOKEN] = acquired_token return self.async_update_reload_and_abort( - self.entry, - data={**self.entry.data, **user_input}, - reason="reconfigure_successful", + reconfigure_entry, data_updates=user_input ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_PASSWORD): str, } ), 
errors=errors, - description_placeholders={CONF_USERNAME: self.entry.data[CONF_USERNAME]}, + description_placeholders={ + CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME] + }, ) diff --git a/homeassistant/components/melcloud/strings.json b/homeassistant/components/melcloud/strings.json index 968f9cf4e50856..19ef0b76aada79 100644 --- a/homeassistant/components/melcloud/strings.json +++ b/homeassistant/components/melcloud/strings.json @@ -17,7 +17,7 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure your MelCloud", "description": "Reconfigure the entry to obtain a new token, for your account: `{username}`.", "data": { @@ -36,7 +36,9 @@ "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "already_configured": "MELCloud integration already configured for this email. Access token has been refreshed.", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, "services": { diff --git a/homeassistant/components/met/config_flow.py b/homeassistant/components/met/config_flow.py index 84a446824131fa..62964d22bb1a87 100644 --- a/homeassistant/components/met/config_flow.py +++ b/homeassistant/components/met/config_flow.py @@ -11,7 +11,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_ELEVATION, @@ -143,12 +142,12 @@ async def async_step_onboarding( @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> MetOptionsFlowHandler: """Get the options flow for Met.""" - return MetOptionsFlowHandler(config_entry) + return MetOptionsFlowHandler() -class MetOptionsFlowHandler(OptionsFlowWithConfigEntry): +class MetOptionsFlowHandler(OptionsFlow): """Options flow for Met component.""" async def async_step_init( @@ -159,13 +158,13 @@ async def async_step_init( if user_input is not None: # Update config entry with data from user input self.hass.config_entries.async_update_entry( - self._config_entry, data=user_input + self.config_entry, data=user_input ) return self.async_create_entry( - title=self._config_entry.title, data=user_input + title=self.config_entry.title, data=user_input ) return self.async_show_form( step_id="init", - data_schema=_get_data_schema(self.hass, config_entry=self._config_entry), + data_schema=_get_data_schema(self.hass, config_entry=self.config_entry), ) diff --git a/homeassistant/components/met_eireann/__init__.py b/homeassistant/components/met_eireann/__init__.py index 7d0e6401bd65ec..ab2695cbd11ae3 100644 --- a/homeassistant/components/met_eireann/__init__.py +++ b/homeassistant/components/met_eireann/__init__.py @@ -46,6 +46,7 @@ async def _async_update_data() -> MetEireannWeatherData: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=config_entry, name=DOMAIN, update_method=_async_update_data, update_interval=UPDATE_INTERVAL, diff --git a/homeassistant/components/met_eireann/strings.json b/homeassistant/components/met_eireann/strings.json index 984f46d71d61cc..d8c2918e6d3dd8 100644 --- a/homeassistant/components/met_eireann/strings.json +++ b/homeassistant/components/met_eireann/strings.json @@ -12,6 +12,9 @@ } } }, + "abort": { + "already_configured": 
"[%key:common::config_flow::abort::already_configured_service%]" + }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" } diff --git a/homeassistant/components/meteo_france/__init__.py b/homeassistant/components/meteo_france/__init__.py index ddba982934c109..1d4f8293c5efe5 100644 --- a/homeassistant/components/meteo_france/__init__.py +++ b/homeassistant/components/meteo_france/__init__.py @@ -75,24 +75,15 @@ async def _async_update_data_alert() -> CurrentPhenomenons: if not coordinator_forecast.last_update_success: raise ConfigEntryNotReady - # Check if rain forecast is available. - if coordinator_forecast.data.position.get("rain_product_available") == 1: - coordinator_rain = DataUpdateCoordinator( - hass, - _LOGGER, - name=f"Météo-France rain for city {entry.title}", - update_method=_async_update_data_rain, - update_interval=SCAN_INTERVAL_RAIN, - ) - await coordinator_rain.async_refresh() - - if not coordinator_rain.last_update_success: - raise ConfigEntryNotReady - else: - _LOGGER.warning( - "1 hour rain forecast not available. %s is not in covered zone", - entry.title, - ) + # Check rain forecast. + coordinator_rain = DataUpdateCoordinator( + hass, + _LOGGER, + name=f"Météo-France rain for city {entry.title}", + update_method=_async_update_data_rain, + update_interval=SCAN_INTERVAL_RAIN, + ) + await coordinator_rain.async_config_entry_first_refresh() department = coordinator_forecast.data.position.get("dept") _LOGGER.debug( diff --git a/homeassistant/components/meteoclimatic/__init__.py b/homeassistant/components/meteoclimatic/__init__.py index f81d60c3d003e0..8c2fb41c634c3e 100644 --- a/homeassistant/components/meteoclimatic/__init__.py +++ b/homeassistant/components/meteoclimatic/__init__.py @@ -32,6 +32,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"Meteoclimatic weather for {entry.title} ({station_code})", update_method=async_update_data, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/metoffice/__init__.py b/homeassistant/components/metoffice/__init__.py index 18fc121d5d3781..1d516bbc4f5a08 100644 --- a/homeassistant/components/metoffice/__init__.py +++ b/homeassistant/components/metoffice/__init__.py @@ -109,6 +109,7 @@ async def async_update_daily() -> MetOfficeData: metoffice_hourly_coordinator = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"MetOffice Hourly Coordinator for {site_name}", update_method=async_update_3hourly, update_interval=DEFAULT_SCAN_INTERVAL, @@ -117,6 +118,7 @@ async def async_update_daily() -> MetOfficeData: metoffice_daily_coordinator = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"MetOffice Daily Coordinator for {site_name}", update_method=async_update_daily, update_interval=DEFAULT_SCAN_INTERVAL, diff --git a/homeassistant/components/microbees/config_flow.py b/homeassistant/components/microbees/config_flow.py index 4d0f5b4474b853..92fa40b24f0909 100644 --- a/homeassistant/components/microbees/config_flow.py +++ b/homeassistant/components/microbees/config_flow.py @@ -6,8 +6,7 @@ from microBeesPy import MicroBees, MicroBeesException -from homeassistant import config_entries -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow @@ -20,7 +19,6 @@ 
class OAuth2FlowHandler( """Handle a config flow for microBees.""" DOMAIN = DOMAIN - reauth_entry: config_entries.ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -49,26 +47,21 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu self.logger.exception("Unexpected error") return self.async_abort(reason="unknown") - if not self.reauth_entry: - await self.async_set_unique_id(current_user.id) + await self.async_set_unique_id(current_user.id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title=current_user.username, data=data, ) - if self.reauth_entry.unique_id == current_user.id: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_abort(reason="wrong_account") + + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort(self._get_reauth_entry(), data=data) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/microbees/strings.json b/homeassistant/components/microbees/strings.json index 49d42af83d3a77..8635753a564278 100644 --- a/homeassistant/components/microbees/strings.json +++ b/homeassistant/components/microbees/strings.json @@ -21,6 +21,7 @@ "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "unknown": "[%key:common::config_flow::error::unknown%]", "wrong_account": "You can only reauthenticate this entry with the same microBees account." 
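The mealie, melcloud and microBees flows above move to the same reauth helpers: the entry is fetched on demand with self._get_reauth_entry() instead of being stashed on the flow, account switches are caught with async_set_unique_id plus self._abort_if_unique_id_mismatch(reason="wrong_account") where a unique ID is available, and the manual update/reload/abort sequence collapses into self.async_update_reload_and_abort(). A condensed sketch of that shape for a hypothetical integration (the domain, the _validate helper and the token field are illustrative, not taken from any of these components):

# Hypothetical flow, only to condense the pattern applied in the hunks above.
from collections.abc import Mapping
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN

DOMAIN = "example"  # placeholder domain


class ExampleConfigFlow(ConfigFlow, domain=DOMAIN):
    """Reauth skeleton following the pattern in the diff."""

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        # No self.entry = ... any more; the entry is fetched on demand below.
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        errors: dict[str, str] = {}
        if user_input is not None:
            # _validate() stands in for the integration's own connection check.
            user_id, errors = await self._validate(user_input[CONF_API_TOKEN])
            if not errors:
                await self.async_set_unique_id(user_id)
                self._abort_if_unique_id_mismatch(reason="wrong_account")
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(),
                    data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]},
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
            errors=errors,
        )

    async def _validate(self, token: str) -> tuple[str | None, dict[str, str]]:
        """Placeholder; a real flow would call its client library here."""
        return "some-user-id", {}

The abort reasons these helpers raise (reauth_successful, wrong_account) come from each integration's strings.json, which is why several hunks above also add entries such as reauth_successful to the abort sections.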
}, diff --git a/homeassistant/components/mikrotik/config_flow.py b/homeassistant/components/mikrotik/config_flow.py index 6035565acf1d34..bca394f0d38424 100644 --- a/homeassistant/components/mikrotik/config_flow.py +++ b/homeassistant/components/mikrotik/config_flow.py @@ -39,7 +39,6 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a Mikrotik config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry | None @staticmethod @callback @@ -47,7 +46,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> MikrotikOptionsFlowHandler: """Get the options flow for this handler.""" - return MikrotikOptionsFlowHandler(config_entry) + return MikrotikOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -87,9 +86,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -97,9 +93,10 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} try: await self.hass.async_add_executor_job(get_api, user_input) except CannotConnect: @@ -108,17 +105,10 @@ async def async_step_reauth_confirm( errors[CONF_PASSWORD] = "invalid_auth" if not errors: - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data=user_input, - ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( - description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] - }, + description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]}, step_id="reauth_confirm", data_schema=vol.Schema( { @@ -132,10 +122,6 @@ async def async_step_reauth_confirm( class MikrotikOptionsFlowHandler(OptionsFlow): """Handle Mikrotik options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Mikrotik options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/minio/__init__.py b/homeassistant/components/minio/__init__.py index 8a301ea4225628..57a9632a6ff4d1 100644 --- a/homeassistant/components/minio/__init__.py +++ b/homeassistant/components/minio/__init__.py @@ -73,11 +73,11 @@ ) BUCKET_KEY_SCHEMA = vol.Schema( - {vol.Required(ATTR_BUCKET): cv.template, vol.Required(ATTR_KEY): cv.template} + {vol.Required(ATTR_BUCKET): cv.string, vol.Required(ATTR_KEY): cv.string} ) BUCKET_KEY_FILE_SCHEMA = BUCKET_KEY_SCHEMA.extend( - {vol.Required(ATTR_FILE_PATH): cv.template} + {vol.Required(ATTR_FILE_PATH): cv.string} ) @@ -125,15 +125,11 @@ def _setup_listener(listener_conf): get_minio_endpoint(host, port), access_key, secret_key, secure ) - def _render_service_value(service, key): - value = service.data[key] - return value.async_render(parse_result=False) - def put_file(service: ServiceCall) -> None: """Upload file service.""" - bucket = _render_service_value(service, ATTR_BUCKET) - key = _render_service_value(service, 
ATTR_KEY) - file_path = _render_service_value(service, ATTR_FILE_PATH) + bucket = service.data[ATTR_BUCKET] + key = service.data[ATTR_KEY] + file_path = service.data[ATTR_FILE_PATH] if not hass.config.is_allowed_path(file_path): raise ValueError(f"Invalid file_path {file_path}") @@ -142,9 +138,9 @@ def put_file(service: ServiceCall) -> None: def get_file(service: ServiceCall) -> None: """Download file service.""" - bucket = _render_service_value(service, ATTR_BUCKET) - key = _render_service_value(service, ATTR_KEY) - file_path = _render_service_value(service, ATTR_FILE_PATH) + bucket = service.data[ATTR_BUCKET] + key = service.data[ATTR_KEY] + file_path = service.data[ATTR_FILE_PATH] if not hass.config.is_allowed_path(file_path): raise ValueError(f"Invalid file_path {file_path}") @@ -153,8 +149,8 @@ def get_file(service: ServiceCall) -> None: def remove_file(service: ServiceCall) -> None: """Delete file service.""" - bucket = _render_service_value(service, ATTR_BUCKET) - key = _render_service_value(service, ATTR_KEY) + bucket = service.data[ATTR_BUCKET] + key = service.data[ATTR_KEY] minio_client.remove_object(bucket, key) diff --git a/homeassistant/components/mjpeg/config_flow.py b/homeassistant/components/mjpeg/config_flow.py index 8426793678812e..e0150f8c461d4f 100644 --- a/homeassistant/components/mjpeg/config_flow.py +++ b/homeassistant/components/mjpeg/config_flow.py @@ -141,7 +141,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> MJPEGOptionsFlowHandler: """Get the options flow for this handler.""" - return MJPEGOptionsFlowHandler(config_entry) + return MJPEGOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -183,10 +183,6 @@ async def async_step_user( class MJPEGOptionsFlowHandler(OptionsFlow): """Handle MJPEG IP Camera options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize MJPEG IP Camera options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/mobile_app/__init__.py b/homeassistant/components/mobile_app/__init__.py index 80893e0cbfad6f..9fadca31b5061c 100644 --- a/homeassistant/components/mobile_app/__init__.py +++ b/homeassistant/components/mobile_app/__init__.py @@ -4,6 +4,7 @@ from functools import partial from typing import Any +from homeassistant.auth import EVENT_USER_REMOVED from homeassistant.components import cloud, intent, notify as hass_notify from homeassistant.components.webhook import ( async_register as webhook_register, @@ -11,7 +12,7 @@ ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_DEVICE_ID, CONF_WEBHOOK_ID, Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from homeassistant.helpers import ( config_validation as cv, device_registry as dr, @@ -36,6 +37,7 @@ ATTR_MODEL, ATTR_OS_VERSION, CONF_CLOUDHOOK_URL, + CONF_USER_ID, DATA_CONFIG_ENTRIES, DATA_DELETED_IDS, DATA_DEVICES, @@ -90,6 +92,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: websocket_api.async_setup_commands(hass) + async def _handle_user_removed(event: Event) -> None: + """Remove an entry when the user is removed.""" + user_id = event.data["user_id"] + for entry in hass.config_entries.async_entries(DOMAIN): + if entry.data[CONF_USER_ID] == user_id: + await hass.config_entries.async_remove(entry.entry_id) + + hass.bus.async_listen(EVENT_USER_REMOVED, 
_handle_user_removed) + return True diff --git a/homeassistant/components/modbus/__init__.py b/homeassistant/components/modbus/__init__.py index 64a9e71b3fc8a7..48f8c726836755 100644 --- a/homeassistant/components/modbus/__init__.py +++ b/homeassistant/components/modbus/__init__.py @@ -87,7 +87,6 @@ CONF_HVAC_MODE_VALUES, CONF_HVAC_ONOFF_REGISTER, CONF_INPUT_TYPE, - CONF_LAZY_ERROR, CONF_MAX_TEMP, CONF_MAX_VALUE, CONF_MIN_TEMP, @@ -96,7 +95,6 @@ CONF_NAN_VALUE, CONF_PARITY, CONF_PRECISION, - CONF_RETRIES, CONF_SCALE, CONF_SLAVE_COUNT, CONF_STATE_CLOSED, @@ -162,7 +160,6 @@ vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): cv.positive_int, - vol.Optional(CONF_LAZY_ERROR): cv.positive_int, vol.Optional(CONF_UNIQUE_ID): cv.string, } ) @@ -234,8 +231,10 @@ CALL_TYPE_X_REGISTER_HOLDINGS, ] ), - vol.Optional(CONF_STATE_OFF): cv.positive_int, - vol.Optional(CONF_STATE_ON): cv.positive_int, + vol.Optional(CONF_STATE_OFF): vol.All( + cv.ensure_list, [cv.positive_int] + ), + vol.Optional(CONF_STATE_ON): vol.All(cv.ensure_list, [cv.positive_int]), vol.Optional(CONF_DELAY, default=0): cv.positive_int, } ), @@ -393,7 +392,6 @@ vol.Optional(CONF_NAME, default=DEFAULT_HUB): cv.string, vol.Optional(CONF_TIMEOUT, default=3): cv.socket_timeout, vol.Optional(CONF_DELAY, default=0): cv.positive_int, - vol.Optional(CONF_RETRIES): cv.positive_int, vol.Optional(CONF_MSG_WAIT): cv.positive_int, vol.Optional(CONF_BINARY_SENSORS): vol.All( cv.ensure_list, [BINARY_SENSOR_SCHEMA] diff --git a/homeassistant/components/modbus/binary_sensor.py b/homeassistant/components/modbus/binary_sensor.py index 54ee49ed6a2768..b50d21faf424c2 100644 --- a/homeassistant/components/modbus/binary_sensor.py +++ b/homeassistant/components/modbus/binary_sensor.py @@ -90,6 +90,7 @@ async def async_setup_slaves( self._coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=name, ) diff --git a/homeassistant/components/modbus/const.py b/homeassistant/components/modbus/const.py index 02f5d99c72c721..7a1a4121a938fe 100644 --- a/homeassistant/components/modbus/const.py +++ b/homeassistant/components/modbus/const.py @@ -20,7 +20,6 @@ CONF_DEVICE_ADDRESS = "device_address" CONF_FANS = "fans" CONF_INPUT_TYPE = "input_type" -CONF_LAZY_ERROR = "lazy_error_count" CONF_MAX_TEMP = "max_temp" CONF_MAX_VALUE = "max_value" CONF_MIN_TEMP = "min_temp" @@ -28,7 +27,6 @@ CONF_MSG_WAIT = "message_wait_milliseconds" CONF_NAN_VALUE = "nan_value" CONF_PARITY = "parity" -CONF_RETRIES = "retries" CONF_PRECISION = "precision" CONF_SCALE = "scale" CONF_SLAVE_COUNT = "slave_count" diff --git a/homeassistant/components/modbus/cover.py b/homeassistant/components/modbus/cover.py index ce44c2935f63e4..eb9dac58900d89 100644 --- a/homeassistant/components/modbus/cover.py +++ b/homeassistant/components/modbus/cover.py @@ -5,17 +5,8 @@ from datetime import datetime from typing import Any -from homeassistant.components.cover import CoverEntity, CoverEntityFeature -from homeassistant.const import ( - CONF_COVERS, - CONF_NAME, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState +from homeassistant.const import CONF_COVERS, CONF_NAME, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity @@ -105,10 +96,10 @@ async def async_added_to_hass(self) -> 
None: await self.async_base_added_to_hass() if state := await self.async_get_last_state(): convert = { - STATE_CLOSED: self._state_closed, - STATE_CLOSING: self._state_closing, - STATE_OPENING: self._state_opening, - STATE_OPEN: self._state_open, + CoverState.CLOSED: self._state_closed, + CoverState.CLOSING: self._state_closing, + CoverState.OPENING: self._state_opening, + CoverState.OPEN: self._state_open, STATE_UNAVAILABLE: None, STATE_UNKNOWN: None, } diff --git a/homeassistant/components/modbus/entity.py b/homeassistant/components/modbus/entity.py index 9f0e862f28397e..90833516e59c28 100644 --- a/homeassistant/components/modbus/entity.py +++ b/homeassistant/components/modbus/entity.py @@ -297,8 +297,10 @@ def __init__(self, hass: HomeAssistant, hub: ModbusHub, config: dict) -> None: self._verify_type = convert[ config[CONF_VERIFY].get(CONF_INPUT_TYPE, config[CONF_WRITE_TYPE]) ][0] - self._state_on = config[CONF_VERIFY].get(CONF_STATE_ON, self.command_on) - self._state_off = config[CONF_VERIFY].get(CONF_STATE_OFF, self._command_off) + self._state_on = config[CONF_VERIFY].get(CONF_STATE_ON, [self.command_on]) + self._state_off = config[CONF_VERIFY].get( + CONF_STATE_OFF, [self._command_off] + ) else: self._verify_active = False @@ -363,9 +365,9 @@ async def async_update(self, now: datetime | None = None) -> None: self._attr_is_on = bool(result.bits[0] & 1) else: value = int(result.registers[0]) - if value == self._state_on: + if value in self._state_on: self._attr_is_on = True - elif value == self._state_off: + elif value in self._state_off: self._attr_is_on = False elif value is not None: _LOGGER.error( diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index cc70a783234d35..d85b4e0e67f42f 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -34,7 +34,6 @@ from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType @@ -62,11 +61,9 @@ PLATFORMS, RTUOVERTCP, SERIAL, - SERVICE_RESTART, SERVICE_STOP, SERVICE_WRITE_COIL, SERVICE_WRITE_REGISTER, - SIGNAL_START_ENTITY, SIGNAL_STOP_ENTITY, TCP, UDP, @@ -233,34 +230,12 @@ async def async_stop_hub(service: ServiceCall) -> None: hub = hub_collect[service.data[ATTR_HUB]] await hub.async_close() - async def async_restart_hub(service: ServiceCall) -> None: - """Restart Modbus hub.""" - async_create_issue( - hass, - DOMAIN, - "deprecated_restart", - breaks_in_ha_version="2024.11.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_restart", - ) - _LOGGER.warning( - "`modbus.restart` is deprecated and will be removed in version 2024.11" - ) - async_dispatcher_send(hass, SIGNAL_START_ENTITY) - hub = hub_collect[service.data[ATTR_HUB]] - await hub.async_restart() - - for x_service in ( - (SERVICE_STOP, async_stop_hub), - (SERVICE_RESTART, async_restart_hub), - ): - hass.services.async_register( - DOMAIN, - x_service[0], - x_service[1], - schema=vol.Schema({vol.Required(ATTR_HUB): cv.string}), - ) + hass.services.async_register( + DOMAIN, + SERVICE_STOP, + async_stop_hub, + schema=vol.Schema({vol.Required(ATTR_HUB): cv.string}), + ) return True @@ -341,7 +316,7 @@ async def async_pb_connect(self) 
-> None: self._log_error(err, error_state=False) return message = f"modbus {self.name} communication open" - _LOGGER.warning(message) + _LOGGER.info(message) async def async_setup(self) -> bool: """Set up pymodbus client.""" @@ -393,7 +368,7 @@ async def async_close(self) -> None: del self._client self._client = None message = f"modbus {self.name} communication closed" - _LOGGER.warning(message) + _LOGGER.info(message) async def low_level_pb_call( self, slave: int | None, address: int, value: int | list[int], use_call: str diff --git a/homeassistant/components/modbus/sensor.py b/homeassistant/components/modbus/sensor.py index 4b4fd5bd51a400..d5a16c95cc4de5 100644 --- a/homeassistant/components/modbus/sensor.py +++ b/homeassistant/components/modbus/sensor.py @@ -91,6 +91,7 @@ async def async_setup_slaves( self._coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=name, ) diff --git a/homeassistant/components/modbus/strings.json b/homeassistant/components/modbus/strings.json index c0d702a9b89936..7b55022645e5fe 100644 --- a/homeassistant/components/modbus/strings.json +++ b/homeassistant/components/modbus/strings.json @@ -97,10 +97,6 @@ "no_entities": { "title": "Modbus {sub_1} contain no entities, entry not loaded.", "description": "Please add at least one entity to Modbus {sub_1} in your configuration.yaml file and restart Home Assistant to fix this issue." - }, - "deprecated_restart": { - "title": "modbus.restart is being removed", - "description": "Please use reload yaml via the developer tools in the UI instead of via the `modbus.restart` action." } } } diff --git a/homeassistant/components/modbus/validators.py b/homeassistant/components/modbus/validators.py index e1120094d01094..f8f1a7450eba8f 100644 --- a/homeassistant/components/modbus/validators.py +++ b/homeassistant/components/modbus/validators.py @@ -27,8 +27,6 @@ from .const import ( CONF_DATA_TYPE, CONF_FAN_MODE_VALUES, - CONF_LAZY_ERROR, - CONF_RETRIES, CONF_SLAVE_COUNT, CONF_SWAP, CONF_SWAP_BYTE, @@ -284,27 +282,6 @@ def validate_modbus( hub_name_inx: int, ) -> bool: """Validate modbus entries.""" - if CONF_RETRIES in hub: - async_create_issue( - hass, - DOMAIN, - "deprecated_retries", - breaks_in_ha_version="2024.7.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_retries", - translation_placeholders={ - "config_key": "retries", - "integration": DOMAIN, - "url": "https://www.home-assistant.io/integrations/modbus", - }, - ) - _LOGGER.warning( - "`retries`: is deprecated and will be removed in version 2024.7" - ) - else: - hub[CONF_RETRIES] = 3 - host: str = ( hub[CONF_PORT] if hub[CONF_TYPE] == SERIAL @@ -353,24 +330,6 @@ def validate_entity( ent_addr: set[str], ) -> bool: """Validate entity.""" - if CONF_LAZY_ERROR in entity: - async_create_issue( - hass, - DOMAIN, - "removed_lazy_error_count", - breaks_in_ha_version="2024.7.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="removed_lazy_error_count", - translation_placeholders={ - "config_key": "lazy_error_count", - "integration": DOMAIN, - "url": "https://www.home-assistant.io/integrations/modbus", - }, - ) - _LOGGER.warning( - "`lazy_error_count`: is deprecated and will be removed in version 2024.7" - ) name = f"{component}.{entity[CONF_NAME]}" scan_interval = entity.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) if 0 < scan_interval < 5: diff --git a/homeassistant/components/mold_indicator/sensor.py b/homeassistant/components/mold_indicator/sensor.py index 
eb4c0bf7284aa5..262d13ad3af2fd 100644 --- a/homeassistant/components/mold_indicator/sensor.py +++ b/homeassistant/components/mold_indicator/sensor.py @@ -22,6 +22,7 @@ CONF_NAME, CONF_UNIQUE_ID, PERCENTAGE, + STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfTemperature, ) @@ -37,7 +38,7 @@ from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_state_change_event -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.unit_conversion import TemperatureConverter from homeassistant.util.unit_system import METRIC_SYSTEM @@ -150,7 +151,6 @@ def __init__( unique_id: str | None, ) -> None: """Initialize the sensor.""" - self._state: str | None = None self._attr_name = name self._attr_unique_id = unique_id self._indoor_temp_sensor = indoor_temp_sensor @@ -272,7 +272,7 @@ def mold_indicator_startup() -> None: # re-calculate dewpoint and mold indicator self._calc_dewpoint() self._calc_moldindicator() - if self._state is None: + if self._attr_native_value is None: self._attr_available = False else: self._attr_available = True @@ -311,7 +311,7 @@ def _update_temp_sensor(state: State) -> float | None: _LOGGER.debug("Updating temp sensor with value %s", state.state) # Return an error if the sensor change its state to Unknown. - if state.state == STATE_UNKNOWN: + if state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): _LOGGER.error( "Unable to parse temperature sensor %s with state: %s", state.entity_id, @@ -319,8 +319,6 @@ def _update_temp_sensor(state: State) -> float | None: ) return None - unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - if (temp := util.convert(state.state, float)) is None: _LOGGER.error( "Unable to parse temperature sensor %s with state: %s", @@ -330,12 +328,10 @@ def _update_temp_sensor(state: State) -> float | None: return None # convert to celsius if necessary - if unit == UnitOfTemperature.FAHRENHEIT: - return TemperatureConverter.convert( - temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS - ) - if unit == UnitOfTemperature.CELSIUS: - return temp + if ( + unit := state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + ) in UnitOfTemperature: + return TemperatureConverter.convert(temp, unit, UnitOfTemperature.CELSIUS) _LOGGER.error( "Temp sensor %s has unsupported unit: %s (allowed: %s, %s)", state.entity_id, @@ -352,7 +348,7 @@ def _update_hum_sensor(state: State) -> float | None: _LOGGER.debug("Updating humidity sensor with value %s", state.state) # Return an error if the sensor change its state to Unknown. 
- if state.state == STATE_UNKNOWN: + if state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): _LOGGER.error( "Unable to parse humidity sensor %s, state: %s", state.entity_id, @@ -370,19 +366,18 @@ def _update_hum_sensor(state: State) -> float | None: if (unit := state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)) != PERCENTAGE: _LOGGER.error( - "Humidity sensor %s has unsupported unit: %s %s", + "Humidity sensor %s has unsupported unit: %s (allowed: %s)", state.entity_id, unit, - " (allowed: %)", + PERCENTAGE, ) return None if hum > 100 or hum < 0: _LOGGER.error( - "Humidity sensor %s is out of range: %s %s", + "Humidity sensor %s is out of range: %s (allowed: 0-100)", state.entity_id, hum, - "(allowed: 0-100%)", ) return None @@ -401,7 +396,7 @@ async def async_update(self) -> None: # re-calculate dewpoint and mold indicator self._calc_dewpoint() self._calc_moldindicator() - if self._state is None: + if self._attr_native_value is None: self._attr_available = False self._dewpoint = None self._crit_temp = None @@ -437,7 +432,7 @@ def _calc_moldindicator(self) -> None: self._dewpoint, self._calib_factor, ) - self._state = None + self._attr_native_value = None self._attr_available = False self._crit_temp = None return @@ -468,18 +463,13 @@ def _calc_moldindicator(self) -> None: # check bounds and format if crit_humidity > 100: - self._state = "100" + self._attr_native_value = "100" elif crit_humidity < 0: - self._state = "0" + self._attr_native_value = "0" else: - self._state = f"{int(crit_humidity):d}" + self._attr_native_value = f"{int(crit_humidity):d}" - _LOGGER.debug("Mold indicator humidity: %s", self._state) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - return self._state + _LOGGER.debug("Mold indicator humidity: %s", self.native_value) @property def extra_state_attributes(self) -> dict[str, Any]: diff --git a/homeassistant/components/mold_indicator/strings.json b/homeassistant/components/mold_indicator/strings.json index 03c6a05546f8d2..e19fed690b2000 100644 --- a/homeassistant/components/mold_indicator/strings.json +++ b/homeassistant/components/mold_indicator/strings.json @@ -1,4 +1,5 @@ { + "title": "Mold Indicator", "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" diff --git a/homeassistant/components/monoprice/config_flow.py b/homeassistant/components/monoprice/config_flow.py index cac673e38c1914..b2619623a07b28 100644 --- a/homeassistant/components/monoprice/config_flow.py +++ b/homeassistant/components/monoprice/config_flow.py @@ -108,7 +108,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> MonopriceOptionsFlowHandler: """Define the config flow to handle options.""" - return MonopriceOptionsFlowHandler(config_entry) + return MonopriceOptionsFlowHandler() @callback @@ -126,10 +126,6 @@ def _key_for_source(index, source, previous_sources): class MonopriceOptionsFlowHandler(OptionsFlow): """Handle a Monoprice options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - @callback def _previous_sources(self): if CONF_SOURCES in self.config_entry.options: diff --git a/homeassistant/components/monzo/api.py b/homeassistant/components/monzo/api.py index 6862564d34330e..5216232199ce4c 100644 --- a/homeassistant/components/monzo/api.py +++ b/homeassistant/components/monzo/api.py @@ -20,7 +20,6 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not 
self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return str(self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/monzo/config_flow.py b/homeassistant/components/monzo/config_flow.py index 2eb51b4d305924..9f005c6aaa436b 100644 --- a/homeassistant/components/monzo/config_flow.py +++ b/homeassistant/components/monzo/config_flow.py @@ -8,7 +8,7 @@ import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -23,7 +23,6 @@ class MonzoFlowHandler( DOMAIN = DOMAIN oauth_data: dict[str, Any] - reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -35,10 +34,11 @@ async def async_step_await_approval_confirmation( ) -> ConfigFlowResult: """Wait for the user to confirm in-app approval.""" if user_input is not None: - if not self.reauth_entry: + if self.source != SOURCE_REAUTH: return self.async_create_entry(title=DOMAIN, data=self.oauth_data) return self.async_update_reload_and_abort( - self.reauth_entry, data={**self.reauth_entry.data, **self.oauth_data} + self._get_reauth_entry(), + data_updates=self.oauth_data, ) data_schema = vol.Schema({vol.Required("confirm"): bool}) @@ -51,11 +51,11 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu """Create an entry for the flow.""" self.oauth_data = data user_id = data[CONF_TOKEN]["user_id"] - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() - elif self.reauth_entry.unique_id != user_id: - return self.async_abort(reason="wrong_account") + else: + self._abort_if_unique_id_mismatch(reason="wrong_account") return await self.async_step_await_approval_confirmation() @@ -63,9 +63,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/monzo/manifest.json b/homeassistant/components/monzo/manifest.json index d9d17eb8abcf3c..7038cecd7ea0f9 100644 --- a/homeassistant/components/monzo/manifest.json +++ b/homeassistant/components/monzo/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/monzo", "iot_class": "cloud_polling", - "requirements": ["monzopy==1.3.2"] + "requirements": ["monzopy==1.4.2"] } diff --git a/homeassistant/components/mopeka/config_flow.py b/homeassistant/components/mopeka/config_flow.py index 72e9386a47f545..2e35ff4283f453 100644 --- a/homeassistant/components/mopeka/config_flow.py +++ b/homeassistant/components/mopeka/config_flow.py @@ -58,7 +58,7 @@ def async_get_options_flow( config_entry: config_entries.ConfigEntry, ) -> MopekaOptionsFlow: """Return the options flow for this handler.""" - return MopekaOptionsFlow(config_entry) + return MopekaOptionsFlow() async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak @@ -139,10 +139,6 @@ async def async_step_user( class MopekaOptionsFlow(config_entries.OptionsFlow): 
"""Handle options for the Mopeka component.""" - def __init__(self, config_entry: config_entries.ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/motion_blinds/config_flow.py b/homeassistant/components/motion_blinds/config_flow.py index 131299314a2562..e961880375c39b 100644 --- a/homeassistant/components/motion_blinds/config_flow.py +++ b/homeassistant/components/motion_blinds/config_flow.py @@ -38,10 +38,6 @@ class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -83,7 +79,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo diff --git a/homeassistant/components/motionblinds_ble/config_flow.py b/homeassistant/components/motionblinds_ble/config_flow.py index cda673b13aca02..d99096d3a09b11 100644 --- a/homeassistant/components/motionblinds_ble/config_flow.py +++ b/homeassistant/components/motionblinds_ble/config_flow.py @@ -187,16 +187,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an options flow for Motionblinds BLE.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/motionblinds_ble/manifest.json b/homeassistant/components/motionblinds_ble/manifest.json index d9968cfde4cf3a..ce7e7a6bb8be85 100644 --- a/homeassistant/components/motionblinds_ble/manifest.json +++ b/homeassistant/components/motionblinds_ble/manifest.json @@ -14,5 +14,5 @@ "integration_type": "device", "iot_class": "assumed_state", "loggers": ["motionblindsble"], - "requirements": ["motionblindsble==0.1.1"] + "requirements": ["motionblindsble==0.1.2"] } diff --git a/homeassistant/components/motioneye/__init__.py b/homeassistant/components/motioneye/__init__.py index e24b844c4a2f46..3e4ad53d200153 100644 --- a/homeassistant/components/motioneye/__init__.py +++ b/homeassistant/components/motioneye/__init__.py @@ -322,6 +322,7 @@ async def async_update_data() -> dict[str, Any] | None: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=DEFAULT_SCAN_INTERVAL, diff --git a/homeassistant/components/motioneye/config_flow.py b/homeassistant/components/motioneye/config_flow.py index 8107ca760cb6e6..80a6449a22de5b 100644 --- a/homeassistant/components/motioneye/config_flow.py +++ b/homeassistant/components/motioneye/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, cast +from typing import Any from motioneye_client.client import ( MotionEyeClientConnectionError, @@ -12,7 +12,6 @@ ) import voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo from 
homeassistant.config_entries import ( SOURCE_REAUTH, ConfigEntry, @@ -20,10 +19,11 @@ ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_SOURCE, CONF_URL, CONF_WEBHOOK_ID +from homeassistant.const import CONF_URL, CONF_WEBHOOK_ID from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import VolDictType from . import create_motioneye_client @@ -53,7 +53,7 @@ async def async_step_user( """Handle the initial step.""" def _get_form( - user_input: dict[str, Any], errors: dict[str, str] | None = None + user_input: Mapping[str, Any], errors: dict[str, str] | None = None ) -> ConfigFlowResult: """Show the form to the user.""" url_schema: VolDictType = {} @@ -89,16 +89,10 @@ def _get_form( errors=errors, ) - reauth_entry = None - if self.context.get("entry_id"): - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - if user_input is None: - return _get_form( - cast(dict[str, Any], reauth_entry.data) if reauth_entry else {} - ) + if self.source == SOURCE_REAUTH: + return _get_form(self._get_reauth_entry().data) + return _get_form({}) if self._hassio_discovery: # In case of Supervisor discovery, use pushed URL @@ -135,16 +129,13 @@ def _get_form( if errors: return _get_form(user_input, errors) - if self.context.get(CONF_SOURCE) == SOURCE_REAUTH and reauth_entry is not None: + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() # Persist the same webhook id across reauths. if CONF_WEBHOOK_ID in reauth_entry.data: user_input[CONF_WEBHOOK_ID] = reauth_entry.data[CONF_WEBHOOK_ID] - self.hass.config_entries.async_update_entry(reauth_entry, data=user_input) - # Need to manually reload, as the listener won't have been - # installed because the initial load did not succeed (the reauth - # flow will not be initiated if the load succeeds). - await self.hass.config_entries.async_reload(reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + + return self.async_update_reload_and_abort(reauth_entry, data=user_input) # Search for duplicates: there isn't a useful unique_id, but # at least prevent entries with the same motionEye URL. 
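The motionEye hunks above, like the Monzo changes earlier and the MQTT config flow changes later in this diff, move reauth handling away from a manually resolved self.context["entry_id"] lookup and onto the shared ConfigFlow helpers (self.source == SOURCE_REAUTH, self._get_reauth_entry() and async_update_reload_and_abort()). A minimal sketch of that target pattern, assuming the placeholder names EXAMPLE_DOMAIN and ExampleFlowHandler which are not part of this diff, could look like:

from collections.abc import Mapping
from typing import Any

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult

EXAMPLE_DOMAIN = "example"  # placeholder domain, for illustration only


class ExampleFlowHandler(ConfigFlow, domain=EXAMPLE_DOMAIN):
    """Minimal sketch of a reauth flow using the shared ConfigFlow helpers."""

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        # No manual self.hass.config_entries.async_get_entry() lookup is needed;
        # the entry is fetched later through self._get_reauth_entry().
        return await self.async_step_user()

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is None:
            return self.async_show_form(step_id="user")
        if self.source == SOURCE_REAUTH:
            # Merge the new data into the existing entry, reload it and abort.
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(), data_updates=user_input
            )
        return self.async_create_entry(title="Example", data=user_input)

The sketch only illustrates the helper calls; each integration touched in this diff keeps its own steps, schemas, and abort reasons.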
@@ -188,18 +179,16 @@ async def async_step_hassio_confirm( @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> MotionEyeOptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> MotionEyeOptionsFlow: """Get the Hyperion Options flow.""" - return MotionEyeOptionsFlow(config_entry) + return MotionEyeOptionsFlow() class MotionEyeOptionsFlow(OptionsFlow): """motionEye options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize a motionEye options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -210,14 +199,14 @@ async def async_step_init( schema: dict[vol.Marker, type] = { vol.Required( CONF_WEBHOOK_SET, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_WEBHOOK_SET, DEFAULT_WEBHOOK_SET, ), ): bool, vol.Required( CONF_WEBHOOK_SET_OVERWRITE, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_WEBHOOK_SET_OVERWRITE, DEFAULT_WEBHOOK_SET_OVERWRITE, ), @@ -228,9 +217,9 @@ async def async_step_init( # The input URL is not validated as being a URL, to allow for the possibility # the template input won't be a valid URL until after it's rendered description: dict[str, str] | None = None - if CONF_STREAM_URL_TEMPLATE in self._config_entry.options: + if CONF_STREAM_URL_TEMPLATE in self.config_entry.options: description = { - "suggested_value": self._config_entry.options[ + "suggested_value": self.config_entry.options[ CONF_STREAM_URL_TEMPLATE ] } diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 86eeca2017c0a9..907b1a1dd11813 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -76,8 +76,8 @@ DEFAULT_QOS, DEFAULT_RETAIN, DOMAIN, + ENTITY_PLATFORMS, MQTT_CONNECTION_STATE, - RELOADABLE_PLATFORMS, TEMPLATE_ERRORS, ) from .models import ( # noqa: F401 @@ -438,7 +438,7 @@ async def _reload_config(call: ServiceCall) -> None: for entity in list(mqtt_platform.entities.values()) if getattr(entity, "_discovery_data", None) is None and mqtt_platform.config_entry - and mqtt_platform.domain in RELOADABLE_PLATFORMS + and mqtt_platform.domain in ENTITY_PLATFORMS ] await asyncio.gather(*tasks) diff --git a/homeassistant/components/mqtt/abbreviations.py b/homeassistant/components/mqtt/abbreviations.py index 3c1d0abdb667b8..65e24d5d78063d 100644 --- a/homeassistant/components/mqtt/abbreviations.py +++ b/homeassistant/components/mqtt/abbreviations.py @@ -30,6 +30,7 @@ "cmd_on_tpl": "command_on_template", "cmd_t": "command_topic", "cmd_tpl": "command_template", + "cmps": "components", "cod_arm_req": "code_arm_required", "cod_dis_req": "code_disarm_required", "cod_form": "code_format", @@ -45,6 +46,7 @@ "dir_cmd_tpl": "direction_command_template", "dir_stat_t": "direction_state_topic", "dir_val_tpl": "direction_value_template", + "dsp_prc": "display_precision", "dock_cmd_t": "dock_command_topic", "dock_cmd_tpl": "dock_command_template", "e": "encoding", @@ -92,6 +94,7 @@ "min_mirs": "min_mireds", "max_temp": "max_temp", "min_temp": "min_temp", + "migr_discvry": "migrate_discovery", "mode": "mode", "mode_cmd_tpl": "mode_command_template", "mode_cmd_t": "mode_command_topic", @@ -109,6 +112,7 @@ "osc_cmd_tpl": "oscillation_command_template", "osc_stat_t": "oscillation_state_topic", "osc_val_tpl": "oscillation_value_template", + "p": "platform", "pause_cmd_t": 
"pause_command_topic", "pause_mw_cmd_tpl": "pause_command_template", "pct_cmd_t": "percentage_command_topic", diff --git a/homeassistant/components/mqtt/alarm_control_panel.py b/homeassistant/components/mqtt/alarm_control_panel.py index 7f14c65ffb0007..76bac8540a495e 100644 --- a/homeassistant/components/mqtt/alarm_control_panel.py +++ b/homeassistant/components/mqtt/alarm_control_panel.py @@ -7,23 +7,12 @@ import voluptuous as vol import homeassistant.components.alarm_control_panel as alarm -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_CODE, - CONF_NAME, - CONF_VALUE_TEMPLATE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_CODE, CONF_NAME, CONF_VALUE_TEMPLATE from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -182,29 +171,30 @@ def _state_message_received(self, msg: ReceiveMessage) -> None: ) return if payload == PAYLOAD_NONE: - self._attr_state = None + self._attr_alarm_state = None return if payload not in ( - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_PENDING, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + AlarmControlPanelState.DISARMING, + AlarmControlPanelState.TRIGGERED, ): _LOGGER.warning("Received unexpected payload: %s", msg.payload) return - self._attr_state = str(payload) + assert isinstance(payload, str) + self._attr_alarm_state = AlarmControlPanelState(payload) @callback def _prepare_subscribe_topics(self) -> None: """(Re)Subscribe to topics.""" self.add_subscription( - CONF_STATE_TOPIC, self._state_message_received, {"_attr_state"} + CONF_STATE_TOPIC, self._state_message_received, {"_attr_alarm_state"} ) async def _subscribe_topics(self) -> None: diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 4fa8b7db02a8b1..a626e0e5b28a19 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -376,7 +376,9 @@ def __init__( self._simple_subscriptions: defaultdict[str, set[Subscription]] = defaultdict( set ) - self._wildcard_subscriptions: set[Subscription] = set() + # To ensure the wildcard subscriptions order is preserved, we use a dict + # with `None` values instead of a set. + self._wildcard_subscriptions: dict[Subscription, None] = {} # _retained_topics prevents a Subscription from receiving a # retained message more than once per topic. 
This prevents flooding # already active subscribers when new subscribers subscribe to a topic @@ -754,7 +756,7 @@ def _async_track_subscription(self, subscription: Subscription) -> None: if subscription.is_simple_match: self._simple_subscriptions[subscription.topic].add(subscription) else: - self._wildcard_subscriptions.add(subscription) + self._wildcard_subscriptions[subscription] = None @callback def _async_untrack_subscription(self, subscription: Subscription) -> None: @@ -772,7 +774,7 @@ def _async_untrack_subscription(self, subscription: Subscription) -> None: if not simple_subscriptions[topic]: del simple_subscriptions[topic] else: - self._wildcard_subscriptions.remove(subscription) + del self._wildcard_subscriptions[subscription] except (KeyError, ValueError) as exc: raise HomeAssistantError("Can't remove subscription twice") from exc diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index ad41c35e51ad09..6e6b44cd4b886f 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -16,13 +16,7 @@ import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file -from homeassistant.components.hassio import ( - AddonError, - AddonManager, - AddonState, - HassioServiceInfo, - is_hassio, -) +from homeassistant.components.hassio import AddonError, AddonManager, AddonState from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -42,6 +36,7 @@ from homeassistant.core import callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.json import json_dumps from homeassistant.helpers.selector import ( BooleanSelector, @@ -58,6 +53,7 @@ TextSelectorConfig, TextSelectorType, ) +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from .addon import get_addon_manager @@ -210,7 +206,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None _hassio_discovery: dict[str, Any] | None = None _addon_manager: AddonManager @@ -225,7 +220,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> MQTTOptionsFlowHandler: """Get the options flow for this handler.""" - return MQTTOptionsFlowHandler(config_entry) + return MQTTOptionsFlowHandler() async def _async_install_addon(self) -> None: """Install the Mosquitto Mqtt broker add-on.""" @@ -343,9 +338,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if is_hassio(self.hass): # Offer to set up broker add-on if supervisor is available self._addon_manager = get_addon_manager(self.hass) @@ -401,7 +393,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with MQTT broker.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) if is_hassio(self.hass): # Check if entry setup matches the add-on discovery config addon_manager = get_addon_manager(self.hass) @@ -440,18 +431,18 @@ async def async_step_reauth_confirm( """Confirm re-authentication with MQTT broker.""" errors: dict[str, str] = {} - assert self.entry is not None + reauth_entry = self._get_reauth_entry() if 
user_input: substituted_used_data = update_password_from_user_input( - self.entry.data.get(CONF_PASSWORD), user_input + reauth_entry.data.get(CONF_PASSWORD), user_input ) - new_entry_data = {**self.entry.data, **substituted_used_data} + new_entry_data = {**reauth_entry.data, **substituted_used_data} if await self.hass.async_add_executor_job( try_connection, new_entry_data, ): return self.async_update_reload_and_abort( - self.entry, data=new_entry_data + reauth_entry, data=new_entry_data ) errors["base"] = "invalid_auth" @@ -459,7 +450,7 @@ async def async_step_reauth_confirm( schema = self.add_suggested_values_to_schema( REAUTH_SCHEMA, { - CONF_USERNAME: self.entry.data.get(CONF_USERNAME), + CONF_USERNAME: reauth_entry.data.get(CONF_USERNAME), CONF_PASSWORD: PWD_NOT_CHANGED, }, ) @@ -552,11 +543,9 @@ async def async_step_hassio_confirm( class MQTTOptionsFlowHandler(OptionsFlow): """Handle MQTT options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize MQTT options flow.""" - self.config_entry = config_entry self.broker_config: dict[str, str | int] = {} - self.options = config_entry.options async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the MQTT options.""" diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 1e1011cc38121a..9f1c55a54e0c61 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -61,6 +61,7 @@ CONF_CURRENT_TEMP_TEMPLATE = "current_temperature_template" CONF_CURRENT_TEMP_TOPIC = "current_temperature_topic" CONF_ENABLED_BY_DEFAULT = "enabled_by_default" +CONF_ENTITY_PICTURE = "entity_picture" CONF_MODE_COMMAND_TEMPLATE = "mode_command_template" CONF_MODE_COMMAND_TOPIC = "mode_command_topic" CONF_MODE_LIST = "modes" @@ -89,6 +90,7 @@ CONF_CERTIFICATE = "certificate" CONF_CLIENT_KEY = "client_key" CONF_CLIENT_CERT = "client_cert" +CONF_COMPONENTS = "components" CONF_TLS_INSECURE = "tls_insecure" # Device and integration info options @@ -158,7 +160,7 @@ PAYLOAD_EMPTY_JSON = "{}" PAYLOAD_NONE = "None" -RELOADABLE_PLATFORMS = [ +ENTITY_PLATFORMS = [ Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.BUTTON, @@ -189,7 +191,7 @@ TEMPLATE_ERRORS = (jinja2.TemplateError, TemplateError, TypeError, ValueError) -SUPPORTED_COMPONENTS = { +SUPPORTED_COMPONENTS = ( "alarm_control_panel", "binary_sensor", "button", @@ -218,4 +220,4 @@ "vacuum", "valve", "water_heater", -} +) diff --git a/homeassistant/components/mqtt/cover.py b/homeassistant/components/mqtt/cover.py index f53d895ec4f888..0b495663803d77 100644 --- a/homeassistant/components/mqtt/cover.py +++ b/homeassistant/components/mqtt/cover.py @@ -15,6 +15,7 @@ DEVICE_CLASSES_SCHEMA, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -354,9 +355,9 @@ def _update_state(self, state: str | None) -> None: # Reset the state to `unknown` self._attr_is_closed = None else: - self._attr_is_closed = state == STATE_CLOSED - self._attr_is_opening = state == STATE_OPENING - self._attr_is_closing = state == STATE_CLOSING + self._attr_is_closed = state == CoverState.CLOSED + self._attr_is_opening = state == CoverState.OPENING + self._attr_is_closing = state == CoverState.CLOSING @callback def _tilt_message_received(self, msg: ReceiveMessage) -> None: @@ -382,24 +383,24 @@ def _state_message_received(self, msg: ReceiveMessage) -> None: if payload == 
self._config[CONF_STATE_STOPPED]: if self._config.get(CONF_GET_POSITION_TOPIC) is not None: state = ( - STATE_CLOSED + CoverState.CLOSED if self._attr_current_cover_position == DEFAULT_POSITION_CLOSED - else STATE_OPEN + else CoverState.OPEN ) else: state = ( - STATE_CLOSED - if self.state in [STATE_CLOSED, STATE_CLOSING] - else STATE_OPEN + CoverState.CLOSED + if self.state in [CoverState.CLOSED, CoverState.CLOSING] + else CoverState.OPEN ) elif payload == self._config[CONF_STATE_OPENING]: - state = STATE_OPENING + state = CoverState.OPENING elif payload == self._config[CONF_STATE_CLOSING]: - state = STATE_CLOSING + state = CoverState.CLOSING elif payload == self._config[CONF_STATE_OPEN]: - state = STATE_OPEN + state = CoverState.OPEN elif payload == self._config[CONF_STATE_CLOSED]: - state = STATE_CLOSED + state = CoverState.CLOSED elif payload == PAYLOAD_NONE: state = None else: @@ -451,7 +452,9 @@ def _position_message_received(self, msg: ReceiveMessage) -> None: self._attr_current_cover_position = min(100, max(0, percentage_payload)) if self._config.get(CONF_STATE_TOPIC) is None: self._update_state( - STATE_CLOSED if self.current_cover_position == 0 else STATE_OPEN + CoverState.CLOSED + if self.current_cover_position == 0 + else CoverState.OPEN ) @callback @@ -493,7 +496,7 @@ async def async_open_cover(self, **kwargs: Any) -> None: ) if self._optimistic: # Optimistically assume that cover has changed state. - self._update_state(STATE_OPEN) + self._update_state(CoverState.OPEN) if self._config.get(CONF_GET_POSITION_TOPIC): self._attr_current_cover_position = 100 self.async_write_ha_state() @@ -508,7 +511,7 @@ async def async_close_cover(self, **kwargs: Any) -> None: ) if self._optimistic: # Optimistically assume that cover has changed state. - self._update_state(STATE_CLOSED) + self._update_state(CoverState.CLOSED) if self._config.get(CONF_GET_POSITION_TOPIC): self._attr_current_cover_position = 0 self.async_write_ha_state() @@ -609,9 +612,9 @@ async def async_set_cover_position(self, **kwargs: Any) -> None: ) if self._optimistic: self._update_state( - STATE_CLOSED + CoverState.CLOSED if position_percentage <= self._config[CONF_POSITION_CLOSED] - else STATE_OPEN + else CoverState.OPEN ) self._attr_current_cover_position = position_percentage self.async_write_ha_state() diff --git a/homeassistant/components/mqtt/discovery.py b/homeassistant/components/mqtt/discovery.py index af27615e2c0e34..a5ddb3ef4e6713 100644 --- a/homeassistant/components/mqtt/discovery.py +++ b/homeassistant/components/mqtt/discovery.py @@ -4,6 +4,7 @@ import asyncio from collections import deque +from dataclasses import dataclass import functools from itertools import chain import logging @@ -11,15 +12,22 @@ import time from typing import TYPE_CHECKING, Any -from homeassistant.config_entries import ConfigEntry +import voluptuous as vol + +from homeassistant.config_entries import ( + SOURCE_MQTT, + ConfigEntry, + signal_discovered_config_entry_removed, +) from homeassistant.const import CONF_DEVICE, CONF_PLATFORM from homeassistant.core import HassJobType, HomeAssistant, callback +from homeassistant.helpers import discovery_flow import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.service_info.mqtt import MqttServiceInfo +from homeassistant.helpers.service_info.mqtt import MqttServiceInfo, ReceivePayloadType from homeassistant.helpers.typing import DiscoveryInfoType from homeassistant.loader 
import async_get_mqtt from homeassistant.util.json import json_loads_object @@ -32,13 +40,14 @@ ATTR_DISCOVERY_PAYLOAD, ATTR_DISCOVERY_TOPIC, CONF_AVAILABILITY, + CONF_COMPONENTS, CONF_ORIGIN, CONF_TOPIC, DOMAIN, SUPPORTED_COMPONENTS, ) -from .models import DATA_MQTT, MqttOriginInfo, ReceiveMessage -from .schemas import MQTT_ORIGIN_INFO_SCHEMA +from .models import DATA_MQTT, MqttComponentConfig, MqttOriginInfo, ReceiveMessage +from .schemas import DEVICE_DISCOVERY_SCHEMA, MQTT_ORIGIN_INFO_SCHEMA, SHARED_OPTIONS from .util import async_forward_entry_setup_and_setup_discovery ABBREVIATIONS_SET = set(ABBREVIATIONS) @@ -64,13 +73,47 @@ TOPIC_BASE = "~" +CONF_MIGRATE_DISCOVERY = "migrate_discovery" + +MIGRATE_DISCOVERY_SCHEMA = vol.Schema( + {vol.Optional(CONF_MIGRATE_DISCOVERY): True}, +) + class MQTTDiscoveryPayload(dict[str, Any]): """Class to hold and MQTT discovery payload and discovery data.""" + device_discovery: bool = False + migrate_discovery: bool = False discovery_data: DiscoveryInfoType +@dataclass(frozen=True) +class MQTTIntegrationDiscoveryConfig: + """Class to hold an integration discovery payload.""" + + integration: str + msg: ReceiveMessage + + +@callback +def _async_process_discovery_migration(payload: MQTTDiscoveryPayload) -> bool: + """Process a discovery migration request in the discovery payload.""" + # Allow abbreviation + if migr_discvry := (payload.pop("migr_discvry", None)): + payload[CONF_MIGRATE_DISCOVERY] = migr_discvry + if CONF_MIGRATE_DISCOVERY in payload: + try: + MIGRATE_DISCOVERY_SCHEMA(payload) + except vol.Invalid as exc: + _LOGGER.warning(exc) + return False + payload.migrate_discovery = True + payload.clear() + return True + return False + + def clear_discovery_hash(hass: HomeAssistant, discovery_hash: tuple[str, str]) -> None: """Clear entry from already discovered list.""" hass.data[DATA_MQTT].discovery_already_discovered.discard(discovery_hash) @@ -82,36 +125,51 @@ def set_discovery_hash(hass: HomeAssistant, discovery_hash: tuple[str, str]) -> @callback -def async_log_discovery_origin_info( - message: str, discovery_payload: MQTTDiscoveryPayload, level: int = logging.INFO -) -> None: - """Log information about the discovery and origin.""" - if not _LOGGER.isEnabledFor(level): - # bail early if logging is disabled - return +def get_origin_log_string( + discovery_payload: MQTTDiscoveryPayload, *, include_url: bool +) -> str: + """Get the origin information from a discovery payload for logging.""" if CONF_ORIGIN not in discovery_payload: - _LOGGER.log(level, message) - return + return "" origin_info: MqttOriginInfo = discovery_payload[CONF_ORIGIN] sw_version_log = "" if sw_version := origin_info.get("sw_version"): sw_version_log = f", version: {sw_version}" support_url_log = "" - if support_url := origin_info.get("support_url"): + if include_url and (support_url := get_origin_support_url(discovery_payload)): support_url_log = f", support URL: {support_url}" + return f" from external application {origin_info["name"]}{sw_version_log}{support_url_log}" + + +@callback +def get_origin_support_url(discovery_payload: MQTTDiscoveryPayload) -> str | None: + """Get the origin information support URL from a discovery payload.""" + if CONF_ORIGIN not in discovery_payload: + return "" + origin_info: MqttOriginInfo = discovery_payload[CONF_ORIGIN] + return origin_info.get("support_url") + + +@callback +def async_log_discovery_origin_info( + message: str, discovery_payload: MQTTDiscoveryPayload, level: int = logging.INFO +) -> None: + """Log information about the
discovery and origin.""" + # We only log origin info once per device discovery + if not _LOGGER.isEnabledFor(level): + # bail out early if logging is disabled + return _LOGGER.log( level, - "%s from external application %s%s%s", + "%s%s", message, - origin_info["name"], - sw_version_log, - support_url_log, + get_origin_log_string(discovery_payload, include_url=True), ) @callback def _replace_abbreviations( - payload: Any | dict[str, Any], + payload: dict[str, Any] | str, abbreviations: dict[str, str], abbreviations_set: set[str], ) -> None: @@ -123,11 +181,20 @@ def _replace_abbreviations( @callback -def _replace_all_abbreviations(discovery_payload: Any | dict[str, Any]) -> None: +def _replace_all_abbreviations( + discovery_payload: dict[str, Any], component_only: bool = False +) -> None: """Replace all abbreviations in an MQTT discovery payload.""" _replace_abbreviations(discovery_payload, ABBREVIATIONS, ABBREVIATIONS_SET) + if CONF_AVAILABILITY in discovery_payload: + for availability_conf in cv.ensure_list(discovery_payload[CONF_AVAILABILITY]): + _replace_abbreviations(availability_conf, ABBREVIATIONS, ABBREVIATIONS_SET) + + if component_only: + return + if CONF_ORIGIN in discovery_payload: _replace_abbreviations( discovery_payload[CONF_ORIGIN], @@ -142,13 +209,15 @@ def _replace_all_abbreviations(discovery_payload: Any | dict[str, Any]) -> None: DEVICE_ABBREVIATIONS_SET, ) - if CONF_AVAILABILITY in discovery_payload: - for availability_conf in cv.ensure_list(discovery_payload[CONF_AVAILABILITY]): - _replace_abbreviations(availability_conf, ABBREVIATIONS, ABBREVIATIONS_SET) + if CONF_COMPONENTS in discovery_payload: + if not isinstance(discovery_payload[CONF_COMPONENTS], dict): + return + for comp_conf in discovery_payload[CONF_COMPONENTS].values(): + _replace_all_abbreviations(comp_conf, component_only=True) @callback -def _replace_topic_base(discovery_payload: dict[str, Any]) -> None: +def _replace_topic_base(discovery_payload: MQTTDiscoveryPayload) -> None: """Replace topic base in MQTT discovery data.""" base = discovery_payload.pop(TOPIC_BASE) for key, value in discovery_payload.items(): @@ -168,6 +237,79 @@ def _replace_topic_base(discovery_payload: dict[str, Any]) -> None: availability_conf[CONF_TOPIC] = f"{topic[:-1]}{base}" +@callback +def _generate_device_config( + hass: HomeAssistant, + object_id: str, + node_id: str | None, + migrate_discovery: bool = False, +) -> MQTTDiscoveryPayload: + """Generate a cleanup or discovery migration message on device cleanup. + + If an empty payload, or a migrate discovery request is received for a device, + we forward an empty payload for all previously discovered components. + """ + mqtt_data = hass.data[DATA_MQTT] + device_node_id: str = f"{node_id} {object_id}" if node_id else object_id + config = MQTTDiscoveryPayload({CONF_DEVICE: {}, CONF_COMPONENTS: {}}) + config.migrate_discovery = migrate_discovery + comp_config = config[CONF_COMPONENTS] + for platform, discover_id in mqtt_data.discovery_already_discovered: + ids = discover_id.split(" ") + component_node_id = ids.pop(0) + component_object_id = " ".join(ids) + if not ids: + continue + if device_node_id == component_node_id: + comp_config[component_object_id] = {CONF_PLATFORM: platform} + + return config if comp_config else MQTTDiscoveryPayload({}) + + +@callback +def _parse_device_payload( + hass: HomeAssistant, + payload: ReceivePayloadType, + object_id: str, + node_id: str | None, +) -> MQTTDiscoveryPayload: + """Parse a device discovery payload. 
+ + The device discovery payload is translated into the config payloads for every single + component inside the device-based configuration. + An empty payload is translated into a cleanup, which forwards an empty payload to all + removed components. + """ + device_payload = MQTTDiscoveryPayload() + if payload == "": + if not (device_payload := _generate_device_config(hass, object_id, node_id)): + _LOGGER.warning( + "No device components to cleanup for %s, node_id '%s'", + object_id, + node_id, + ) + return device_payload + try: + device_payload = MQTTDiscoveryPayload(json_loads_object(payload)) + except ValueError: + _LOGGER.warning("Unable to parse JSON %s: '%s'", object_id, payload) + return device_payload + if _async_process_discovery_migration(device_payload): + return _generate_device_config(hass, object_id, node_id, migrate_discovery=True) + _replace_all_abbreviations(device_payload) + try: + DEVICE_DISCOVERY_SCHEMA(device_payload) + except vol.Invalid as exc: + _LOGGER.warning( + "Invalid MQTT device discovery payload for %s, %s: '%s'", + object_id, + exc, + payload, + ) + return MQTTDiscoveryPayload({}) + return device_payload + + @callback def _valid_origin_info(discovery_payload: MQTTDiscoveryPayload) -> bool: """Parse and validate origin info from a single component discovery payload.""" @@ -185,13 +327,37 @@ def _valid_origin_info(discovery_payload: MQTTDiscoveryPayload) -> bool: return True +@callback +def _merge_common_device_options( + component_config: MQTTDiscoveryPayload, device_config: dict[str, Any] +) -> None: + """Merge common device options with the component config options. + + Common options are: + CONF_AVAILABILITY, + CONF_AVAILABILITY_MODE, + CONF_AVAILABILITY_TEMPLATE, + CONF_AVAILABILITY_TOPIC, + CONF_COMMAND_TOPIC, + CONF_PAYLOAD_AVAILABLE, + CONF_PAYLOAD_NOT_AVAILABLE, + CONF_STATE_TOPIC, + Common options in the body of the device-based config are inherited into + the component, unless the option is explicitly specified at component level; + in that case the option at component level overrides the common option. + """ + for option in SHARED_OPTIONS: + if option in device_config and option not in component_config: + component_config[option] = device_config.get(option) + + async def async_start( # noqa: C901 hass: HomeAssistant, discovery_topic: str, config_entry: ConfigEntry ) -> None: """Start MQTT Discovery.""" mqtt_data = hass.data[DATA_MQTT] platform_setup_lock: dict[str, asyncio.Lock] = {} - integration_discovery_messages: dict[str, int] = {} + integration_discovery_messages: dict[str, MQTTIntegrationDiscoveryConfig] = {} @callback def _async_add_component(discovery_payload: MQTTDiscoveryPayload) -> None: @@ -229,8 +395,7 @@ def async_discovery_message_received(msg: ReceiveMessage) -> None: # noqa: C901 _LOGGER.warning( ( "Received message on illegal discovery topic '%s'. The topic" - " contains " - "not allowed characters. For more information see " + " contains not allowed characters. For more information see " "https://www.home-assistant.io/integrations/mqtt/#discovery-topic" ), topic, @@ -239,51 +404,118 @@ def async_discovery_message_received(msg: ReceiveMessage) -> None: # noqa: C901 component, node_id, object_id = match.groups() - if payload: + discovered_components: list[MqttComponentConfig] = [] + if component == CONF_DEVICE: + # Process device-based discovery message and regenerate + # cleanup config for all the components that are being removed.
+ # This is done when a component in the device config is omitted and detected + # as being removed, or when the device config update payload is empty. + # In that case this will regenerate a cleanup message for all already + # discovered components that were linked to the initial device discovery. + device_discovery_payload = _parse_device_payload( + hass, payload, object_id, node_id + ) + if not device_discovery_payload: + return + device_config: dict[str, Any] + origin_config: dict[str, Any] | None + component_configs: dict[str, dict[str, Any]] + device_config = device_discovery_payload[CONF_DEVICE] + origin_config = device_discovery_payload.get(CONF_ORIGIN) + component_configs = device_discovery_payload[CONF_COMPONENTS] + for component_id, config in component_configs.items(): + component = config.pop(CONF_PLATFORM) + # The object_id in the device discovery topic is the unique identifier. + # It is used as node_id for the components it contains. + component_node_id = object_id + # The component_id in the discovery payload is used as object_id. + # If we have an additional node_id in the discovery topic, + # we extend the component_id with it. + component_object_id = ( + f"{node_id} {component_id}" if node_id else component_id + ) + # We add a wrapper to the discovery payload with the discovery data. + # If the dict is empty after removing the platform, the payload is + # assumed to remove the existing config and we do not want to add + # device or origin or shared availability attributes. + if discovery_payload := MQTTDiscoveryPayload(config): + discovery_payload[CONF_DEVICE] = device_config + discovery_payload[CONF_ORIGIN] = origin_config + # Only assign shared config options + # when they are not set at entity level + _merge_common_device_options( + discovery_payload, device_discovery_payload + ) + discovery_payload.device_discovery = True + discovery_payload.migrate_discovery = ( + device_discovery_payload.migrate_discovery + ) + discovered_components.append( + MqttComponentConfig( + component, + component_object_id, + component_node_id, + discovery_payload, + ) + ) + _LOGGER.debug( + "Process device discovery payload %s", device_discovery_payload + ) + device_discovery_id = f"{node_id} {object_id}" if node_id else object_id + message = f"Processing device discovery for '{device_discovery_id}'" + async_log_discovery_origin_info( + message, MQTTDiscoveryPayload(device_discovery_payload) + ) + + else: + # Process component-based discovery message try: - discovery_payload = MQTTDiscoveryPayload(json_loads_object(payload)) + discovery_payload = MQTTDiscoveryPayload( + json_loads_object(payload) if payload else {} + ) except ValueError: _LOGGER.warning("Unable to parse JSON %s: '%s'", object_id, payload) return - _replace_all_abbreviations(discovery_payload) - if not _valid_origin_info(discovery_payload): - return + if not _async_process_discovery_migration(discovery_payload): + _replace_all_abbreviations(discovery_payload) + if not _valid_origin_info(discovery_payload): + return + discovered_components.append( + MqttComponentConfig(component, object_id, node_id, discovery_payload) + ) + + discovery_pending_discovered = mqtt_data.discovery_pending_discovered + for component_config in discovered_components: + component = component_config.component + node_id = component_config.node_id + object_id = component_config.object_id + discovery_payload = component_config.discovery_payload + if TOPIC_BASE in discovery_payload: _replace_topic_base(discovery_payload) - else: - discovery_payload =
MQTTDiscoveryPayload({}) - # If present, the node_id will be included in the discovered object id - discovery_id = f"{node_id} {object_id}" if node_id else object_id - discovery_hash = (component, discovery_id) + # If present, the node_id will be included in the discovery_id. + discovery_id = f"{node_id} {object_id}" if node_id else object_id + discovery_hash = (component, discovery_id) - if discovery_payload: # Attach MQTT topic to the payload, used for debug prints - setattr( - discovery_payload, - "__configuration_source__", - f"MQTT (topic: '{topic}')", - ) - discovery_data = { + discovery_payload.discovery_data = { ATTR_DISCOVERY_HASH: discovery_hash, ATTR_DISCOVERY_PAYLOAD: discovery_payload, ATTR_DISCOVERY_TOPIC: topic, } - setattr(discovery_payload, "discovery_data", discovery_data) - - discovery_payload[CONF_PLATFORM] = "mqtt" - if discovery_hash in mqtt_data.discovery_pending_discovered: - pending = mqtt_data.discovery_pending_discovered[discovery_hash]["pending"] - pending.appendleft(discovery_payload) - _LOGGER.debug( - "Component has already been discovered: %s %s, queuing update", - component, - discovery_id, - ) - return + if discovery_hash in discovery_pending_discovered: + pending = discovery_pending_discovered[discovery_hash]["pending"] + pending.appendleft(discovery_payload) + _LOGGER.debug( + "Component has already been discovered: %s %s, queuing update", + component, + discovery_id, + ) + return - async_process_discovery_payload(component, discovery_id, discovery_payload) + async_process_discovery_payload(component, discovery_id, discovery_payload) @callback def async_process_discovery_payload( @@ -291,7 +523,7 @@ def async_process_discovery_payload( ) -> None: """Process the payload of a new discovery.""" - _LOGGER.debug("Process discovery payload %s", payload) + _LOGGER.debug("Process component discovery payload %s", payload) discovery_hash = (component, discovery_id) already_discovered = discovery_hash in mqtt_data.discovery_already_discovered @@ -348,6 +580,8 @@ def discovery_done(_: Any) -> None: 0, job_type=HassJobType.Callback, ) + # Subscribe to platform discovery wildcard topics first, + # and then subscribe to device discovery wildcard topics.
for topic in chain( ( f"{discovery_topic}/{component}/+/config" @@ -357,6 +591,10 @@ def discovery_done(_: Any) -> None: f"{discovery_topic}/{component}/+/+/config" for component in SUPPORTED_COMPONENTS ), + ( + f"{discovery_topic}/device/+/config", + f"{discovery_topic}/device/+/+/config", + ), ) ] @@ -364,13 +602,39 @@ def discovery_done(_: Any) -> None: mqtt_integrations = await async_get_mqtt(hass) integration_unsubscribe = mqtt_data.integration_unsubscribe + async def _async_handle_config_entry_removed(entry: ConfigEntry) -> None: + """Handle integration config entry changes.""" + for discovery_key in entry.discovery_keys[DOMAIN]: + if ( + discovery_key.version != 1 + or not isinstance(discovery_key.key, str) + or discovery_key.key not in integration_discovery_messages + ): + continue + topic = discovery_key.key + discovery_message = integration_discovery_messages[topic] + del integration_discovery_messages[topic] + _LOGGER.debug("Rediscover service on topic %s", topic) + # Initiate re-discovery + await async_integration_message_received( + discovery_message.integration, discovery_message.msg + ) + + mqtt_data.discovery_unsubscribe.append( + async_dispatcher_connect( + hass, + signal_discovered_config_entry_removed(DOMAIN), + _async_handle_config_entry_removed, + ) + ) + async def async_integration_message_received( integration: str, msg: ReceiveMessage ) -> None: """Process the received message.""" if ( msg.topic in integration_discovery_messages - and integration_discovery_messages[msg.topic] == hash(msg.payload) + and integration_discovery_messages[msg.topic].msg.payload == msg.payload ): _LOGGER.debug( "Ignoring already processed discovery message for '%s' on topic %s: %s", @@ -393,14 +657,23 @@ async def async_integration_message_received( subscribed_topic=msg.subscribed_topic, timestamp=msg.timestamp, ) - await hass.config_entries.flow.async_init( - integration, context={"source": DOMAIN}, data=data + discovery_key = discovery_flow.DiscoveryKey( + domain=DOMAIN, key=msg.topic, version=1 + ) + discovery_flow.async_create_flow( + hass, + integration, + {"source": SOURCE_MQTT}, + data, + discovery_key=discovery_key, ) if msg.payload: # Update the last discovered config message - integration_discovery_messages[msg.topic] = hash(msg.payload) + integration_discovery_messages[msg.topic] = ( + MQTTIntegrationDiscoveryConfig(integration=integration, msg=msg) + ) elif msg.topic in integration_discovery_messages: - # Cleanup hash if discovery payload is empty + # Cleanup cache if discovery payload is empty del integration_discovery_messages[msg.topic] integration_unsubscribe.update( diff --git a/homeassistant/components/mqtt/entity.py b/homeassistant/components/mqtt/entity.py index 5845dae12e2808..46b2c9e1d42b5e 100644 --- a/homeassistant/components/mqtt/entity.py +++ b/homeassistant/components/mqtt/entity.py @@ -76,6 +76,7 @@ CONF_CONNECTIONS, CONF_ENABLED_BY_DEFAULT, CONF_ENCODING, + CONF_ENTITY_PICTURE, CONF_HW_VERSION, CONF_IDENTIFIERS, CONF_JSON_ATTRS_TEMPLATE, @@ -103,6 +104,8 @@ MQTT_DISCOVERY_UPDATED, MQTTDiscoveryPayload, clear_discovery_hash, + get_origin_log_string, + get_origin_support_url, set_discovery_hash, ) from .models import ( @@ -590,6 +593,7 @@ async def cleanup_device_registry( entity_registry = er.async_get(hass) if ( device_id + and device_id not in device_registry.deleted_devices and config_entry_id and not er.async_entries_for_device( entity_registry, device_id, include_disabled_entities=False @@ -671,6 +675,7 @@ def __init__( self._config_entry = config_entry 
self._config_entry_id = config_entry.entry_id self._skip_device_removal: bool = False + self._migrate_discovery: str | None = None discovery_hash = get_discovery_hash(discovery_data) self._remove_discovery_updated = async_dispatcher_connect( @@ -703,12 +708,95 @@ async def async_discovery_update( ) -> None: """Handle discovery update.""" discovery_hash = get_discovery_hash(self._discovery_data) + # Start discovery migration or rollback if migrate_discovery flag is set + # and the discovery topic is valid and not yet migrating + if ( + discovery_payload.migrate_discovery + and self._migrate_discovery is None + and self._discovery_data[ATTR_DISCOVERY_TOPIC] + == discovery_payload.discovery_data[ATTR_DISCOVERY_TOPIC] + ): + self._migrate_discovery = self._discovery_data[ATTR_DISCOVERY_TOPIC] + discovery_hash = self._discovery_data[ATTR_DISCOVERY_HASH] + origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + action = "Rollback" if discovery_payload.device_discovery else "Migration" + schema_type = "platform" if discovery_payload.device_discovery else "device" + _LOGGER.info( + "%s to MQTT %s discovery schema started for %s '%s'" + "%s on topic %s. To complete %s, publish a %s discovery " + "message with %s '%s'. After completed %s, " + "publish an empty (retained) payload to %s", + action, + schema_type, + discovery_hash[0], + discovery_hash[1], + origin_info, + self._migrate_discovery, + action.lower(), + schema_type, + discovery_hash[0], + discovery_hash[1], + action.lower(), + self._migrate_discovery, + ) + + # Cleanup platform resources + await self.async_tear_down() + # Unregister and clean discovery + stop_discovery_updates( + self.hass, self._discovery_data, self._remove_discovery_updated + ) + send_discovery_done(self.hass, self._discovery_data) + return + _LOGGER.debug( "Got update for %s with hash: %s '%s'", self.log_name, discovery_hash, discovery_payload, ) + new_discovery_topic = discovery_payload.discovery_data[ATTR_DISCOVERY_TOPIC] + + # Abort early if an update is not received via the registered discovery topic. + # This can happen if a device and single component discovery payload + # share the same discovery ID. 
+ + if self._discovery_data[ATTR_DISCOVERY_TOPIC] != new_discovery_topic: + # Prevent illegal updates + old_origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + new_origin_info = get_origin_log_string( + discovery_payload.discovery_data[ATTR_DISCOVERY_PAYLOAD], + include_url=False, + ) + new_origin_support_url = get_origin_support_url( + discovery_payload.discovery_data[ATTR_DISCOVERY_PAYLOAD] + ) + if new_origin_support_url: + get_support = f"for support visit {new_origin_support_url}" + else: + get_support = ( + "for documentation on migration to device schema or rollback to " + "discovery schema, visit https://www.home-assistant.io/integrations/" + "mqtt/#migration-from-single-component-to-device-based-discovery" + ) + _LOGGER.warning( + "Received a conflicting MQTT discovery message for %s '%s' which was " + "previously discovered on topic %s%s; the conflicting discovery " + "message was received on topic %s%s; %s", + discovery_hash[0], + discovery_hash[1], + self._discovery_data[ATTR_DISCOVERY_TOPIC], + old_origin_info, + new_discovery_topic, + new_origin_info, + get_support, + ) + send_discovery_done(self.hass, self._discovery_data) + return + if ( discovery_payload and discovery_payload != self._discovery_data[ATTR_DISCOVERY_PAYLOAD] @@ -805,6 +893,7 @@ def __init__( mqtt_data = hass.data[DATA_MQTT] self._registry_hooks = mqtt_data.discovery_registry_hooks discovery_hash: tuple[str, str] = discovery_data[ATTR_DISCOVERY_HASH] + self._migrate_discovery: str | None = None if discovery_hash in self._registry_hooks: self._registry_hooks.pop(discovery_hash)() @@ -862,7 +951,12 @@ async def _async_process_discovery_update_and_remove(self) -> None: if TYPE_CHECKING: assert self._discovery_data self._cleanup_discovery_on_remove() - await self._async_remove_state_and_registry_entry() + if self._migrate_discovery is None: + # Unload and cleanup registry + await self._async_remove_state_and_registry_entry() + else: + # Only unload the entity + await self.async_remove(force_remove=True) send_discovery_done(self.hass, self._discovery_data) @callback @@ -877,18 +971,102 @@ def _async_discovery_callback(self, payload: MQTTDiscoveryPayload) -> None: """ if TYPE_CHECKING: assert self._discovery_data - discovery_hash: tuple[str, str] = self._discovery_data[ATTR_DISCOVERY_HASH] + discovery_hash = get_discovery_hash(self._discovery_data) + # Start discovery migration or rollback if migrate_discovery flag is set + # and the discovery topic is valid and not yet migrating + if ( + payload.migrate_discovery + and self._migrate_discovery is None + and self._discovery_data[ATTR_DISCOVERY_TOPIC] + == payload.discovery_data[ATTR_DISCOVERY_TOPIC] + ): + if self.unique_id is None or self.device_info is None: + _LOGGER.error( + "Discovery migration is not possible " + "for entity %s on topic %s.
A unique_id " + "and device context is required, got unique_id: %s, device: %s", + self.entity_id, + self._discovery_data[ATTR_DISCOVERY_TOPIC], + self.unique_id, + self.device_info, + ) + send_discovery_done(self.hass, self._discovery_data) + return + + self._migrate_discovery = self._discovery_data[ATTR_DISCOVERY_TOPIC] + discovery_hash = self._discovery_data[ATTR_DISCOVERY_HASH] + origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + action = "Rollback" if payload.device_discovery else "Migration" + schema_type = "platform" if payload.device_discovery else "device" + _LOGGER.info( + "%s to MQTT %s discovery schema started for entity %s" + "%s on topic %s. To complete %s, publish a %s discovery " + "message with %s entity '%s'. After completed %s, " + "publish an empty (retained) payload to %s", + action, + schema_type, + self.entity_id, + origin_info, + self._migrate_discovery, + action.lower(), + schema_type, + discovery_hash[0], + discovery_hash[1], + action.lower(), + self._migrate_discovery, + ) + old_payload = self._discovery_data[ATTR_DISCOVERY_PAYLOAD] _LOGGER.debug( "Got update for entity with hash: %s '%s'", discovery_hash, payload, ) - old_payload: DiscoveryInfoType - old_payload = self._discovery_data[ATTR_DISCOVERY_PAYLOAD] + new_discovery_topic = payload.discovery_data[ATTR_DISCOVERY_TOPIC] + # Abort early if an update is not received via the registered discovery topic. + # This can happen if a device and single component discovery payload + # share the same discovery ID. + if self._discovery_data[ATTR_DISCOVERY_TOPIC] != new_discovery_topic: + # Prevent illegal updates + old_origin_info = get_origin_log_string( + self._discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + new_origin_info = get_origin_log_string( + payload.discovery_data[ATTR_DISCOVERY_PAYLOAD], include_url=False + ) + new_origin_support_url = get_origin_support_url( + payload.discovery_data[ATTR_DISCOVERY_PAYLOAD] + ) + if new_origin_support_url: + get_support = f"for support visit {new_origin_support_url}" + else: + get_support = ( + "for documentation on migration to device schema or rollback to " + "discovery schema, visit https://www.home-assistant.io/integrations/" + "mqtt/#migration-from-single-component-to-device-based-discovery" + ) + _LOGGER.warning( + "Received a conflicting MQTT discovery message for entity %s; the " + "entity was previously discovered on topic %s%s; the conflicting " + "discovery message was received on topic %s%s; %s", + self.entity_id, + self._discovery_data[ATTR_DISCOVERY_TOPIC], + old_origin_info, + new_discovery_topic, + new_origin_info, + get_support, + ) + send_discovery_done(self.hass, self._discovery_data) + return + debug_info.update_entity_discovery_data(self.hass, payload, self.entity_id) if not payload: # Empty payload: Remove component - _LOGGER.info("Removing component: %s", self.entity_id) + if self._migrate_discovery is None: + _LOGGER.info("Removing component: %s", self.entity_id) + else: + _LOGGER.info("Unloading component: %s", self.entity_id) self.hass.async_create_task( self._async_process_discovery_update_and_remove() ) @@ -1211,6 +1389,7 @@ def _setup_common_attributes_from_config(self, config: ConfigType) -> None: config.get(CONF_ENABLED_BY_DEFAULT) ) self._attr_icon = config.get(CONF_ICON) + self._attr_entity_picture = config.get(CONF_ENTITY_PICTURE) # Set the entity name if needed self._set_entity_name(config) diff --git a/homeassistant/components/mqtt/manifest.json 
b/homeassistant/components/mqtt/manifest.json index 34370c82507494..25e98c01aafe24 100644 --- a/homeassistant/components/mqtt/manifest.json +++ b/homeassistant/components/mqtt/manifest.json @@ -1,11 +1,13 @@ { "domain": "mqtt", "name": "MQTT", + "after_dependencies": ["hassio"], "codeowners": ["@emontnemery", "@jbouwh", "@bdraco"], "config_flow": true, "dependencies": ["file_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/mqtt", "iot_class": "local_push", "quality_scale": "platinum", - "requirements": ["paho-mqtt==1.6.1"] + "requirements": ["paho-mqtt==1.6.1"], + "single_config_entry": true } diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index f7abbc29464326..34c1f3049441b2 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -410,5 +410,15 @@ class MqttData: tags: dict[str, dict[str, MQTTTagScanner]] = field(default_factory=dict) +@dataclass(slots=True) +class MqttComponentConfig: + """(component, object_id, node_id, discovery_payload).""" + + component: str + object_id: str + node_id: str | None + discovery_payload: MQTTDiscoveryPayload + + DATA_MQTT: HassKey[MqttData] = HassKey("mqtt") DATA_MQTT_AVAILABLE: HassKey[asyncio.Future[bool]] = HassKey("mqtt_client_available") diff --git a/homeassistant/components/mqtt/schemas.py b/homeassistant/components/mqtt/schemas.py index 67c6b447709cc4..5e942c24738e34 100644 --- a/homeassistant/components/mqtt/schemas.py +++ b/homeassistant/components/mqtt/schemas.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import Any + import voluptuous as vol from homeassistant.const import ( @@ -11,6 +13,7 @@ CONF_MODEL, CONF_MODEL_ID, CONF_NAME, + CONF_PLATFORM, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, ) @@ -25,10 +28,14 @@ CONF_AVAILABILITY_MODE, CONF_AVAILABILITY_TEMPLATE, CONF_AVAILABILITY_TOPIC, + CONF_COMMAND_TOPIC, + CONF_COMPONENTS, CONF_CONFIGURATION_URL, CONF_CONNECTIONS, CONF_DEPRECATED_VIA_HUB, CONF_ENABLED_BY_DEFAULT, + CONF_ENCODING, + CONF_ENTITY_PICTURE, CONF_HW_VERSION, CONF_IDENTIFIERS, CONF_JSON_ATTRS_TEMPLATE, @@ -38,7 +45,9 @@ CONF_ORIGIN, CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, + CONF_QOS, CONF_SERIAL_NUMBER, + CONF_STATE_TOPIC, CONF_SUGGESTED_AREA, CONF_SUPPORT_URL, CONF_SW_VERSION, @@ -46,10 +55,34 @@ CONF_VIA_DEVICE, DEFAULT_PAYLOAD_AVAILABLE, DEFAULT_PAYLOAD_NOT_AVAILABLE, + ENTITY_PLATFORMS, + SUPPORTED_COMPONENTS, ) -from .util import valid_subscribe_topic +from .util import valid_publish_topic, valid_qos_schema, valid_subscribe_topic + +# Device discovery options that are also available at entity component level +SHARED_OPTIONS = [ + CONF_AVAILABILITY, + CONF_AVAILABILITY_MODE, + CONF_AVAILABILITY_TEMPLATE, + CONF_AVAILABILITY_TOPIC, + CONF_COMMAND_TOPIC, + CONF_PAYLOAD_AVAILABLE, + CONF_PAYLOAD_NOT_AVAILABLE, + CONF_STATE_TOPIC, +] -MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema( +MQTT_ORIGIN_INFO_SCHEMA = vol.All( + vol.Schema( + { + vol.Required(CONF_NAME): cv.string, + vol.Optional(CONF_SW_VERSION): cv.string, + vol.Optional(CONF_SUPPORT_URL): cv.configuration_url, + } + ), +) + +_MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema( { vol.Exclusive(CONF_AVAILABILITY_TOPIC, "availability"): valid_subscribe_topic, vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template, @@ -62,7 +95,7 @@ } ) -MQTT_AVAILABILITY_LIST_SCHEMA = vol.Schema( +_MQTT_AVAILABILITY_LIST_SCHEMA = vol.Schema( { vol.Optional(CONF_AVAILABILITY_MODE, default=AVAILABILITY_LATEST): vol.All( cv.string, 
vol.In(AVAILABILITY_MODES) @@ -86,8 +119,8 @@ } ) -MQTT_AVAILABILITY_SCHEMA = MQTT_AVAILABILITY_SINGLE_SCHEMA.extend( - MQTT_AVAILABILITY_LIST_SCHEMA.schema +_MQTT_AVAILABILITY_SCHEMA = _MQTT_AVAILABILITY_SINGLE_SCHEMA.extend( + _MQTT_AVAILABILITY_LIST_SCHEMA.schema ) @@ -137,9 +170,10 @@ def validate_device_has_at_least_one_identifier(value: ConfigType) -> ConfigType ), ) -MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend( +MQTT_ENTITY_COMMON_SCHEMA = _MQTT_AVAILABILITY_SCHEMA.extend( { vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA, + vol.Optional(CONF_ENTITY_PICTURE): cv.url, vol.Optional(CONF_ORIGIN): MQTT_ORIGIN_INFO_SCHEMA, vol.Optional(CONF_ENABLED_BY_DEFAULT, default=True): cv.boolean, vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA, @@ -150,3 +184,35 @@ def validate_device_has_at_least_one_identifier(value: ConfigType) -> ConfigType vol.Optional(CONF_UNIQUE_ID): cv.string, } ) + +_UNIQUE_ID_SCHEMA = vol.Schema( + {vol.Required(CONF_UNIQUE_ID): cv.string}, +).extend({}, extra=True) + + +def check_unique_id(config: dict[str, Any]) -> dict[str, Any]: + """Check if a unique ID is set in case an entity platform is configured.""" + platform = config[CONF_PLATFORM] + if platform in ENTITY_PLATFORMS and len(config.keys()) > 1: + _UNIQUE_ID_SCHEMA(config) + return config + + +_COMPONENT_CONFIG_SCHEMA = vol.All( + vol.Schema( + {vol.Required(CONF_PLATFORM): vol.In(SUPPORTED_COMPONENTS)}, + ).extend({}, extra=True), + check_unique_id, +) + +DEVICE_DISCOVERY_SCHEMA = _MQTT_AVAILABILITY_SCHEMA.extend( + { + vol.Required(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA, + vol.Required(CONF_COMPONENTS): vol.Schema({str: _COMPONENT_CONFIG_SCHEMA}), + vol.Required(CONF_ORIGIN): MQTT_ORIGIN_INFO_SCHEMA, + vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic, + vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic, + vol.Optional(CONF_QOS): valid_qos_schema, + vol.Optional(CONF_ENCODING): cv.string, + } +) diff --git a/homeassistant/components/mqtt/update.py b/homeassistant/components/mqtt/update.py index f7bb9f75dd1f97..8878ff6312753b 100644 --- a/homeassistant/components/mqtt/update.py +++ b/homeassistant/components/mqtt/update.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any, TypedDict, cast +from typing import Any import voluptuous as vol @@ -34,7 +34,7 @@ DEFAULT_NAME = "MQTT Update" -CONF_ENTITY_PICTURE = "entity_picture" +CONF_DISPLAY_PRECISION = "display_precision" CONF_LATEST_VERSION_TEMPLATE = "latest_version_template" CONF_LATEST_VERSION_TOPIC = "latest_version_topic" CONF_PAYLOAD_INSTALL = "payload_install" @@ -47,7 +47,7 @@ { vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic, vol.Optional(CONF_DEVICE_CLASS): vol.Any(DEVICE_CLASSES_SCHEMA, None), - vol.Optional(CONF_ENTITY_PICTURE): cv.string, + vol.Optional(CONF_DISPLAY_PRECISION, default=0): cv.positive_int, vol.Optional(CONF_LATEST_VERSION_TEMPLATE): cv.template, vol.Optional(CONF_LATEST_VERSION_TOPIC): valid_subscribe_topic, vol.Optional(CONF_NAME): vol.Any(cv.string, None), @@ -63,15 +63,18 @@ DISCOVERY_SCHEMA = vol.All(PLATFORM_SCHEMA_MODERN.extend({}, extra=vol.REMOVE_EXTRA)) -class _MqttUpdatePayloadType(TypedDict, total=False): - """Presentation of supported JSON payload to process state updates.""" - - installed_version: str - latest_version: str - title: str - release_summary: str - release_url: str - entity_picture: str +MQTT_JSON_UPDATE_SCHEMA = vol.Schema( + { + vol.Optional("installed_version"): cv.string, + vol.Optional("latest_version"): 
cv.string, + vol.Optional("title"): cv.string, + vol.Optional("release_summary"): cv.string, + vol.Optional("release_url"): cv.url, + vol.Optional("entity_picture"): cv.url, + vol.Optional("in_progress"): cv.boolean, + vol.Optional("update_percentage"): vol.Any(vol.Range(min=0, max=100), None), + } +) async def async_setup_entry( @@ -96,13 +99,12 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): _default_name = DEFAULT_NAME _entity_id_format = update.ENTITY_ID_FORMAT - _entity_picture: str | None @property def entity_picture(self) -> str | None: """Return the entity picture to use in the frontend.""" - if self._entity_picture is not None: - return self._entity_picture + if self._attr_entity_picture is not None: + return self._attr_entity_picture return super().entity_picture @@ -114,10 +116,10 @@ def config_schema() -> VolSchemaType: def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" self._attr_device_class = self._config.get(CONF_DEVICE_CLASS) + self._attr_display_precision = self._config[CONF_DISPLAY_PRECISION] self._attr_release_summary = self._config.get(CONF_RELEASE_SUMMARY) self._attr_release_url = self._config.get(CONF_RELEASE_URL) self._attr_title = self._config.get(CONF_TITLE) - self._entity_picture: str | None = self._config.get(CONF_ENTITY_PICTURE) self._templates = { CONF_VALUE_TEMPLATE: MqttValueTemplate( config.get(CONF_VALUE_TEMPLATE), @@ -142,7 +144,7 @@ def _handle_state_message_received(self, msg: ReceiveMessage) -> None: ) return - json_payload: _MqttUpdatePayloadType = {} + json_payload: dict[str, Any] = {} try: rendered_json_payload = json_loads(payload) if isinstance(rendered_json_payload, dict): @@ -154,7 +156,7 @@ def _handle_state_message_received(self, msg: ReceiveMessage) -> None: rendered_json_payload, msg.topic, ) - json_payload = cast(_MqttUpdatePayloadType, rendered_json_payload) + json_payload = MQTT_JSON_UPDATE_SCHEMA(rendered_json_payload) else: _LOGGER.debug( ( @@ -165,14 +167,27 @@ def _handle_state_message_received(self, msg: ReceiveMessage) -> None: msg.topic, ) json_payload = {"installed_version": str(payload)} + except vol.MultipleInvalid as exc: + _LOGGER.warning( + ( + "Schema violation after processing payload '%s'" + " on topic '%s' for entity '%s': %s" + ), + payload, + msg.topic, + self.entity_id, + exc, + ) + return except JSON_DECODE_EXCEPTIONS: _LOGGER.debug( ( "No valid (JSON) payload detected after processing payload '%s'" - " on topic %s" + " on topic '%s' for entity '%s'" ), payload, msg.topic, + self.entity_id, ) json_payload["installed_version"] = str(payload) @@ -192,7 +207,14 @@ def _handle_state_message_received(self, msg: ReceiveMessage) -> None: self._attr_release_url = json_payload["release_url"] if "entity_picture" in json_payload: - self._entity_picture = json_payload["entity_picture"] + self._attr_entity_picture = json_payload["entity_picture"] + + if "update_percentage" in json_payload: + self._attr_update_percentage = json_payload["update_percentage"] + self._attr_in_progress = self._attr_update_percentage is not None + + if "in_progress" in json_payload: + self._attr_in_progress = json_payload["in_progress"] @callback def _handle_latest_version_received(self, msg: ReceiveMessage) -> None: @@ -209,12 +231,14 @@ def _prepare_subscribe_topics(self) -> None: CONF_STATE_TOPIC, self._handle_state_message_received, { + "_attr_entity_picture", + "_attr_in_progress", "_attr_installed_version", "_attr_latest_version", "_attr_title", "_attr_release_summary", "_attr_release_url", - 
"_entity_picture", + "_attr_update_percentage", }, ) self.add_subscription( @@ -237,7 +261,7 @@ async def async_install( @property def supported_features(self) -> UpdateEntityFeature: """Return the list of supported features.""" - support = UpdateEntityFeature(0) + support = UpdateEntityFeature(UpdateEntityFeature.PROGRESS) if self._config.get(CONF_COMMAND_TOPIC) is not None: support |= UpdateEntityFeature.INSTALL diff --git a/homeassistant/components/mullvad/__init__.py b/homeassistant/components/mullvad/__init__.py index b79b9b4aa6a9a8..f2f6f39c96f150 100644 --- a/homeassistant/components/mullvad/__init__.py +++ b/homeassistant/components/mullvad/__init__.py @@ -27,6 +27,7 @@ async def async_get_mullvad_api_data(): coordinator = DataUpdateCoordinator( hass, logging.getLogger(__name__), + config_entry=entry, name=DOMAIN, update_method=async_get_mullvad_api_data, update_interval=timedelta(minutes=1), diff --git a/homeassistant/components/music_assistant/__init__.py b/homeassistant/components/music_assistant/__init__.py new file mode 100644 index 00000000000000..9f0fc1aad27601 --- /dev/null +++ b/homeassistant/components/music_assistant/__init__.py @@ -0,0 +1,164 @@ +"""Music Assistant (music-assistant.io) integration.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from music_assistant_client import MusicAssistantClient +from music_assistant_client.exceptions import CannotConnect, InvalidServerVersion +from music_assistant_models.enums import EventType +from music_assistant_models.errors import MusicAssistantError + +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.core import Event, HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) + +from .const import DOMAIN, LOGGER + +if TYPE_CHECKING: + from music_assistant_models.event import MassEvent + +type MusicAssistantConfigEntry = ConfigEntry[MusicAssistantEntryData] + +PLATFORMS = [Platform.MEDIA_PLAYER] + +CONNECT_TIMEOUT = 10 +LISTEN_READY_TIMEOUT = 30 + + +@dataclass +class MusicAssistantEntryData: + """Hold Mass data for the config entry.""" + + mass: MusicAssistantClient + listen_task: asyncio.Task + + +async def async_setup_entry( + hass: HomeAssistant, entry: MusicAssistantConfigEntry +) -> bool: + """Set up from a config entry.""" + http_session = async_get_clientsession(hass, verify_ssl=False) + mass_url = entry.data[CONF_URL] + mass = MusicAssistantClient(mass_url, http_session) + + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await mass.connect() + except (TimeoutError, CannotConnect) as err: + raise ConfigEntryNotReady( + f"Failed to connect to music assistant server {mass_url}" + ) from err + except InvalidServerVersion as err: + async_create_issue( + hass, + DOMAIN, + "invalid_server_version", + is_fixable=False, + severity=IssueSeverity.ERROR, + translation_key="invalid_server_version", + ) + raise ConfigEntryNotReady(f"Invalid server version: {err}") from err + except MusicAssistantError as err: + LOGGER.exception("Failed to connect to music assistant server", exc_info=err) + raise ConfigEntryNotReady( + f"Unknown error connecting to the Music Assistant 
server {mass_url}" + ) from err + + async_delete_issue(hass, DOMAIN, "invalid_server_version") + + async def on_hass_stop(event: Event) -> None: + """Handle incoming stop event from Home Assistant.""" + await mass.disconnect() + + entry.async_on_unload( + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) + ) + + # launch the music assistant client listen task in the background + # use the init_ready event to wait until initialization is done + init_ready = asyncio.Event() + listen_task = asyncio.create_task(_client_listen(hass, entry, mass, init_ready)) + + try: + async with asyncio.timeout(LISTEN_READY_TIMEOUT): + await init_ready.wait() + except TimeoutError as err: + listen_task.cancel() + raise ConfigEntryNotReady("Music Assistant client not ready") from err + + entry.runtime_data = MusicAssistantEntryData(mass, listen_task) + + # If the listen task is already failed, we need to raise ConfigEntryNotReady + if listen_task.done() and (listen_error := listen_task.exception()) is not None: + await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + try: + await mass.disconnect() + finally: + raise ConfigEntryNotReady(listen_error) from listen_error + + # initialize platforms + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + # register listener for removed players + async def handle_player_removed(event: MassEvent) -> None: + """Handle Mass Player Removed event.""" + if event.object_id is None: + return + dev_reg = dr.async_get(hass) + if hass_device := dev_reg.async_get_device({(DOMAIN, event.object_id)}): + dev_reg.async_update_device( + hass_device.id, remove_config_entry_id=entry.entry_id + ) + + entry.async_on_unload( + mass.subscribe(handle_player_removed, EventType.PLAYER_REMOVED) + ) + + return True + + +async def _client_listen( + hass: HomeAssistant, + entry: ConfigEntry, + mass: MusicAssistantClient, + init_ready: asyncio.Event, +) -> None: + """Listen with the client.""" + try: + await mass.start_listening(init_ready) + except MusicAssistantError as err: + if entry.state != ConfigEntryState.LOADED: + raise + LOGGER.error("Failed to listen: %s", err) + except Exception as err: # pylint: disable=broad-except + # We need to guard against unknown exceptions to not crash this task. + if entry.state != ConfigEntryState.LOADED: + raise + LOGGER.exception("Unexpected exception: %s", err) + + if not hass.is_stopping: + LOGGER.debug("Disconnected from server. 
Reloading integration") + hass.async_create_task(hass.config_entries.async_reload(entry.entry_id)) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + if unload_ok: + mass_entry_data: MusicAssistantEntryData = entry.runtime_data + mass_entry_data.listen_task.cancel() + await mass_entry_data.mass.disconnect() + + return unload_ok diff --git a/homeassistant/components/music_assistant/config_flow.py b/homeassistant/components/music_assistant/config_flow.py new file mode 100644 index 00000000000000..fc50a2d654bf7b --- /dev/null +++ b/homeassistant/components/music_assistant/config_flow.py @@ -0,0 +1,137 @@ +"""Config flow for MusicAssistant integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from music_assistant_client import MusicAssistantClient +from music_assistant_client.exceptions import ( + CannotConnect, + InvalidServerVersion, + MusicAssistantClientException, +) +from music_assistant_models.api import ServerInfoMessage +import voluptuous as vol + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers import aiohttp_client + +from .const import DOMAIN, LOGGER + +DEFAULT_URL = "http://mass.local:8095" +DEFAULT_TITLE = "Music Assistant" + + +def get_manual_schema(user_input: dict[str, Any]) -> vol.Schema: + """Return a schema for the manual step.""" + default_url = user_input.get(CONF_URL, DEFAULT_URL) + return vol.Schema( + { + vol.Required(CONF_URL, default=default_url): str, + } + ) + + +async def get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage: + """Validate the user input allows us to connect.""" + async with MusicAssistantClient( + url, aiohttp_client.async_get_clientsession(hass) + ) as client: + if TYPE_CHECKING: + assert client.server_info is not None + return client.server_info + + +class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for MusicAssistant.""" + + VERSION = 1 + + def __init__(self) -> None: + """Set up flow instance.""" + self.server_info: ServerInfoMessage | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a manual configuration.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + self.server_info = await get_server_info( + self.hass, user_input[CONF_URL] + ) + await self.async_set_unique_id( + self.server_info.server_id, raise_on_progress=False + ) + self._abort_if_unique_id_configured( + updates={CONF_URL: self.server_info.base_url}, + reload_on_update=True, + ) + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidServerVersion: + errors["base"] = "invalid_server_version" + except MusicAssistantClientException: + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=DEFAULT_TITLE, + data={ + CONF_URL: self.server_info.base_url, + }, + ) + + return self.async_show_form( + step_id="user", data_schema=get_manual_schema(user_input), errors=errors + ) + + return self.async_show_form(step_id="user", data_schema=get_manual_schema({})) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle a 
discovered Mass server. + + This flow is triggered by the Zeroconf component. It will check if the + host is already configured and delegate to the import step if not. + """ + # abort if discovery info is not what we expect + if "server_id" not in discovery_info.properties: + return self.async_abort(reason="missing_server_id") + # abort if we already have exactly this server_id + # reload the integration if the host got updated + self.server_info = ServerInfoMessage.from_dict(discovery_info.properties) + await self.async_set_unique_id(self.server_info.server_id) + self._abort_if_unique_id_configured( + updates={CONF_URL: self.server_info.base_url}, + reload_on_update=True, + ) + try: + await get_server_info(self.hass, self.server_info.base_url) + except CannotConnect: + return self.async_abort(reason="cannot_connect") + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle user-confirmation of discovered server.""" + if TYPE_CHECKING: + assert self.server_info is not None + if user_input is not None: + return self.async_create_entry( + title=DEFAULT_TITLE, + data={ + CONF_URL: self.server_info.base_url, + }, + ) + self._set_confirm_only() + return self.async_show_form( + step_id="discovery_confirm", + description_placeholders={"url": self.server_info.base_url}, + ) diff --git a/homeassistant/components/music_assistant/const.py b/homeassistant/components/music_assistant/const.py new file mode 100644 index 00000000000000..6512f58b96c74b --- /dev/null +++ b/homeassistant/components/music_assistant/const.py @@ -0,0 +1,18 @@ +"""Constants for Music Assistant Component.""" + +import logging + +DOMAIN = "music_assistant" +DOMAIN_EVENT = f"{DOMAIN}_event" + +DEFAULT_NAME = "Music Assistant" + +ATTR_IS_GROUP = "is_group" +ATTR_GROUP_MEMBERS = "group_members" +ATTR_GROUP_PARENTS = "group_parents" + +ATTR_MASS_PLAYER_TYPE = "mass_player_type" +ATTR_ACTIVE_QUEUE = "active_queue" +ATTR_STREAM_TITLE = "stream_title" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/music_assistant/entity.py b/homeassistant/components/music_assistant/entity.py new file mode 100644 index 00000000000000..f5b6d92b0cfa94 --- /dev/null +++ b/homeassistant/components/music_assistant/entity.py @@ -0,0 +1,86 @@ +"""Base entity model.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from music_assistant_models.enums import EventType +from music_assistant_models.event import MassEvent +from music_assistant_models.player import Player + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + + +class MusicAssistantEntity(Entity): + """Base Entity from Music Assistant Player.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__(self, mass: MusicAssistantClient, player_id: str) -> None: + """Initialize MediaPlayer entity.""" + self.mass = mass + self.player_id = player_id + provider = self.mass.get_provider(self.player.provider) + if TYPE_CHECKING: + assert provider is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, player_id)}, + manufacturer=self.player.device_info.manufacturer or provider.name, + model=self.player.device_info.model or self.player.name, + name=self.player.display_name, + 
configuration_url=f"{mass.server_url}/#/settings/editplayer/{player_id}", + ) + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await self.async_on_update() + self.async_on_remove( + self.mass.subscribe( + self.__on_mass_update, EventType.PLAYER_UPDATED, self.player_id + ) + ) + self.async_on_remove( + self.mass.subscribe( + self.__on_mass_update, + EventType.QUEUE_UPDATED, + ) + ) + + @property + def player(self) -> Player: + """Return the Mass Player attached to this HA entity.""" + return self.mass.players[self.player_id] + + @property + def unique_id(self) -> str | None: + """Return unique id for entity.""" + _base = self.player_id + if hasattr(self, "entity_description"): + return f"{_base}_{self.entity_description.key}" + return _base + + @property + def available(self) -> bool: + """Return availability of entity.""" + return self.player.available and bool(self.mass.connection.connected) + + async def __on_mass_update(self, event: MassEvent) -> None: + """Call when we receive an event from MusicAssistant.""" + if event.event == EventType.QUEUE_UPDATED and event.object_id not in ( + self.player.active_source, + self.player.active_group, + self.player.player_id, + ): + return + await self.async_on_update() + self.async_write_ha_state() + + async def async_on_update(self) -> None: + """Handle player updates.""" diff --git a/homeassistant/components/music_assistant/manifest.json b/homeassistant/components/music_assistant/manifest.json new file mode 100644 index 00000000000000..23401f30abc3ea --- /dev/null +++ b/homeassistant/components/music_assistant/manifest.json @@ -0,0 +1,13 @@ +{ + "domain": "music_assistant", + "name": "Music Assistant", + "after_dependencies": ["media_source", "media_player"], + "codeowners": ["@music-assistant"], + "config_flow": true, + "documentation": "https://music-assistant.io", + "iot_class": "local_push", + "issue_tracker": "https://github.com/music-assistant/hass-music-assistant/issues", + "loggers": ["music_assistant"], + "requirements": ["music-assistant-client==1.0.5"], + "zeroconf": ["_mass._tcp.local."] +} diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py new file mode 100644 index 00000000000000..f0f3675ee32cef --- /dev/null +++ b/homeassistant/components/music_assistant/media_player.py @@ -0,0 +1,557 @@ +"""MediaPlayer platform for Music Assistant integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Awaitable, Callable, Coroutine, Mapping +from contextlib import suppress +import functools +import os +from typing import TYPE_CHECKING, Any + +from music_assistant_models.enums import ( + EventType, + MediaType, + PlayerFeature, + QueueOption, + RepeatMode as MassRepeatMode, +) +from music_assistant_models.errors import MediaNotFoundError, MusicAssistantError +from music_assistant_models.event import MassEvent +from music_assistant_models.media_items import ItemMapping, MediaItemType, Track + +from homeassistant.components import media_source +from homeassistant.components.media_player import ( + ATTR_MEDIA_EXTRA, + BrowseMedia, + MediaPlayerDeviceClass, + MediaPlayerEnqueue, + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, + MediaType as HAMediaType, + RepeatMode, + async_process_play_media_url, +) +from homeassistant.const import STATE_OFF +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import 
entity_registry as er +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.dt import utc_from_timestamp + +from . import MusicAssistantConfigEntry +from .const import ATTR_ACTIVE_QUEUE, ATTR_MASS_PLAYER_TYPE, DOMAIN +from .entity import MusicAssistantEntity + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + from music_assistant_models.player import Player + from music_assistant_models.player_queue import PlayerQueue + +SUPPORTED_FEATURES = ( + MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.STOP + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.SHUFFLE_SET + | MediaPlayerEntityFeature.REPEAT_SET + | MediaPlayerEntityFeature.TURN_ON + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PLAY_MEDIA + | MediaPlayerEntityFeature.VOLUME_STEP + | MediaPlayerEntityFeature.CLEAR_PLAYLIST + | MediaPlayerEntityFeature.BROWSE_MEDIA + | MediaPlayerEntityFeature.MEDIA_ENQUEUE + | MediaPlayerEntityFeature.MEDIA_ANNOUNCE + | MediaPlayerEntityFeature.SEEK +) + +QUEUE_OPTION_MAP = { + # map from HA enqueue options to MA enqueue options + # which are the same but just in case + MediaPlayerEnqueue.ADD: QueueOption.ADD, + MediaPlayerEnqueue.NEXT: QueueOption.NEXT, + MediaPlayerEnqueue.PLAY: QueueOption.PLAY, + MediaPlayerEnqueue.REPLACE: QueueOption.REPLACE, +} + +ATTR_RADIO_MODE = "radio_mode" +ATTR_MEDIA_ID = "media_id" +ATTR_MEDIA_TYPE = "media_type" +ATTR_ARTIST = "artist" +ATTR_ALBUM = "album" +ATTR_URL = "url" +ATTR_USE_PRE_ANNOUNCE = "use_pre_announce" +ATTR_ANNOUNCE_VOLUME = "announce_volume" +ATTR_SOURCE_PLAYER = "source_player" +ATTR_AUTO_PLAY = "auto_play" + + +def catch_musicassistant_error[_R, **P]( + func: Callable[..., Awaitable[_R]], +) -> Callable[..., Coroutine[Any, Any, _R | None]]: + """Check and log commands to players.""" + + @functools.wraps(func) + async def wrapper( + self: MusicAssistantPlayer, *args: P.args, **kwargs: P.kwargs + ) -> _R | None: + """Catch Music Assistant errors and convert to Home Assistant error.""" + try: + return await func(self, *args, **kwargs) + except MusicAssistantError as err: + error_msg = str(err) or err.__class__.__name__ + raise HomeAssistantError(error_msg) from err + + return wrapper + + +async def async_setup_entry( + hass: HomeAssistant, + entry: MusicAssistantConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Music Assistant MediaPlayer(s) from Config Entry.""" + mass = entry.runtime_data.mass + added_ids = set() + + async def handle_player_added(event: MassEvent) -> None: + """Handle Mass Player Added event.""" + if TYPE_CHECKING: + assert event.object_id is not None + if event.object_id in added_ids: + return + added_ids.add(event.object_id) + async_add_entities([MusicAssistantPlayer(mass, event.object_id)]) + + # register listener for new players + entry.async_on_unload(mass.subscribe(handle_player_added, EventType.PLAYER_ADDED)) + mass_players = [] + # add all current players + for player in mass.players: + added_ids.add(player.player_id) + mass_players.append(MusicAssistantPlayer(mass, player.player_id)) + + async_add_entities(mass_players) + + +class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): + """Representation of MediaPlayerEntity from Music Assistant Player.""" + + _attr_name = None + _attr_media_image_remotely_accessible = True + _attr_media_content_type = 
HAMediaType.MUSIC + + def __init__(self, mass: MusicAssistantClient, player_id: str) -> None: + """Initialize MediaPlayer entity.""" + super().__init__(mass, player_id) + self._attr_icon = self.player.icon.replace("mdi-", "mdi:") + self._attr_supported_features = SUPPORTED_FEATURES + if PlayerFeature.SYNC in self.player.supported_features: + self._attr_supported_features |= MediaPlayerEntityFeature.GROUPING + self._attr_device_class = MediaPlayerDeviceClass.SPEAKER + self._prev_time: float = 0 + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + + # we subscribe to player queue time update but we only + # accept a state change on big time jumps (e.g. seeking) + async def queue_time_updated(event: MassEvent) -> None: + if event.object_id != self.player.active_source: + return + if abs((self._prev_time or 0) - event.data) > 5: + await self.async_on_update() + self.async_write_ha_state() + self._prev_time = event.data + + self.async_on_remove( + self.mass.subscribe( + queue_time_updated, + EventType.QUEUE_TIME_UPDATED, + ) + ) + + @property + def active_queue(self) -> PlayerQueue | None: + """Return the active queue for this player (if any).""" + if not self.player.active_source: + return None + return self.mass.player_queues.get(self.player.active_source) + + @property + def extra_state_attributes(self) -> Mapping[str, Any]: + """Return additional state attributes.""" + return { + ATTR_MASS_PLAYER_TYPE: self.player.type.value, + ATTR_ACTIVE_QUEUE: ( + self.active_queue.queue_id if self.active_queue else None + ), + } + + async def async_on_update(self) -> None: + """Handle player updates.""" + if not self.available: + return + player = self.player + active_queue = self.active_queue + # update generic attributes + if player.powered and active_queue is not None: + self._attr_state = MediaPlayerState(active_queue.state.value) + if player.powered and player.state is not None: + self._attr_state = MediaPlayerState(player.state.value) + else: + self._attr_state = MediaPlayerState(STATE_OFF) + group_members_entity_ids: list[str] = [] + if player.group_childs: + # translate MA group_childs to HA group_members as entity id's + entity_registry = er.async_get(self.hass) + group_members_entity_ids = [ + entity_id + for child_id in player.group_childs + if ( + entity_id := entity_registry.async_get_entity_id( + self.platform.domain, DOMAIN, child_id + ) + ) + ] + self._attr_group_members = group_members_entity_ids + self._attr_volume_level = ( + player.volume_level / 100 if player.volume_level is not None else None + ) + self._attr_is_volume_muted = player.volume_muted + self._update_media_attributes(player, active_queue) + self._update_media_image_url(player, active_queue) + + @catch_musicassistant_error + async def async_media_play(self) -> None: + """Send play command to device.""" + await self.mass.players.player_command_play(self.player_id) + + @catch_musicassistant_error + async def async_media_pause(self) -> None: + """Send pause command to device.""" + await self.mass.players.player_command_pause(self.player_id) + + @catch_musicassistant_error + async def async_media_stop(self) -> None: + """Send stop command to device.""" + await self.mass.players.player_command_stop(self.player_id) + + @catch_musicassistant_error + async def async_media_next_track(self) -> None: + """Send next track command to device.""" + await self.mass.players.player_command_next_track(self.player_id) + + @catch_musicassistant_error + async def 
async_media_previous_track(self) -> None: + """Send previous track command to device.""" + await self.mass.players.player_command_previous_track(self.player_id) + + @catch_musicassistant_error + async def async_media_seek(self, position: float) -> None: + """Send seek command.""" + position = int(position) + await self.mass.players.player_command_seek(self.player_id, position) + + @catch_musicassistant_error + async def async_mute_volume(self, mute: bool) -> None: + """Mute the volume.""" + await self.mass.players.player_command_volume_mute(self.player_id, mute) + + @catch_musicassistant_error + async def async_set_volume_level(self, volume: float) -> None: + """Send new volume_level to device.""" + volume = int(volume * 100) + await self.mass.players.player_command_volume_set(self.player_id, volume) + + @catch_musicassistant_error + async def async_volume_up(self) -> None: + """Send new volume_level to device.""" + await self.mass.players.player_command_volume_up(self.player_id) + + @catch_musicassistant_error + async def async_volume_down(self) -> None: + """Send new volume_level to device.""" + await self.mass.players.player_command_volume_down(self.player_id) + + @catch_musicassistant_error + async def async_turn_on(self) -> None: + """Turn on device.""" + await self.mass.players.player_command_power(self.player_id, True) + + @catch_musicassistant_error + async def async_turn_off(self) -> None: + """Turn off device.""" + await self.mass.players.player_command_power(self.player_id, False) + + @catch_musicassistant_error + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set shuffle state.""" + if not self.active_queue: + return + await self.mass.player_queues.queue_command_shuffle( + self.active_queue.queue_id, shuffle + ) + + @catch_musicassistant_error + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set repeat state.""" + if not self.active_queue: + return + await self.mass.player_queues.queue_command_repeat( + self.active_queue.queue_id, MassRepeatMode(repeat) + ) + + @catch_musicassistant_error + async def async_clear_playlist(self) -> None: + """Clear players playlist.""" + if TYPE_CHECKING: + assert self.player.active_source is not None + if queue := self.mass.player_queues.get(self.player.active_source): + await self.mass.player_queues.queue_command_clear(queue.queue_id) + + @catch_musicassistant_error + async def async_play_media( + self, + media_type: MediaType | str, + media_id: str, + enqueue: MediaPlayerEnqueue | None = None, + announce: bool | None = None, + **kwargs: Any, + ) -> None: + """Send the play_media command to the media player.""" + if media_source.is_media_source_id(media_id): + # Handle media_source + sourced_media = await media_source.async_resolve_media( + self.hass, media_id, self.entity_id + ) + media_id = sourced_media.url + media_id = async_process_play_media_url(self.hass, media_id) + + if announce: + await self._async_handle_play_announcement( + media_id, + use_pre_announce=kwargs[ATTR_MEDIA_EXTRA].get("use_pre_announce"), + announce_volume=kwargs[ATTR_MEDIA_EXTRA].get("announce_volume"), + ) + return + + # forward to our advanced play_media handler + await self._async_handle_play_media( + media_id=[media_id], + enqueue=enqueue, + media_type=media_type, + radio_mode=kwargs[ATTR_MEDIA_EXTRA].get(ATTR_RADIO_MODE), + ) + + @catch_musicassistant_error + async def async_join_players(self, group_members: list[str]) -> None: + """Join `group_members` as a player group with the current player.""" + player_ids: list[str] = [] + 
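        # The loop below resolves each Home Assistant media_player entity_id in
        # `group_members` to a Music Assistant player_id by reading the
        # `mass_player_id` attribute from that entity's state; entities without
        # a state or without that attribute are silently skipped before
        # player_command_sync_many is sent to the server.
        # Illustrative example (hypothetical ids, not taken from this patch):
        #   media_player.kitchen with attributes {"mass_player_id": "ab12cd"}
        #   contributes "ab12cd" to player_ids.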
for child_entity_id in group_members: + # resolve HA entity_id to MA player_id + if (hass_state := self.hass.states.get(child_entity_id)) is None: + continue + if (mass_player_id := hass_state.attributes.get("mass_player_id")) is None: + continue + player_ids.append(mass_player_id) + await self.mass.players.player_command_sync_many(self.player_id, player_ids) + + @catch_musicassistant_error + async def async_unjoin_player(self) -> None: + """Remove this player from any group.""" + await self.mass.players.player_command_unsync(self.player_id) + + @catch_musicassistant_error + async def _async_handle_play_media( + self, + media_id: list[str], + enqueue: MediaPlayerEnqueue | QueueOption | None = None, + radio_mode: bool | None = None, + media_type: str | None = None, + ) -> None: + """Send the play_media command to the media player.""" + media_uris: list[str] = [] + item: MediaItemType | ItemMapping | None = None + # work out (all) uri(s) to play + for media_id_str in media_id: + # URL or URI string + if "://" in media_id_str: + media_uris.append(media_id_str) + continue + # try content id as library id + if media_type and media_id_str.isnumeric(): + with suppress(MediaNotFoundError): + item = await self.mass.music.get_item( + MediaType(media_type), media_id_str, "library" + ) + if isinstance(item, MediaItemType | ItemMapping) and item.uri: + media_uris.append(item.uri) + continue + # try local accessible filename + elif await asyncio.to_thread(os.path.isfile, media_id_str): + media_uris.append(media_id_str) + continue + + if not media_uris: + raise HomeAssistantError( + f"Could not resolve {media_id} to playable media item" + ) + + # determine active queue to send the play request to + if TYPE_CHECKING: + assert self.player.active_source is not None + if queue := self.mass.player_queues.get(self.player.active_source): + queue_id = queue.queue_id + else: + queue_id = self.player_id + + await self.mass.player_queues.play_media( + queue_id, + media=media_uris, + option=self._convert_queueoption_to_media_player_enqueue(enqueue), + radio_mode=radio_mode if radio_mode else False, + ) + + @catch_musicassistant_error + async def _async_handle_play_announcement( + self, + url: str, + use_pre_announce: bool | None = None, + announce_volume: int | None = None, + ) -> None: + """Send the play_announcement command to the media player.""" + await self.mass.players.play_announcement( + self.player_id, url, use_pre_announce, announce_volume + ) + + async def async_browse_media( + self, + media_content_type: MediaType | str | None = None, + media_content_id: str | None = None, + ) -> BrowseMedia: + """Implement the websocket media browsing helper.""" + return await media_source.async_browse_media( + self.hass, + media_content_id, + content_filter=lambda item: item.media_content_type.startswith("audio/"), + ) + + def _update_media_image_url( + self, player: Player, queue: PlayerQueue | None + ) -> None: + """Update image URL for the active queue item.""" + if queue is None or queue.current_item is None: + self._attr_media_image_url = None + return + if image_url := self.mass.get_media_item_image_url(queue.current_item): + self._attr_media_image_remotely_accessible = ( + self.mass.server_url not in image_url + ) + self._attr_media_image_url = image_url + return + self._attr_media_image_url = None + + def _update_media_attributes( + self, player: Player, queue: PlayerQueue | None + ) -> None: + """Update media attributes for the active queue item.""" + # pylint: disable=too-many-statements + 
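        # Summary of the method body below: every media_* attribute is cleared
        # first so stale metadata never survives a source change, then the
        # attributes are repopulated from one of three sources: the player's own
        # current_media when an external (non Music Assistant) source is active,
        # just the source name when no queue is active at all, or the active
        # PlayerQueue and its current item in the normal case.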
+        self._attr_media_artist = None
+        self._attr_media_album_artist = None
+        self._attr_media_album_name = None
+        self._attr_media_title = None
+        self._attr_media_content_id = None
+        self._attr_media_duration = None
+        self._attr_media_position = None
+        self._attr_media_position_updated_at = None
+
+        if queue is None and player.current_media:
+            # player has some external source active
+            self._attr_media_content_id = player.current_media.uri
+            self._attr_app_id = player.active_source
+            self._attr_media_title = player.current_media.title
+            self._attr_media_artist = player.current_media.artist
+            self._attr_media_album_name = player.current_media.album
+            self._attr_media_duration = player.current_media.duration
+            # shuffle and repeat are not (yet) supported for external sources
+            self._attr_shuffle = None
+            self._attr_repeat = None
+            if TYPE_CHECKING:
+                assert player.elapsed_time is not None
+            self._attr_media_position = int(player.elapsed_time)
+            self._attr_media_position_updated_at = (
+                utc_from_timestamp(player.elapsed_time_last_updated)
+                if player.elapsed_time_last_updated
+                else None
+            )
+            if TYPE_CHECKING:
+                assert player.elapsed_time is not None
+            self._prev_time = player.elapsed_time
+            return
+
+        if queue is None:
+            # player has no MA queue active
+            self._attr_source = player.active_source
+            self._attr_app_id = player.active_source
+            return
+
+        # player has an MA queue active (either its own queue or some group queue)
+        self._attr_app_id = DOMAIN
+        self._attr_shuffle = queue.shuffle_enabled
+        self._attr_repeat = queue.repeat_mode.value
+        if not (cur_item := queue.current_item):
+            # queue is empty
+            return
+
+        self._attr_media_content_id = queue.current_item.uri
+        self._attr_media_duration = queue.current_item.duration
+        self._attr_media_position = int(queue.elapsed_time)
+        self._attr_media_position_updated_at = utc_from_timestamp(
+            queue.elapsed_time_last_updated
+        )
+        self._prev_time = queue.elapsed_time
+
+        # handle stream title (radio station icy metadata)
+        if (stream_details := cur_item.streamdetails) and stream_details.stream_title:
+            self._attr_media_album_name = cur_item.name
+            if " - " in stream_details.stream_title:
+                stream_title_parts = stream_details.stream_title.split(" - ", 1)
+                self._attr_media_title = stream_title_parts[1]
+                self._attr_media_artist = stream_title_parts[0]
+            else:
+                self._attr_media_title = stream_details.stream_title
+            return
+
+        if not (media_item := cur_item.media_item):
+            # queue is not playing a regular media item (edge case?!)
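            # If the queue item carries no underlying media_item, only its
            # display name is available for the title; the richer artist/album
            # handling further below applies to proper tracks only. A bare URL
            # queued directly is one plausible way to hit this branch, though
            # that is an assumption for illustration, not spelled out in this
            # patch.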
+ self._attr_media_title = cur_item.name + return + + # queue is playing regular media item + self._attr_media_title = media_item.name + # for tracks we can extract more info + if media_item.media_type == MediaType.TRACK: + if TYPE_CHECKING: + assert isinstance(media_item, Track) + self._attr_media_artist = media_item.artist_str + if media_item.version: + self._attr_media_title += f" ({media_item.version})" + if media_item.album: + self._attr_media_album_name = media_item.album.name + self._attr_media_album_artist = getattr( + media_item.album, "artist_str", None + ) + + def _convert_queueoption_to_media_player_enqueue( + self, queue_option: MediaPlayerEnqueue | QueueOption | None + ) -> QueueOption | None: + """Convert a QueueOption to a MediaPlayerEnqueue.""" + if isinstance(queue_option, MediaPlayerEnqueue): + queue_option = QUEUE_OPTION_MAP.get(queue_option) + return queue_option diff --git a/homeassistant/components/music_assistant/strings.json b/homeassistant/components/music_assistant/strings.json new file mode 100644 index 00000000000000..f15b0b1b3065f1 --- /dev/null +++ b/homeassistant/components/music_assistant/strings.json @@ -0,0 +1,51 @@ +{ + "config": { + "step": { + "init": { + "data": { + "url": "URL of the Music Assistant server" + } + }, + "manual": { + "title": "Manually add Music Assistant Server", + "description": "Enter the URL to your already running Music Assistant Server. If you do not have the Music Assistant Server running, you should install it first.", + "data": { + "url": "URL of the Music Assistant server" + } + }, + "discovery_confirm": { + "description": "Do you want to add the Music Assistant Server `{url}` to Home Assistant?", + "title": "Discovered Music Assistant Server" + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_server_version": "The Music Assistant server is not the correct version", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "Configuration flow is already in progress", + "reconfiguration_successful": "Successfully reconfigured the Music Assistant integration.", + "cannot_connect": "Failed to connect", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "issues": { + "invalid_server_version": { + "title": "The Music Assistant server is not the correct version", + "description": "Check if there are updates available for the Music Assistant Server and/or integration." 
+ } + }, + "selector": { + "enqueue": { + "options": { + "play": "Play", + "next": "Play next", + "add": "Add to queue", + "replace": "Play now and clear queue", + "replace_next": "Play next and clear queue" + } + } + } +} diff --git a/homeassistant/components/mutesync/__init__.py b/homeassistant/components/mutesync/__init__.py index 75eefaf6784a87..d5d2e3414d566b 100644 --- a/homeassistant/components/mutesync/__init__.py +++ b/homeassistant/components/mutesync/__init__.py @@ -45,6 +45,7 @@ async def update_data(): update_coordinator.DataUpdateCoordinator( hass, logging.getLogger(__name__), + config_entry=entry, name=DOMAIN, update_interval=UPDATE_INTERVAL_NOT_IN_MEETING, update_method=update_data, diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index 3cf4be21757836..eec3c6bcd79405 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -193,7 +193,7 @@ ), "V_EC": SensorEntityDescription( key="V_EC", - native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM, ), "V_VAR": SensorEntityDescription( key="V_VAR", diff --git a/homeassistant/components/myuplink/api.py b/homeassistant/components/myuplink/api.py index 89a5d0c19b0707..32e0ea70193a31 100644 --- a/homeassistant/components/myuplink/api.py +++ b/homeassistant/components/myuplink/api.py @@ -26,7 +26,6 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/myuplink/binary_sensor.py b/homeassistant/components/myuplink/binary_sensor.py index 1478ed9c8b0d78..953859986d0af5 100644 --- a/homeassistant/components/myuplink/binary_sensor.py +++ b/homeassistant/components/myuplink/binary_sensor.py @@ -12,10 +12,17 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import F_SERIES from .entity import MyUplinkEntity, MyUplinkSystemEntity -from .helpers import find_matching_platform +from .helpers import find_matching_platform, transform_model_series CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, BinarySensorEntityDescription]] = { + F_SERIES: { + "43161": BinarySensorEntityDescription( + key="elect_add", + translation_key="elect_add", + ), + }, "NIBEF": { "43161": BinarySensorEntityDescription( key="elect_add", @@ -44,6 +51,7 @@ def get_description(device_point: DevicePoint) -> BinarySensorEntityDescription 2. 
Default to None """ prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id) diff --git a/homeassistant/components/myuplink/config_flow.py b/homeassistant/components/myuplink/config_flow.py index fe31dcc61833a7..554347cfd19b50 100644 --- a/homeassistant/components/myuplink/config_flow.py +++ b/homeassistant/components/myuplink/config_flow.py @@ -4,7 +4,7 @@ import logging from typing import Any -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, OAUTH2_SCOPES @@ -17,8 +17,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - config_entry_reauth: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -33,9 +31,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.config_entry_reauth = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -51,9 +46,8 @@ async def async_step_reauth_confirm( async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: """Create or update the config entry.""" - if self.config_entry_reauth: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self.config_entry_reauth, - data=data, + self._get_reauth_entry(), data=data ) return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/myuplink/const.py b/homeassistant/components/myuplink/const.py index 3541a8078c3c46..6fd354a21ec44a 100644 --- a/homeassistant/components/myuplink/const.py +++ b/homeassistant/components/myuplink/const.py @@ -6,3 +6,5 @@ OAUTH2_AUTHORIZE = "https://api.myuplink.com/oauth/authorize" OAUTH2_TOKEN = "https://api.myuplink.com/oauth/token" OAUTH2_SCOPES = ["WRITESYSTEM", "READSYSTEM", "offline_access"] + +F_SERIES = "f-series" diff --git a/homeassistant/components/myuplink/helpers.py b/homeassistant/components/myuplink/helpers.py index ac3d2a2d7fa3b9..de5486d8dea204 100644 --- a/homeassistant/components/myuplink/helpers.py +++ b/homeassistant/components/myuplink/helpers.py @@ -6,6 +6,8 @@ from homeassistant.components.sensor import SensorEntityDescription from homeassistant.const import Platform +from .const import F_SERIES + def find_matching_platform( device_point: DevicePoint, @@ -36,17 +38,93 @@ def find_matching_platform( return Platform.SENSOR +WEEKDAYS = ( + "monday", + "tuesday", + "wednesday", + "thursday", + "friday", + "saturday", + "sunday", +) + +PARAMETER_ID_TO_EXCLUDE_F730 = ( + "40940", + "47007", + "47015", + "47020", + "47021", + "47022", + "47023", + "47024", + "47025", + "47026", + "47027", + "47028", + "47032", + "47050", + "47051", + "47206", + "47209", + "47271", + "47272", + "47273", + "47274", + "47375", + "47376", + "47538", + "47539", + "47635", + "47669", + "47703", + "47737", + "47771", + "47772", + "47805", + "47806", + "47839", + "47840", + "47907", + "47941", + "47975", + "48009", + "48072", + "48442", + "49909", + "50113", +) + +PARAMETER_ID_TO_INCLUDE_SMO20 = ( + "40940", + "47011", + "47015", + "47028", + "47032", + "50004", +) + + def skip_entity(model: str, device_point: DevicePoint) -> bool: """Check if entity should be skipped for this device model.""" 
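    # Summary of the checks below: "SMO 20" uses an allow-list (a point is kept
    # only if it has smart home categories or its parameter_id is in
    # PARAMETER_ID_TO_INCLUDE_SMO20), while F-series models (any model name
    # starting with "f"/"F") use a deny-list that skips weekday scheduling
    # parameters and the ids in PARAMETER_ID_TO_EXCLUDE_F730. Points of other
    # models are never skipped.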
if model == "SMO 20": - if len(device_point.smart_home_categories) > 0 or device_point.parameter_id in ( - "40940", - "47011", - "47015", - "47028", - "47032", - "50004", + if ( + len(device_point.smart_home_categories) > 0 + or device_point.parameter_id in PARAMETER_ID_TO_INCLUDE_SMO20 ): return False return True + if model.lower().startswith("f"): + # Entity names containing weekdays are used for advanced scheduling in the + # heat pump and should not be exposed in the integration + if any(d in device_point.parameter_name.lower() for d in WEEKDAYS): + return True + if device_point.parameter_id in PARAMETER_ID_TO_EXCLUDE_F730: + return True return False + + +def transform_model_series(prefix: str) -> str: + """Remap all F-series models.""" + if prefix.lower().startswith("f"): + return F_SERIES + return prefix diff --git a/homeassistant/components/myuplink/number.py b/homeassistant/components/myuplink/number.py index 7c63a8ec8a2a26..b05ab5d46c9697 100644 --- a/homeassistant/components/myuplink/number.py +++ b/homeassistant/components/myuplink/number.py @@ -10,8 +10,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity +from .helpers import find_matching_platform, skip_entity, transform_model_series DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, NumberEntityDescription] = { "DM": NumberEntityDescription( @@ -22,6 +23,13 @@ } CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, NumberEntityDescription]] = { + F_SERIES: { + "40940": NumberEntityDescription( + key="degree_minutes", + translation_key="degree_minutes", + native_unit_of_measurement="DM", + ), + }, "NIBEF": { "40940": NumberEntityDescription( key="degree_minutes", @@ -41,6 +49,7 @@ def get_description(device_point: DevicePoint) -> NumberEntityDescription | None 3. Default to None """ prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get( device_point.parameter_id ) diff --git a/homeassistant/components/myuplink/sensor.py b/homeassistant/components/myuplink/sensor.py index e7c8054e3049e2..ef827fc1fb102b 100644 --- a/homeassistant/components/myuplink/sensor.py +++ b/homeassistant/components/myuplink/sensor.py @@ -25,8 +25,9 @@ from homeassistant.helpers.typing import StateType from . 
import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity +from .helpers import find_matching_platform, skip_entity, transform_model_series DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, SensorEntityDescription] = { "°C": SensorEntityDescription( @@ -139,6 +140,32 @@ MARKER_FOR_UNKNOWN_VALUE = -32768 CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SensorEntityDescription]] = { + F_SERIES: { + "43108": SensorEntityDescription( + key="fan_mode", + translation_key="fan_mode", + ), + "43427": SensorEntityDescription( + key="status_compressor", + translation_key="status_compressor", + device_class=SensorDeviceClass.ENUM, + ), + "49993": SensorEntityDescription( + key="elect_add", + translation_key="elect_add", + device_class=SensorDeviceClass.ENUM, + ), + "49994": SensorEntityDescription( + key="priority", + translation_key="priority", + device_class=SensorDeviceClass.ENUM, + ), + "50095": SensorEntityDescription( + key="status", + translation_key="status", + device_class=SensorDeviceClass.ENUM, + ), + }, "NIBEF": { "43108": SensorEntityDescription( key="fan_mode", @@ -174,6 +201,7 @@ def get_description(device_point: DevicePoint) -> SensorEntityDescription | None """ description = None prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get( device_point.parameter_id ) diff --git a/homeassistant/components/myuplink/strings.json b/homeassistant/components/myuplink/strings.json index 3351901b50b809..9ec5c355d7820e 100644 --- a/homeassistant/components/myuplink/strings.json +++ b/homeassistant/components/myuplink/strings.json @@ -34,6 +34,11 @@ "alarm": { "name": "Alarm" } + }, + "sensor": { + "status": { + "name": "Status" + } } } } diff --git a/homeassistant/components/myuplink/switch.py b/homeassistant/components/myuplink/switch.py index 1589701fcbc6a6..75ba6bd7819f69 100644 --- a/homeassistant/components/myuplink/switch.py +++ b/homeassistant/components/myuplink/switch.py @@ -12,10 +12,21 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity +from .helpers import find_matching_platform, skip_entity, transform_model_series CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SwitchEntityDescription]] = { + F_SERIES: { + "50004": SwitchEntityDescription( + key="temporary_lux", + translation_key="temporary_lux", + ), + "50005": SwitchEntityDescription( + key="boost_ventilation", + translation_key="boost_ventilation", + ), + }, "NIBEF": { "50004": SwitchEntityDescription( key="temporary_lux", @@ -37,6 +48,7 @@ def get_description(device_point: DevicePoint) -> SwitchEntityDescription | None 2. 
Default to None """ prefix, _, _ = device_point.category.partition(" ") + prefix = transform_model_series(prefix) return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id) diff --git a/homeassistant/components/nam/config_flow.py b/homeassistant/components/nam/config_flow.py index 75f3d4b8cd8eb0..494ce9fdac03da 100644 --- a/homeassistant/components/nam/config_flow.py +++ b/homeassistant/components/nam/config_flow.py @@ -18,7 +18,7 @@ import voluptuous as vol from homeassistant.components import zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -73,7 +73,6 @@ class NAMFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 _config: NamConfig - entry: ConfigEntry host: str async def async_step_user( @@ -187,7 +186,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self._get_reauth_entry() self.host = entry_data[CONF_HOST] self.context["title_placeholders"] = {"host": self.host} return await self.async_step_reauth_confirm() @@ -209,11 +207,9 @@ async def async_step_reauth_confirm( ): return self.async_abort(reason="reauth_unsuccessful") - self.hass.config_entries.async_update_entry( - self.entry, data={**user_input, CONF_HOST: self.host} + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={**user_input, CONF_HOST: self.host} ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", @@ -224,18 +220,11 @@ async def async_step_reauth_confirm( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.entry = self._get_reconfigure_entry() - self.host = self.entry.data[CONF_HOST] - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors = {} + reconfigure_entry = self._get_reconfigure_entry() + self.host = reconfigure_entry.data[CONF_HOST] if user_input is not None: try: @@ -243,21 +232,20 @@ async def async_step_reconfigure_confirm( except (ApiError, ClientConnectorError, TimeoutError): errors["base"] = "cannot_connect" else: - if format_mac(config.mac_address) != self.entry.unique_id: - return self.async_abort(reason="another_device") + await self.async_set_unique_id(format_mac(config.mac_address)) + self._abort_if_unique_id_mismatch(reason="another_device") - data = {**self.entry.data, CONF_HOST: user_input[CONF_HOST]} - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reconfigure_successful") + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates={CONF_HOST: user_input[CONF_HOST]} + ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=self.host): str, } ), - description_placeholders={"device_name": 
self.entry.title}, + description_placeholders={"device_name": reconfigure_entry.title}, errors=errors, ) diff --git a/homeassistant/components/nam/strings.json b/homeassistant/components/nam/strings.json index c4921ec52f96b4..2caa4d8bd976ae 100644 --- a/homeassistant/components/nam/strings.json +++ b/homeassistant/components/nam/strings.json @@ -28,7 +28,7 @@ "confirm_discovery": { "description": "Do you want to set up Nettigo Air Monitor at {host}?" }, - "reconfigure_confirm": { + "reconfigure": { "description": "Update configuration for {device_name}.", "data": { "host": "[%key:common::config_flow::data::host%]" diff --git a/homeassistant/components/nanoleaf/config_flow.py b/homeassistant/components/nanoleaf/config_flow.py index cc34e30eb5989d..27ef9a887feb29 100644 --- a/homeassistant/components/nanoleaf/config_flow.py +++ b/homeassistant/components/nanoleaf/config_flow.py @@ -11,7 +11,7 @@ import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.json import save_json @@ -34,8 +34,6 @@ class NanoleafConfigFlow(ConfigFlow, domain=DOMAIN): """Nanoleaf config flow.""" - reauth_entry: ConfigEntry | None = None - nanoleaf: Nanoleaf # For discovery integration import @@ -81,14 +79,10 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle Nanoleaf reauth flow if token is invalid.""" - self.reauth_entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) self.nanoleaf = Nanoleaf( async_get_clientsession(self.hass), entry_data[CONF_HOST] ) - self.context["title_placeholders"] = {"name": self.reauth_entry.title} + self.context["title_placeholders"] = {"name": self._get_reauth_entry().title} return await self.async_step_link() async def async_step_zeroconf( @@ -177,16 +171,11 @@ async def async_step_link( _LOGGER.exception("Unknown error authorizing Nanoleaf") return self.async_show_form(step_id="link", errors={"base": "unknown"}) - if self.reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, - CONF_TOKEN: self.nanoleaf.auth_token, - }, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_TOKEN: self.nanoleaf.auth_token}, ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return await self.async_setup_finish() diff --git a/homeassistant/components/nasweb/__init__.py b/homeassistant/components/nasweb/__init__.py new file mode 100644 index 00000000000000..1992cc41c75a7c --- /dev/null +++ b/homeassistant/components/nasweb/__init__.py @@ -0,0 +1,125 @@ +"""The NASweb integration.""" + +from __future__ import annotations + +import logging + +from webio_api import WebioAPI +from webio_api.api_client import AuthError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from 
homeassistant.helpers.network import NoURLAvailableError +from homeassistant.util.hass_dict import HassKey + +from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL +from .coordinator import NASwebCoordinator +from .nasweb_data import NASwebData + +PLATFORMS: list[Platform] = [Platform.SWITCH] + +NASWEB_CONFIG_URL = "https://{host}/page" + +_LOGGER = logging.getLogger(__name__) +type NASwebConfigEntry = ConfigEntry[NASwebCoordinator] +DATA_NASWEB: HassKey[NASwebData] = HassKey(DOMAIN) + + +async def async_setup_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: + """Set up NASweb from a config entry.""" + + if DATA_NASWEB not in hass.data: + data = NASwebData() + data.initialize(hass) + hass.data[DATA_NASWEB] = data + nasweb_data = hass.data[DATA_NASWEB] + + webio_api = WebioAPI( + entry.data[CONF_HOST], entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD] + ) + try: + if not await webio_api.check_connection(): + raise ConfigEntryNotReady( + f"[{entry.data[CONF_HOST]}] Check connection failed" + ) + if not await webio_api.refresh_device_info(): + _LOGGER.error("[%s] Refresh device info failed", entry.data[CONF_HOST]) + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + webio_serial = webio_api.get_serial_number() + if webio_serial is None: + _LOGGER.error("[%s] Serial number not available", entry.data[CONF_HOST]) + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + if entry.unique_id != webio_serial: + _LOGGER.error( + "[%s] Serial number doesn't match config entry", entry.data[CONF_HOST] + ) + raise ConfigEntryError(translation_key="config_entry_error_serial_mismatch") + + coordinator = NASwebCoordinator( + hass, webio_api, name=f"NASweb[{webio_api.get_name()}]" + ) + entry.runtime_data = coordinator + nasweb_data.notify_coordinator.add_coordinator(webio_serial, entry.runtime_data) + + webhook_url = nasweb_data.get_webhook_url(hass) + if not await webio_api.status_subscription(webhook_url, True): + _LOGGER.error("Failed to subscribe for status updates from webio") + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + if not await nasweb_data.notify_coordinator.check_connection(webio_serial): + _LOGGER.error("Did not receive status from device") + raise ConfigEntryError( + translation_key="config_entry_error_no_status_update", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + except TimeoutError as error: + raise ConfigEntryNotReady( + f"[{entry.data[CONF_HOST]}] Check connection reached timeout" + ) from error + except AuthError as error: + raise ConfigEntryError( + translation_key="config_entry_error_invalid_authentication" + ) from error + except NoURLAvailableError as error: + raise ConfigEntryError( + translation_key="config_entry_error_missing_internal_url" + ) from error + + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, webio_serial)}, + manufacturer=MANUFACTURER, + name=webio_api.get_name(), + configuration_url=NASWEB_CONFIG_URL.format(host=entry.data[CONF_HOST]), + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := 
await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + nasweb_data = hass.data[DATA_NASWEB] + coordinator = entry.runtime_data + serial = entry.unique_id + if serial is not None: + nasweb_data.notify_coordinator.remove_coordinator(serial) + if nasweb_data.can_be_deinitialized(): + nasweb_data.deinitialize(hass) + hass.data.pop(DATA_NASWEB) + webhook_url = nasweb_data.get_webhook_url(hass) + await coordinator.webio_api.status_subscription(webhook_url, False) + + return unload_ok diff --git a/homeassistant/components/nasweb/config_flow.py b/homeassistant/components/nasweb/config_flow.py new file mode 100644 index 00000000000000..3a9ad3f7d498e3 --- /dev/null +++ b/homeassistant/components/nasweb/config_flow.py @@ -0,0 +1,137 @@ +"""Config flow for NASweb integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +import voluptuous as vol +from webio_api import WebioAPI +from webio_api.api_client import AuthError + +from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_UNIQUE_ID, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import AbortFlow +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.network import NoURLAvailableError + +from .const import DOMAIN +from .coordinator import NASwebCoordinator +from .nasweb_data import NASwebData + +NASWEB_SCHEMA_IMG_URL = ( + "https://home-assistant.io/images/integrations/nasweb/nasweb_scheme.png" +) + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: + """Validate user-provided data.""" + webio_api = WebioAPI(data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD]) + if not await webio_api.check_connection(): + raise CannotConnect + try: + await webio_api.refresh_device_info() + except AuthError as e: + raise InvalidAuth from e + + nasweb_data = NASwebData() + nasweb_data.initialize(hass) + try: + webio_serial = webio_api.get_serial_number() + if webio_serial is None: + raise MissingNASwebData("Device serial number is not available") + + coordinator = NASwebCoordinator(hass, webio_api) + webhook_url = nasweb_data.get_webhook_url(hass) + nasweb_data.notify_coordinator.add_coordinator(webio_serial, coordinator) + subscription = await webio_api.status_subscription(webhook_url, True) + if not subscription: + nasweb_data.notify_coordinator.remove_coordinator(webio_serial) + raise MissingNASwebData( + "Failed to subscribe for status updates from device" + ) + + result = await nasweb_data.notify_coordinator.check_connection(webio_serial) + nasweb_data.notify_coordinator.remove_coordinator(webio_serial) + if not result: + if subscription: + await webio_api.status_subscription(webhook_url, False) + raise MissingNASwebStatus("Did not receive status from device") + + name = webio_api.get_name() + finally: + nasweb_data.deinitialize(hass) + return {"title": name, CONF_UNIQUE_ID: webio_serial} + + +class NASwebConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for NASweb.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if 
user_input is not None: + try: + info = await validate_input(self.hass, user_input) + await self.async_set_unique_id(info[CONF_UNIQUE_ID]) + self._abort_if_unique_id_configured() + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidAuth: + errors["base"] = "invalid_auth" + except NoURLAvailableError: + errors["base"] = "missing_internal_url" + except MissingNASwebData: + errors["base"] = "missing_nasweb_data" + except MissingNASwebStatus: + errors["base"] = "missing_status" + except AbortFlow: + raise + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry(title=info["title"], data=user_input) + + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + description_placeholders={ + "nasweb_schema_img": '
', + }, + ) + + +class CannotConnect(HomeAssistantError): + """Error to indicate we cannot connect.""" + + +class InvalidAuth(HomeAssistantError): + """Error to indicate there is invalid auth.""" + + +class MissingNASwebData(HomeAssistantError): + """Error to indicate missing information from NASweb.""" + + +class MissingNASwebStatus(HomeAssistantError): + """Error to indicate there was no status received from NASweb.""" diff --git a/homeassistant/components/nasweb/const.py b/homeassistant/components/nasweb/const.py new file mode 100644 index 00000000000000..ec750c90c8c691 --- /dev/null +++ b/homeassistant/components/nasweb/const.py @@ -0,0 +1,7 @@ +"""Constants for the NASweb integration.""" + +DOMAIN = "nasweb" +MANUFACTURER = "chomtech.pl" +STATUS_UPDATE_MAX_TIME_INTERVAL = 60 +SUPPORT_EMAIL = "support@chomtech.eu" +WEBHOOK_URL = "{internal_url}/api/webhook/{webhook_id}" diff --git a/homeassistant/components/nasweb/coordinator.py b/homeassistant/components/nasweb/coordinator.py new file mode 100644 index 00000000000000..90dca0f3022144 --- /dev/null +++ b/homeassistant/components/nasweb/coordinator.py @@ -0,0 +1,191 @@ +"""Message routing coordinators for handling NASweb push notifications.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from datetime import datetime, timedelta +import logging +import time +from typing import Any + +from aiohttp.web import Request, Response +from webio_api import WebioAPI +from webio_api.const import KEY_DEVICE_SERIAL, KEY_OUTPUTS, KEY_TYPE, TYPE_STATUS_UPDATE + +from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback +from homeassistant.helpers import event +from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol + +from .const import STATUS_UPDATE_MAX_TIME_INTERVAL + +_LOGGER = logging.getLogger(__name__) + + +class NotificationCoordinator: + """Coordinator redirecting push notifications for this integration to appropriate NASwebCoordinator.""" + + def __init__(self) -> None: + """Initialize coordinator.""" + self._coordinators: dict[str, NASwebCoordinator] = {} + + def add_coordinator(self, serial: str, coordinator: NASwebCoordinator) -> None: + """Add NASwebCoordinator to possible notification targets.""" + self._coordinators[serial] = coordinator + _LOGGER.debug("Added NASwebCoordinator for NASweb[%s]", serial) + + def remove_coordinator(self, serial: str) -> None: + """Remove NASwebCoordinator from possible notification targets.""" + self._coordinators.pop(serial) + _LOGGER.debug("Removed NASwebCoordinator for NASweb[%s]", serial) + + def has_coordinators(self) -> bool: + """Check if there is any registered coordinator for push notifications.""" + return len(self._coordinators) > 0 + + async def check_connection(self, serial: str) -> bool: + """Wait for first status update to confirm connection with NASweb.""" + nasweb_coordinator = self._coordinators.get(serial) + if nasweb_coordinator is None: + _LOGGER.error("Cannot check connection. 
No device match serial number") + return False + for counter in range(10): + _LOGGER.debug("Checking connection with: %s (%s)", serial, counter) + if nasweb_coordinator.is_connection_confirmed(): + return True + await asyncio.sleep(1) + return False + + async def handle_webhook_request( + self, hass: HomeAssistant, webhook_id: str, request: Request + ) -> Response | None: + """Handle webhook request from Push API.""" + if not self.has_coordinators(): + return None + notification = await request.json() + serial = notification.get(KEY_DEVICE_SERIAL, None) + _LOGGER.debug("Received push: %s", notification) + if serial is None: + _LOGGER.warning("Received notification without nasweb identifier") + return None + nasweb_coordinator = self._coordinators.get(serial) + if nasweb_coordinator is None: + _LOGGER.warning("Received notification for not registered nasweb") + return None + await nasweb_coordinator.handle_push_notification(notification) + return Response(body='{"response": "ok"}', content_type="application/json") + + +class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol): + """Coordinator managing status of single NASweb device. + + Since status updates are managed through push notifications, this class schedules + periodic checks to ensure that devices are marked unavailable if updates + haven't been received for a prolonged period. + """ + + def __init__( + self, hass: HomeAssistant, webio_api: WebioAPI, name: str = "NASweb[default]" + ) -> None: + """Initialize NASweb coordinator.""" + self._hass = hass + self.name = name + self.webio_api = webio_api + self._last_update: float | None = None + job_name = f"NASwebCoordinator[{name}]" + self._job = HassJob(self._handle_max_update_interval, job_name) + self._unsub_last_update_check: CALLBACK_TYPE | None = None + self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {} + data: dict[str, Any] = {} + data[KEY_OUTPUTS] = self.webio_api.outputs + self.async_set_updated_data(data) + + def is_connection_confirmed(self) -> bool: + """Check whether coordinator received status update from NASweb.""" + return self._last_update is not None + + @callback + def async_add_listener( + self, update_callback: CALLBACK_TYPE, context: Any = None + ) -> Callable[[], None]: + """Listen for data updates.""" + schedule_update_check = not self._listeners + + @callback + def remove_listener() -> None: + """Remove update listener.""" + self._listeners.pop(remove_listener) + if not self._listeners: + self._async_unsub_last_update_check() + + self._listeners[remove_listener] = (update_callback, context) + # This is the first listener, set up interval. + if schedule_update_check: + self._schedule_last_update_check() + return remove_listener + + @callback + def async_set_updated_data(self, data: dict[str, Any]) -> None: + """Update data and notify listeners.""" + self.data = data + self.last_update = self._hass.loop.time() + _LOGGER.debug("Updated %s data", self.name) + if self._listeners: + self._schedule_last_update_check() + self.async_update_listeners() + + @callback + def async_update_listeners(self) -> None: + """Update all registered listeners.""" + for update_callback, _ in list(self._listeners.values()): + update_callback() + + async def _handle_max_update_interval(self, now: datetime) -> None: + """Handle max update interval occurrence. + + This method is called when `STATUS_UPDATE_MAX_TIME_INTERVAL` has passed without + receiving a status update. 
It only needs to trigger state update of entities + which then change their state accordingly. + """ + self._unsub_last_update_check = None + if self._listeners: + self.async_update_listeners() + + def _schedule_last_update_check(self) -> None: + """Schedule a task to trigger entities state update after `STATUS_UPDATE_MAX_TIME_INTERVAL`. + + This method schedules a task (`_handle_max_update_interval`) to be executed after + `STATUS_UPDATE_MAX_TIME_INTERVAL` seconds without status update, which enables entities + to change their state to unavailable. After each status update this task is rescheduled. + """ + self._async_unsub_last_update_check() + now = self._hass.loop.time() + next_check = ( + now + timedelta(seconds=STATUS_UPDATE_MAX_TIME_INTERVAL).total_seconds() + ) + self._unsub_last_update_check = event.async_call_at( + self._hass, + self._job, + next_check, + ) + + def _async_unsub_last_update_check(self) -> None: + """Cancel any scheduled update check call.""" + if self._unsub_last_update_check: + self._unsub_last_update_check() + self._unsub_last_update_check = None + + async def handle_push_notification(self, notification: dict) -> None: + """Handle incoming push notification from NASweb.""" + msg_type = notification.get(KEY_TYPE) + _LOGGER.debug("Received push notification: %s", msg_type) + + if msg_type == TYPE_STATUS_UPDATE: + await self.process_status_update(notification) + self._last_update = time.time() + + async def process_status_update(self, new_status: dict) -> None: + """Process status update from NASweb.""" + self.webio_api.update_device_status(new_status) + new_data = {KEY_OUTPUTS: self.webio_api.outputs} + self.async_set_updated_data(new_data) diff --git a/homeassistant/components/nasweb/manifest.json b/homeassistant/components/nasweb/manifest.json new file mode 100644 index 00000000000000..e7e06419dade1e --- /dev/null +++ b/homeassistant/components/nasweb/manifest.json @@ -0,0 +1,14 @@ +{ + "domain": "nasweb", + "name": "NASweb", + "codeowners": ["@nasWebio"], + "config_flow": true, + "dependencies": ["webhook"], + "documentation": "https://www.home-assistant.io/integrations/nasweb", + "homekit": {}, + "integration_type": "hub", + "iot_class": "local_push", + "requirements": ["webio-api==0.1.8"], + "ssdp": [], + "zeroconf": [] +} diff --git a/homeassistant/components/nasweb/nasweb_data.py b/homeassistant/components/nasweb/nasweb_data.py new file mode 100644 index 00000000000000..4f6a37e6cc74ae --- /dev/null +++ b/homeassistant/components/nasweb/nasweb_data.py @@ -0,0 +1,64 @@ +"""Dataclass storing integration data in hass.data[DOMAIN].""" + +from dataclasses import dataclass, field +import logging + +from aiohttp.hdrs import METH_POST + +from homeassistant.components.webhook import ( + async_generate_id, + async_register as webhook_register, + async_unregister as webhook_unregister, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.network import get_url + +from .const import DOMAIN, WEBHOOK_URL +from .coordinator import NotificationCoordinator + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class NASwebData: + """Class storing integration data.""" + + notify_coordinator: NotificationCoordinator = field( + default_factory=NotificationCoordinator + ) + webhook_id = "" + + def is_initialized(self) -> bool: + """Return True if instance was initialized and is ready for use.""" + return bool(self.webhook_id) + + def can_be_deinitialized(self) -> bool: + """Return whether this instance can be deinitialized.""" + return not 
self.notify_coordinator.has_coordinators() + + def initialize(self, hass: HomeAssistant) -> None: + """Initialize NASwebData instance.""" + if self.is_initialized(): + return + new_webhook_id = async_generate_id() + webhook_register( + hass, + DOMAIN, + "NASweb", + new_webhook_id, + self.notify_coordinator.handle_webhook_request, + allowed_methods=[METH_POST], + ) + self.webhook_id = new_webhook_id + _LOGGER.debug("Registered webhook: %s", self.webhook_id) + + def deinitialize(self, hass: HomeAssistant) -> None: + """Deinitialize NASwebData instance.""" + if not self.is_initialized(): + return + webhook_unregister(hass, self.webhook_id) + + def get_webhook_url(self, hass: HomeAssistant) -> str: + """Return webhook URL for Push API.""" + hass_url = get_url(hass, allow_external=False) + return WEBHOOK_URL.format(internal_url=hass_url, webhook_id=self.webhook_id) diff --git a/homeassistant/components/nasweb/strings.json b/homeassistant/components/nasweb/strings.json new file mode 100644 index 00000000000000..b8af8cd54db788 --- /dev/null +++ b/homeassistant/components/nasweb/strings.json @@ -0,0 +1,50 @@ +{ + "config": { + "step": { + "user": { + "title": "Add NASweb device", + "description": "{nasweb_schema_img}NASweb combines the functions of a control panel and the ability to manage building automation. The device monitors the flow of information from sensors and programmable switches and stores settings, definitions and configured actions.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "missing_internal_url": "Make sure Home Assistant has a valid internal URL", + "missing_nasweb_data": "Something isn't right with the device's internal configuration. Try restarting the device and Home Assistant.", + "missing_status": "Did not receive any status updates within the expected time window. Make sure the Home Assistant internal URL is reachable from the NASweb device.", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "exceptions": { + "config_entry_error_invalid_authentication": { + "message": "Invalid username/password. Most likely the user's password was changed or the user was removed. Delete this entry and create a new one with the correct username/password." + }, + "config_entry_error_internal_error": { + "message": "Something isn't right with the device's internal configuration. Try restarting the device and Home Assistant. If the issue persists, contact support at {support_email}" + }, + "config_entry_error_no_status_update": { + "message": "Did not receive any status updates within the expected time window. Make sure the Home Assistant internal URL is reachable from the NASweb device. If the issue persists, contact support at {support_email}" + }, + "config_entry_error_missing_internal_url": { + "message": "[%key:component::nasweb::config::error::missing_internal_url%]" + }, + "serial_mismatch": { + "message": "Connected to a different NASweb device (serial number mismatch)."
+ } + }, + "entity": { + "switch": { + "switch_output": { + "name": "Relay Switch {index}" + } + } + } +} diff --git a/homeassistant/components/nasweb/switch.py b/homeassistant/components/nasweb/switch.py new file mode 100644 index 00000000000000..00e5a21da18dc6 --- /dev/null +++ b/homeassistant/components/nasweb/switch.py @@ -0,0 +1,133 @@ +"""Platform for NASweb output.""" + +from __future__ import annotations + +import logging +import time +from typing import Any + +from webio_api import Output as NASwebOutput + +from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH, SwitchEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.helpers.entity_registry as er +from homeassistant.helpers.typing import DiscoveryInfoType +from homeassistant.helpers.update_coordinator import ( + BaseCoordinatorEntity, + BaseDataUpdateCoordinatorProtocol, +) + +from . import NASwebConfigEntry +from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL +from .coordinator import NASwebCoordinator + +OUTPUT_TRANSLATION_KEY = "switch_output" + +_LOGGER = logging.getLogger(__name__) + + +def _get_output(coordinator: NASwebCoordinator, index: int) -> NASwebOutput | None: + for out in coordinator.webio_api.outputs: + if out.index == index: + return out + return None + + +async def async_setup_entry( + hass: HomeAssistant, + config: NASwebConfigEntry, + async_add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Set up switch platform.""" + coordinator = config.runtime_data + current_outputs: set[int] = set() + + @callback + def _check_entities() -> None: + received_outputs = {out.index for out in coordinator.webio_api.outputs} + added = {i for i in received_outputs if i not in current_outputs} + removed = {i for i in current_outputs if i not in received_outputs} + entities_to_add: list[RelaySwitch] = [] + for index in added: + webio_output = _get_output(coordinator, index) + if not isinstance(webio_output, NASwebOutput): + _LOGGER.error("Cannot create RelaySwitch entity without NASwebOutput") + continue + new_output = RelaySwitch(coordinator, webio_output) + entities_to_add.append(new_output) + current_outputs.add(index) + async_add_entities(entities_to_add) + entity_registry = er.async_get(hass) + for index in removed: + unique_id = f"{DOMAIN}.{config.unique_id}.relay_switch.{index}" + if entity_id := entity_registry.async_get_entity_id( + DOMAIN_SWITCH, DOMAIN, unique_id + ): + entity_registry.async_remove(entity_id) + current_outputs.remove(index) + else: + _LOGGER.warning("Failed to remove old output: no entity_id") + + coordinator.async_add_listener(_check_entities) + _check_entities() + + +class RelaySwitch(SwitchEntity, BaseCoordinatorEntity): + """Entity representing NASweb Output.""" + + def __init__( + self, + coordinator: BaseDataUpdateCoordinatorProtocol, + nasweb_output: NASwebOutput, + ) -> None: + """Initialize RelaySwitch.""" + super().__init__(coordinator) + self._output = nasweb_output + self._attr_icon = "mdi:export" + self._attr_has_entity_name = True + self._attr_translation_key = OUTPUT_TRANSLATION_KEY + self._attr_translation_placeholders = {"index": f"{nasweb_output.index:2d}"} + self._attr_unique_id = ( + f"{DOMAIN}.{self._output.webio_serial}.relay_switch.{self._output.index}" + ) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._output.webio_serial)}, 
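# The identifiers here match the device registry entry created in async_setup_entry
# (keyed by the NASweb serial number), so all output entities are grouped under the
# same NASweb device.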
+ ) + + async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + self._handle_coordinator_update() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._attr_is_on = self._output.state + if ( + self.coordinator.last_update is None + or time.time() - self._output.last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL + ): + self._attr_available = False + else: + self._attr_available = ( + self._output.available if self._output.available is not None else False + ) + self.async_write_ha_state() + + async def async_update(self) -> None: + """Update the entity. + + Only used by the generic entity update service. + Scheduling updates is not necessary, the coordinator takes care of updates via push notifications. + """ + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn On RelaySwitch.""" + await self._output.turn_on() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn Off RelaySwitch.""" + await self._output.turn_off() diff --git a/homeassistant/components/ness_alarm/alarm_control_panel.py b/homeassistant/components/ness_alarm/alarm_control_panel.py index e44c06ecc85fe3..64b764c6872628 100644 --- a/homeassistant/components/ness_alarm/alarm_control_panel.py +++ b/homeassistant/components/ness_alarm/alarm_control_panel.py @@ -9,18 +9,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -31,12 +22,12 @@ _LOGGER = logging.getLogger(__name__) ARMING_MODE_TO_STATE = { - ArmingMode.ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - ArmingMode.ARMED_HOME: STATE_ALARM_ARMED_HOME, - ArmingMode.ARMED_DAY: STATE_ALARM_ARMED_AWAY, # no applicable state, fallback to away - ArmingMode.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - ArmingMode.ARMED_VACATION: STATE_ALARM_ARMED_VACATION, - ArmingMode.ARMED_HIGHEST: STATE_ALARM_ARMED_AWAY, # no applicable state, fallback to away + ArmingMode.ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + ArmingMode.ARMED_HOME: AlarmControlPanelState.ARMED_HOME, + ArmingMode.ARMED_DAY: AlarmControlPanelState.ARMED_AWAY, # no applicable state, fallback to away + ArmingMode.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + ArmingMode.ARMED_VACATION: AlarmControlPanelState.ARMED_VACATION, + ArmingMode.ARMED_HIGHEST: AlarmControlPanelState.ARMED_AWAY, # no applicable state, fallback to away } @@ -101,19 +92,19 @@ def _handle_arming_state_change( """Handle arming state update.""" if arming_state == ArmingState.UNKNOWN: - self._attr_state = None + self._attr_alarm_state = None elif arming_state == ArmingState.DISARMED: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif arming_state in (ArmingState.ARMING, ArmingState.EXIT_DELAY): - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING elif arming_state == ArmingState.ARMED: - self._attr_state = ARMING_MODE_TO_STATE.get( - arming_mode, STATE_ALARM_ARMED_AWAY + 
self._attr_alarm_state = ARMING_MODE_TO_STATE.get( + arming_mode, AlarmControlPanelState.ARMED_AWAY ) elif arming_state == ArmingState.ENTRY_DELAY: - self._attr_state = STATE_ALARM_PENDING + self._attr_alarm_state = AlarmControlPanelState.PENDING elif arming_state == ArmingState.TRIGGERED: - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED else: _LOGGER.warning("Unhandled arming state: %s", arming_state) diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index 8a1719a9bd575c..6b094c68cb024c 100644 --- a/homeassistant/components/nest/__init__.py +++ b/homeassistant/components/nest/__init__.py @@ -59,6 +59,7 @@ CONF_PROJECT_ID, CONF_SUBSCRIBER_ID, CONF_SUBSCRIBER_ID_IMPORTED, + CONF_SUBSCRIPTION_NAME, DATA_DEVICE_MANAGER, DATA_SDM, DATA_SUBSCRIBER, @@ -103,10 +104,10 @@ PLATFORMS = [Platform.CAMERA, Platform.CLIMATE, Platform.EVENT, Platform.SENSOR] # Fetch media events with a disk backed cache, with a limit for each camera -# device. The largest media items are mp4 clips at ~120kb each, and we target +# device. The largest media items are mp4 clips at ~450kb each, and we target # ~125MB of storage per camera to try to balance a reasonable user experience # for event history not not filling the disk. -EVENT_MEDIA_CACHE_SIZE = 1024 # number of events +EVENT_MEDIA_CACHE_SIZE = 256 # number of events THUMBNAIL_SIZE_PX = 175 @@ -289,7 +290,9 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle removal of pubsub subscriptions created during config flow.""" if ( DATA_SDM not in entry.data - or CONF_SUBSCRIBER_ID not in entry.data + or not ( + CONF_SUBSCRIPTION_NAME in entry.data or CONF_SUBSCRIBER_ID in entry.data + ) or CONF_SUBSCRIBER_ID_IMPORTED in entry.data ): return diff --git a/homeassistant/components/nest/api.py b/homeassistant/components/nest/api.py index 3ef26747115a6f..5c65a70c75dfc2 100644 --- a/homeassistant/components/nest/api.py +++ b/homeassistant/components/nest/api.py @@ -8,6 +8,7 @@ from aiohttp import ClientSession from google.oauth2.credentials import Credentials +from google_nest_sdm.admin_client import PUBSUB_API_HOST, AdminClient from google_nest_sdm.auth import AbstractAuth from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber @@ -19,6 +20,7 @@ API_URL, CONF_PROJECT_ID, CONF_SUBSCRIBER_ID, + CONF_SUBSCRIPTION_NAME, OAUTH2_TOKEN, SDM_SCOPES, ) @@ -44,8 +46,7 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token for SDM API.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) async def async_get_creds(self) -> Credentials: @@ -81,9 +82,10 @@ def __init__( self, websession: ClientSession, access_token: str, + host: str, ) -> None: """Init the Nest client library auth implementation.""" - super().__init__(websession, API_URL) + super().__init__(websession, host) self._access_token = access_token async def async_get_access_token(self) -> str: @@ -112,29 +114,46 @@ async def new_subscriber( implementation, config_entry_oauth2_flow.LocalOAuth2Implementation ): raise TypeError(f"Unexpected auth implementation {implementation}") - if not (subscriber_id := entry.data.get(CONF_SUBSCRIBER_ID)): - raise ValueError("Configuration option 'subscriber_id' missing") + if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is 
None: + subscription_name = entry.data[CONF_SUBSCRIBER_ID] auth = AsyncConfigEntryAuth( aiohttp_client.async_get_clientsession(hass), config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation), implementation.client_id, implementation.client_secret, ) - return GoogleNestSubscriber(auth, entry.data[CONF_PROJECT_ID], subscriber_id) + return GoogleNestSubscriber(auth, entry.data[CONF_PROJECT_ID], subscription_name) def new_subscriber_with_token( hass: HomeAssistant, access_token: str, project_id: str, - subscriber_id: str, + subscription_name: str, ) -> GoogleNestSubscriber: """Create a GoogleNestSubscriber with an access token.""" return GoogleNestSubscriber( AccessTokenAuthImpl( aiohttp_client.async_get_clientsession(hass), access_token, + API_URL, ), project_id, - subscriber_id, + subscription_name, + ) + + +def new_pubsub_admin_client( + hass: HomeAssistant, + access_token: str, + cloud_project_id: str, +) -> AdminClient: + """Create a Nest AdminClient with an access token.""" + return AdminClient( + auth=AccessTokenAuthImpl( + aiohttp_client.async_get_clientsession(hass), + access_token, + PUBSUB_API_HOST, + ), + cloud_project_id=cloud_project_id, ) diff --git a/homeassistant/components/nest/camera.py b/homeassistant/components/nest/camera.py index e25ff82694f961..0a46d67a3ad526 100644 --- a/homeassistant/components/nest/camera.py +++ b/homeassistant/components/nest/camera.py @@ -2,29 +2,36 @@ from __future__ import annotations +from abc import ABC import asyncio -from collections.abc import Callable +from collections.abc import Awaitable, Callable import datetime import functools import logging from pathlib import Path -from typing import cast from google_nest_sdm.camera_traits import ( - CameraImageTrait, CameraLiveStreamTrait, RtspStream, StreamingProtocol, + WebRtcStream, ) from google_nest_sdm.device import Device from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.exceptions import ApiException +from webrtc_models import RTCIceCandidate -from homeassistant.components.camera import Camera, CameraEntityFeature, StreamType -from homeassistant.components.camera.webrtc import WebRTCClientConfiguration +from homeassistant.components.camera import ( + Camera, + CameraEntityFeature, + StreamType, + WebRTCAnswer, + WebRTCClientConfiguration, + WebRTCSendMessage, +) from homeassistant.components.stream import CONF_EXTRA_PART_WAIT_TIME from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_utc_time @@ -40,6 +47,11 @@ # Used to schedule an alarm to refresh the stream before expiration STREAM_EXPIRATION_BUFFER = datetime.timedelta(seconds=30) +# Refresh streams with a bounded interval and backoff on failure +MIN_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=1) +MAX_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=10) +BACKOFF_MULTIPLIER = 1.5 + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback @@ -49,19 +61,87 @@ async def async_setup_entry( device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ DATA_DEVICE_MANAGER ] - async_add_entities( - NestCamera(device) - for device in device_manager.devices.values() - if CameraImageTrait.NAME in device.traits - or CameraLiveStreamTrait.NAME in 
device.traits - ) + entities: list[NestCameraBaseEntity] = [] + for device in device_manager.devices.values(): + if (live_stream := device.traits.get(CameraLiveStreamTrait.NAME)) is None: + continue + if StreamingProtocol.WEB_RTC in live_stream.supported_protocols: + entities.append(NestWebRTCEntity(device)) + elif StreamingProtocol.RTSP in live_stream.supported_protocols: + entities.append(NestRTSPEntity(device)) + + async_add_entities(entities) + + +class StreamRefresh: + """Class that will refresh an expiring stream. + This class will schedule an alarm for the next expiration time of a stream. + When the alarm fires, it runs the provided `refresh_cb` to extend the + lifetime of the stream and return a new expiration time. -class NestCamera(Camera): + A simple backoff will be applied when the refresh callback fails. + """ + + def __init__( + self, + hass: HomeAssistant, + expires_at: datetime.datetime, + refresh_cb: Callable[[], Awaitable[datetime.datetime | None]], + ) -> None: + """Initialize StreamRefresh.""" + self._hass = hass + self._unsub: Callable[[], None] | None = None + self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL + self._refresh_cb = refresh_cb + self._schedule_stream_refresh(expires_at - STREAM_EXPIRATION_BUFFER) + + def unsub(self) -> None: + """Invalidates the stream.""" + if self._unsub: + self._unsub() + + async def _handle_refresh(self, _: datetime.datetime) -> None: + """Alarm that fires to check if the stream should be refreshed.""" + self._unsub = None + try: + expires_at = await self._refresh_cb() + except ApiException as err: + _LOGGER.debug("Failed to refresh stream: %s", err) + # Increase backoff until the max backoff interval is reached + self._min_refresh_interval = min( + self._min_refresh_interval * BACKOFF_MULTIPLIER, + MAX_REFRESH_BACKOFF_INTERVAL, + ) + refresh_time = utcnow() + self._min_refresh_interval + else: + if expires_at is None: + return + self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL # Reset backoff + # Defend against invalid stream expiration time in the past + refresh_time = max( + expires_at - STREAM_EXPIRATION_BUFFER, + utcnow() + self._min_refresh_interval, + ) + self._schedule_stream_refresh(refresh_time) + + def _schedule_stream_refresh(self, refresh_time: datetime.datetime) -> None: + """Schedules an alarm to refresh any streams before expiration.""" + _LOGGER.debug("Scheduling stream refresh for %s", refresh_time) + self._unsub = async_track_point_in_utc_time( + self._hass, + self._handle_refresh, + refresh_time, + ) + + +class NestCameraBaseEntity(Camera, ABC): """Devices that support cameras.""" _attr_has_entity_name = True _attr_name = None + _attr_is_streaming = True + _attr_supported_features = CameraEntityFeature.STREAM def __init__(self, device: Device) -> None: """Initialize the camera.""" @@ -71,38 +151,34 @@ def __init__(self, device: Device) -> None: self._attr_device_info = nest_device_info.device_info self._attr_brand = nest_device_info.device_brand self._attr_model = nest_device_info.device_model - self._stream: RtspStream | None = None - self._create_stream_url_lock = asyncio.Lock() - self._stream_refresh_unsub: Callable[[], None] | None = None - self._attr_is_streaming = False - self._attr_supported_features = CameraEntityFeature(0) - self._rtsp_live_stream_trait: CameraLiveStreamTrait | None = None - if CameraLiveStreamTrait.NAME in self._device.traits: - self._attr_is_streaming = True - self._attr_supported_features |= CameraEntityFeature.STREAM - trait = cast( - CameraLiveStreamTrait, 
self._device.traits[CameraLiveStreamTrait.NAME] - ) - if StreamingProtocol.RTSP in trait.supported_protocols: - self._rtsp_live_stream_trait = trait self.stream_options[CONF_EXTRA_PART_WAIT_TIME] = 3 # The API "name" field is a unique device identifier. self._attr_unique_id = f"{self._device.name}-camera" - @property - def use_stream_for_stills(self) -> bool: - """Whether or not to use stream to generate stills.""" - return self._rtsp_live_stream_trait is not None + async def async_added_to_hass(self) -> None: + """Run when entity is added to register update signal handler.""" + self.async_on_remove( + self._device.add_update_listener(self.async_write_ha_state) + ) + + +class NestRTSPEntity(NestCameraBaseEntity): + """Nest cameras that use RTSP.""" + + _rtsp_stream: RtspStream | None = None + _rtsp_live_stream_trait: CameraLiveStreamTrait + + def __init__(self, device: Device) -> None: + """Initialize the camera.""" + super().__init__(device) + self._create_stream_url_lock = asyncio.Lock() + self._rtsp_live_stream_trait = device.traits[CameraLiveStreamTrait.NAME] + self._refresh_unsub: Callable[[], None] | None = None @property - def frontend_stream_type(self) -> StreamType | None: - """Return the type of stream supported by this camera.""" - if CameraLiveStreamTrait.NAME not in self._device.traits: - return None - trait = self._device.traits[CameraLiveStreamTrait.NAME] - if StreamingProtocol.WEB_RTC in trait.supported_protocols: - return StreamType.WEB_RTC - return super().frontend_stream_type + def use_stream_for_stills(self) -> bool: + """Always use the RTSP stream to generate snapshots.""" + return True @property def available(self) -> bool: @@ -116,83 +192,88 @@ def available(self) -> bool: async def stream_source(self) -> str | None: """Return the source of the stream.""" - if not self._rtsp_live_stream_trait: - return None async with self._create_stream_url_lock: - if not self._stream: + if not self._rtsp_stream: _LOGGER.debug("Fetching stream url") try: - self._stream = ( + self._rtsp_stream = ( await self._rtsp_live_stream_trait.generate_rtsp_stream() ) except ApiException as err: raise HomeAssistantError(f"Nest API error: {err}") from err - self._schedule_stream_refresh() - assert self._stream - if self._stream.expires_at < utcnow(): + refresh = StreamRefresh( + self.hass, + self._rtsp_stream.expires_at, + self._async_refresh_stream, + ) + self._refresh_unsub = refresh.unsub + assert self._rtsp_stream + if self._rtsp_stream.expires_at < utcnow(): _LOGGER.warning("Stream already expired") - return self._stream.rtsp_stream_url - - def _schedule_stream_refresh(self) -> None: - """Schedules an alarm to refresh the stream url before expiration.""" - assert self._stream - _LOGGER.debug("New stream url expires at %s", self._stream.expires_at) - refresh_time = self._stream.expires_at - STREAM_EXPIRATION_BUFFER - # Schedule an alarm to extend the stream - if self._stream_refresh_unsub is not None: - self._stream_refresh_unsub() - - self._stream_refresh_unsub = async_track_point_in_utc_time( - self.hass, - self._handle_stream_refresh, - refresh_time, - ) + return self._rtsp_stream.rtsp_stream_url - async def _handle_stream_refresh(self, now: datetime.datetime) -> None: - """Alarm that fires to check if the stream should be refreshed.""" - if not self._stream: - return - _LOGGER.debug("Extending stream url") + async def _async_refresh_stream(self) -> datetime.datetime | None: + """Refresh stream to extend expiration time.""" + if not self._rtsp_stream: + return None + 
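# Refresh contract: StreamRefresh calls this shortly before the current expiry and
# reschedules itself based on the return value (the new expires_at, or None to stop).
# The wiring, roughly as done in stream_source() above:
#     refresh = StreamRefresh(self.hass, self._rtsp_stream.expires_at, self._async_refresh_stream)
#     self._refresh_unsub = refresh.unsub  # cancel the pending alarm on teardown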
_LOGGER.debug("Extending RTSP stream") try: - self._stream = await self._stream.extend_rtsp_stream() + self._rtsp_stream = await self._rtsp_stream.extend_rtsp_stream() except ApiException as err: _LOGGER.debug("Failed to extend stream: %s", err) # Next attempt to catch a url will get a new one - self._stream = None + self._rtsp_stream = None if self.stream: await self.stream.stop() self.stream = None - return + return None # Update the stream worker with the latest valid url if self.stream: - self.stream.update_source(self._stream.rtsp_stream_url) - self._schedule_stream_refresh() + self.stream.update_source(self._rtsp_stream.rtsp_stream_url) + return self._rtsp_stream.expires_at async def async_will_remove_from_hass(self) -> None: """Invalidates the RTSP token when unloaded.""" - if self._stream: - _LOGGER.debug("Invalidating stream") + await super().async_will_remove_from_hass() + if self._refresh_unsub is not None: + self._refresh_unsub() + if self._rtsp_stream: try: - await self._stream.stop_rtsp_stream() + await self._rtsp_stream.stop_stream() except ApiException as err: - _LOGGER.debug( - "Failed to revoke stream token, will rely on ttl: %s", err - ) - if self._stream_refresh_unsub: - self._stream_refresh_unsub() + _LOGGER.debug("Error stopping stream: %s", err) + self._rtsp_stream = None - async def async_added_to_hass(self) -> None: - """Run when entity is added to register update signal handler.""" - self.async_on_remove( - self._device.add_update_listener(self.async_write_ha_state) - ) + +class NestWebRTCEntity(NestCameraBaseEntity): + """Nest cameras that use WebRTC.""" + + def __init__(self, device: Device) -> None: + """Initialize the camera.""" + super().__init__(device) + self._webrtc_sessions: dict[str, WebRtcStream] = {} + self._refresh_unsub: dict[str, Callable[[], None]] = {} + + @property + def frontend_stream_type(self) -> StreamType | None: + """Return the type of stream supported by this camera.""" + return StreamType.WEB_RTC + + async def _async_refresh_stream(self, session_id: str) -> datetime.datetime | None: + """Refresh stream to extend expiration time.""" + if not (webrtc_stream := self._webrtc_sessions.get(session_id)): + return None + _LOGGER.debug("Extending WebRTC stream %s", webrtc_stream.media_session_id) + webrtc_stream = await webrtc_stream.extend_stream() + if session_id in self._webrtc_sessions: + self._webrtc_sessions[session_id] = webrtc_stream + return webrtc_stream.expires_at + return None async def async_camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: - """Return bytes of camera image.""" - # Use the thumbnail from RTSP stream, or a placeholder if stream is - # not supported (e.g. 
WebRTC) as a fallback when 'use_stream_for_stills' if False + """Return a placeholder image for WebRTC cameras that don't support snapshots.""" return await self.hass.async_add_executor_job(self.placeholder_image) @classmethod @@ -201,17 +282,59 @@ def placeholder_image(cls) -> bytes: """Return placeholder image to use when no stream is available.""" return PLACEHOLDER.read_bytes() - async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: """Return the source of the stream.""" trait: CameraLiveStreamTrait = self._device.traits[CameraLiveStreamTrait.NAME] - if StreamingProtocol.WEB_RTC not in trait.supported_protocols: - return await super().async_handle_web_rtc_offer(offer_sdp) try: stream = await trait.generate_web_rtc_stream(offer_sdp) except ApiException as err: raise HomeAssistantError(f"Nest API error: {err}") from err - return stream.answer_sdp + _LOGGER.debug( + "Started WebRTC session %s, %s", session_id, stream.media_session_id + ) + self._webrtc_sessions[session_id] = stream + send_message(WebRTCAnswer(stream.answer_sdp)) + refresh = StreamRefresh( + self.hass, + stream.expires_at, + functools.partial(self._async_refresh_stream, session_id), + ) + self._refresh_unsub[session_id] = refresh.unsub + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Ignore WebRTC candidates for Nest cloud based cameras.""" + return - async def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: + @callback + def close_webrtc_session(self, session_id: str) -> None: + """Close a WebRTC session.""" + if (stream := self._webrtc_sessions.pop(session_id, None)) is not None: + _LOGGER.debug( + "Closing WebRTC session %s, %s", session_id, stream.media_session_id + ) + unsub = self._refresh_unsub.pop(session_id) + unsub() + + async def stop_stream() -> None: + try: + await stream.stop_stream() + except ApiException as err: + _LOGGER.debug("Error stopping stream: %s", err) + + self.hass.async_create_task(stop_stream()) + super().close_webrtc_session(session_id) + + @callback + def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration: """Return the WebRTC client configuration adjustable per integration.""" return WebRTCClientConfiguration(data_channel="dataSendChannel") + + async def async_will_remove_from_hass(self) -> None: + """Invalidates the RTSP token when unloaded.""" + await super().async_will_remove_from_hass() + for session_id in list(self._webrtc_sessions.keys()): + self.close_webrtc_session(session_id) diff --git a/homeassistant/components/nest/config_flow.py b/homeassistant/components/nest/config_flow.py index 29ae9f6a08e1b2..274e4c288b4b3b 100644 --- a/homeassistant/components/nest/config_flow.py +++ b/homeassistant/components/nest/config_flow.py @@ -12,18 +12,18 @@ from collections.abc import Iterable, Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from google_nest_sdm.exceptions import ( - ApiException, - AuthException, - ConfigurationException, - SubscriberException, +from google_nest_sdm.admin_client import ( + AdminClient, + EligibleSubscriptions, + EligibleTopics, ) +from google_nest_sdm.exceptions import ApiException from google_nest_sdm.structure import Structure import voluptuous as vol -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry, ConfigFlowResult +from 
homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.util import get_random_string @@ -31,8 +31,9 @@ from .const import ( CONF_CLOUD_PROJECT_ID, CONF_PROJECT_ID, - CONF_SUBSCRIBER_ID, - DATA_NEST_CONFIG, + CONF_SUBSCRIBER_ID_IMPORTED, + CONF_SUBSCRIPTION_NAME, + CONF_TOPIC_NAME, DATA_SDM, DOMAIN, OAUTH2_AUTHORIZE, @@ -58,7 +59,7 @@ DEVICE_ACCESS_CONSOLE_EDIT_URL = ( "https://console.nest.google.com/device-access/project/{project_id}/information" ) - +CREATE_NEW_SUBSCRIPTION_KEY = "create_new_subscription" _LOGGER = logging.getLogger(__name__) @@ -95,21 +96,9 @@ def __init__(self) -> None: self._data: dict[str, Any] = {DATA_SDM: {}} # Possible name to use for config entry based on the Google Home name self._structure_config_title: str | None = None - - def _async_reauth_entry(self) -> ConfigEntry | None: - """Return existing entry for reauth.""" - if self.source != SOURCE_REAUTH or not ( - entry_id := self.context.get("entry_id") - ): - return None - return next( - ( - entry - for entry in self._async_current_entries() - if entry.entry_id == entry_id - ), - None, - ) + self._admin_client: AdminClient | None = None + self._eligible_topics: EligibleTopics | None = None + self._eligible_subscriptions: EligibleSubscriptions | None = None @property def logger(self) -> logging.Logger: @@ -128,8 +117,7 @@ def extra_authorize_data(self) -> dict[str, str]: async def async_generate_authorize_url(self) -> str: """Generate a url for the user to authorize based on user input.""" - config = self.hass.data.get(DOMAIN, {}).get(DATA_NEST_CONFIG, {}) - project_id = self._data.get(CONF_PROJECT_ID, config.get(CONF_PROJECT_ID, "")) + project_id = self._data.get(CONF_PROJECT_ID) query = await super().async_generate_authorize_url() authorize_url = OAUTH2_AUTHORIZE.format(project_id=project_id) return f"{authorize_url}{query}" @@ -138,15 +126,17 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu """Complete OAuth setup and finish pubsub or finish.""" _LOGGER.debug("Finishing post-oauth configuration") self._data.update(data) + _LOGGER.debug("self.source=%s", self.source) if self.source == SOURCE_REAUTH: _LOGGER.debug("Skipping Pub/Sub configuration") - return await self.async_step_finish() + return await self._async_finish() return await self.async_step_pubsub() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" + _LOGGER.debug("async_step_reauth %s", self.source) self._data.update(entry_data) return await self.async_step_reauth_confirm() @@ -253,40 +243,114 @@ async def async_step_device_project( async def async_step_pubsub( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Configure and create Pub/Sub subscriber.""" + """Configure and the pre-requisites to configure Pub/Sub topics and subscriptions.""" data = { **self._data, **(user_input if user_input is not None else {}), } cloud_project_id = data.get(CONF_CLOUD_PROJECT_ID, "").strip() - config = self.hass.data.get(DOMAIN, {}).get(DATA_NEST_CONFIG, {}) - project_id = data.get(CONF_PROJECT_ID, config.get(CONF_PROJECT_ID)) + device_access_project_id = data[CONF_PROJECT_ID] errors: dict[str, str] = {} if cloud_project_id: - # Create the subscriber id and/or verify it already exists. 
Note that - # the existing id is used, and create call below is idempotent - if not (subscriber_id := data.get(CONF_SUBSCRIBER_ID, "")): - subscriber_id = _generate_subscription_id(cloud_project_id) - _LOGGER.debug("Creating subscriber id '%s'", subscriber_id) - subscriber = api.new_subscriber_with_token( - self.hass, - self._data["token"]["access_token"], - project_id, - subscriber_id, + access_token = self._data["token"]["access_token"] + self._admin_client = api.new_pubsub_admin_client( + self.hass, access_token=access_token, cloud_project_id=cloud_project_id ) try: - await subscriber.create_subscription() - except AuthException as err: - _LOGGER.error("Subscriber authentication error: %s", err) - return self.async_abort(reason="invalid_access_token") - except ConfigurationException as err: - _LOGGER.error("Configuration error creating subscription: %s", err) - errors[CONF_CLOUD_PROJECT_ID] = "bad_project_id" - except SubscriberException as err: - _LOGGER.error("Error creating subscription: %s", err) - errors[CONF_CLOUD_PROJECT_ID] = "subscriber_error" + eligible_topics = await self._admin_client.list_eligible_topics( + device_access_project_id=device_access_project_id + ) + except ApiException as err: + _LOGGER.error("Error listing eligible Pub/Sub topics: %s", err) + errors["base"] = "pubsub_api_error" + else: + if not eligible_topics.topic_names: + errors["base"] = "no_pubsub_topics" + if not errors: + self._data[CONF_CLOUD_PROJECT_ID] = cloud_project_id + self._eligible_topics = eligible_topics + return await self.async_step_pubsub_topic() + + return self.async_show_form( + step_id="pubsub", + data_schema=vol.Schema( + { + vol.Required(CONF_CLOUD_PROJECT_ID, default=cloud_project_id): str, + } + ), + description_placeholders={ + "url": CLOUD_CONSOLE_URL, + "device_access_console_url": DEVICE_ACCESS_CONSOLE_URL, + "more_info_url": MORE_INFO_URL, + }, + errors=errors, + ) + + async def async_step_pubsub_topic( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure and create Pub/Sub topic.""" + if TYPE_CHECKING: + assert self._eligible_topics + if user_input is not None: + self._data.update(user_input) + return await self.async_step_pubsub_subscription() + topics = list(self._eligible_topics.topic_names) + return self.async_show_form( + step_id="pubsub_topic", + data_schema=vol.Schema( + { + vol.Optional(CONF_TOPIC_NAME, default=topics[0]): vol.In(topics), + } + ), + description_placeholders={ + "device_access_console_url": DEVICE_ACCESS_CONSOLE_URL, + "more_info_url": MORE_INFO_URL, + }, + ) + + async def async_step_pubsub_subscription( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure and create Pub/Sub subscription.""" + if TYPE_CHECKING: + assert self._admin_client + errors = {} + if user_input is not None: + subscription_name = user_input[CONF_SUBSCRIPTION_NAME] + if subscription_name == CREATE_NEW_SUBSCRIPTION_KEY: + topic_name = self._data[CONF_TOPIC_NAME] + subscription_name = _generate_subscription_id( + self._data[CONF_CLOUD_PROJECT_ID] + ) + _LOGGER.debug( + "Creating subscription %s on topic %s", + subscription_name, + topic_name, + ) + try: + await self._admin_client.create_subscription( + topic_name, + subscription_name, + ) + except ApiException as err: + _LOGGER.error("Error creatingPub/Sub subscription: %s", err) + errors["base"] = "pubsub_api_error" + else: + user_input[CONF_SUBSCRIPTION_NAME] = subscription_name + else: + # The user created this subscription themselves so do not delete when 
removing the integration. + user_input[CONF_SUBSCRIBER_ID_IMPORTED] = True + if not errors: + self._data.update(user_input) + subscriber = api.new_subscriber_with_token( + self.hass, + self._data["token"]["access_token"], + self._data[CONF_PROJECT_ID], + subscription_name, + ) try: device_manager = await subscriber.async_get_device_manager() except ApiException as err: @@ -296,39 +360,51 @@ async def async_step_pubsub( self._structure_config_title = generate_config_title( device_manager.structures.values() ) + return await self._async_finish() - self._data.update( - { - CONF_SUBSCRIBER_ID: subscriber_id, - CONF_CLOUD_PROJECT_ID: cloud_project_id, - } + subscriptions = {} + try: + eligible_subscriptions = ( + await self._admin_client.list_eligible_subscriptions( + expected_topic_name=self._data[CONF_TOPIC_NAME], ) - return await self.async_step_finish() - + ) + except ApiException as err: + _LOGGER.error( + "Error talking to API to list eligible Pub/Sub subscriptions: %s", err + ) + errors["base"] = "pubsub_api_error" + else: + subscriptions.update( + {name: name for name in eligible_subscriptions.subscription_names} + ) + subscriptions[CREATE_NEW_SUBSCRIPTION_KEY] = "Create New" return self.async_show_form( - step_id="pubsub", + step_id="pubsub_subscription", data_schema=vol.Schema( { - vol.Required(CONF_CLOUD_PROJECT_ID, default=cloud_project_id): str, + vol.Optional( + CONF_SUBSCRIPTION_NAME, + default=next(iter(subscriptions)), + ): vol.In(subscriptions), } ), - description_placeholders={"url": CLOUD_CONSOLE_URL}, + description_placeholders={ + "topic": self._data[CONF_TOPIC_NAME], + "more_info_url": MORE_INFO_URL, + }, errors=errors, ) - async def async_step_finish( - self, data: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def _async_finish(self) -> ConfigFlowResult: """Create an entry for the SDM flow.""" _LOGGER.debug("Creating/updating configuration entry") # Update existing config entry when in the reauth flow. 
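# async_update_reload_and_abort replaces the manual sequence below: it writes the
# updated entry data, schedules a reload of the entry, and aborts the flow with a
# success reason.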
- if entry := self._async_reauth_entry(): - self.hass.config_entries.async_update_entry( - entry, + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._data, ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") title = self.flow_impl.name if self._structure_config_title: title = self._structure_config_title diff --git a/homeassistant/components/nest/const.py b/homeassistant/components/nest/const.py index 853e778977d35a..0a828dcbf78a04 100644 --- a/homeassistant/components/nest/const.py +++ b/homeassistant/components/nest/const.py @@ -4,13 +4,14 @@ DATA_SDM = "sdm" DATA_SUBSCRIBER = "subscriber" DATA_DEVICE_MANAGER = "device_manager" -DATA_NEST_CONFIG = "nest_config" WEB_AUTH_DOMAIN = DOMAIN INSTALLED_AUTH_DOMAIN = f"{DOMAIN}.installed" CONF_PROJECT_ID = "project_id" -CONF_SUBSCRIBER_ID = "subscriber_id" +CONF_TOPIC_NAME = "topic_name" +CONF_SUBSCRIPTION_NAME = "subscription_name" +CONF_SUBSCRIBER_ID = "subscriber_id" # Old format CONF_SUBSCRIBER_ID_IMPORTED = "subscriber_id_imported" CONF_CLOUD_PROJECT_ID = "cloud_project_id" diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json index 8453c51518d978..44eaeeaf62d199 100644 --- a/homeassistant/components/nest/manifest.json +++ b/homeassistant/components/nest/manifest.json @@ -20,5 +20,5 @@ "iot_class": "cloud_push", "loggers": ["google_nest_sdm"], "quality_scale": "platinum", - "requirements": ["google-nest-sdm==5.0.1"] + "requirements": ["google-nest-sdm==6.1.5"] } diff --git a/homeassistant/components/nest/strings.json b/homeassistant/components/nest/strings.json index 8e40bf27d1f0e1..f6a64dd66e64aa 100644 --- a/homeassistant/components/nest/strings.json +++ b/homeassistant/components/nest/strings.json @@ -26,12 +26,26 @@ "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" }, "pubsub": { - "title": "Configure Google Cloud", - "description": "Visit the [Cloud Console]({url}) to find your Google Cloud Project ID.", + "title": "Configure Google Cloud Pub/Sub", + "description": "Home Assistant uses Cloud Pub/Sub to receive realtime Nest device updates. Nest servers publish updates to a Pub/Sub topic and Home Assistant receives the updates through a Pub/Sub subscription.\n\n1. Visit the [Device Access Console]({device_access_console_url}) and ensure a Pub/Sub topic is configured.\n2. Visit the [Cloud Console]({url}) to find your Google Cloud Project ID and confirm it is correct below.\n3. The next step will attempt to auto-discover Pub/Sub topics and subscriptions.\n\nSee the integration documentation for [more info]({more_info_url}).", "data": { "cloud_project_id": "[%key:component::nest::config::step::cloud_project::data::cloud_project_id%]" } }, + "pubsub_topic": { + "title": "Configure Cloud Pub/Sub topic", + "description": "Nest devices publish updates on a Cloud Pub/Sub topic. Select the Pub/Sub topic below that matches the one configured in the [Device Access Console]({device_access_console_url}). See the integration documentation for [more info]({more_info_url}).", + "data": { + "topic_name": "Pub/Sub topic Name" + } + }, + "pubsub_subscription": { + "title": "Configure Cloud Pub/Sub subscription", + "description": "Home Assistant receives realtime Nest device updates with a Cloud Pub/Sub subscription for topic `{topic}`.\n\nSelect an existing subscription below if one already exists, or the next step will create a new one for you. 
See the integration documentation for [more info]({more_info_url}).", + "data": { + "subscription_name": "Pub/Sub subscription Name" + } + }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", "description": "The Nest integration needs to re-authenticate your account" @@ -40,11 +54,14 @@ "error": { "bad_project_id": "Please enter a valid Cloud Project ID (check Cloud Console)", "wrong_project_id": "Please enter a valid Cloud Project ID (was same as Device Access Project ID)", - "subscriber_error": "Unknown subscriber error, see logs" + "subscriber_error": "Unknown subscriber error, see logs", + "no_pubsub_topics": "No eligible Pub/Sub topics found, please ensure Device Access Console has a Pub/Sub topic.", + "pubsub_api_error": "Unknown error talking to Cloud Pub/Sub, see logs" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "unknown_authorize_url_generation": "[%key:common::config_flow::abort::unknown_authorize_url_generation%]", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", diff --git a/homeassistant/components/netatmo/api.py b/homeassistant/components/netatmo/api.py index f5fe591bfbfc06..f01436a45d59cc 100644 --- a/homeassistant/components/netatmo/api.py +++ b/homeassistant/components/netatmo/api.py @@ -40,6 +40,5 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token for Netatmo API.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/netatmo/climate.py b/homeassistant/components/netatmo/climate.py index c2953b9d49d741..752dee5a95270c 100644 --- a/homeassistant/components/netatmo/climate.py +++ b/homeassistant/components/netatmo/climate.py @@ -58,9 +58,9 @@ _LOGGER = logging.getLogger(__name__) -PRESET_FROST_GUARD = "Frost Guard" -PRESET_SCHEDULE = "Schedule" -PRESET_MANUAL = "Manual" +PRESET_FROST_GUARD = "frost_guard" +PRESET_SCHEDULE = "schedule" +PRESET_MANUAL = "manual" SUPPORT_FLAGS = ( ClimateEntityFeature.TARGET_TEMPERATURE @@ -188,6 +188,7 @@ class NetatmoThermostat(NetatmoRoomEntity, ClimateEntity): _attr_supported_features = SUPPORT_FLAGS _attr_target_temperature_step = PRECISION_HALVES _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = "thermostat" _attr_name = None _away: bool | None = None _connected: bool | None = None diff --git a/homeassistant/components/netatmo/config_flow.py b/homeassistant/components/netatmo/config_flow.py index 0da4d6f16b73a3..d853694ffeae7c 100644 --- a/homeassistant/components/netatmo/config_flow.py +++ b/homeassistant/components/netatmo/config_flow.py @@ -101,7 +101,6 @@ class NetatmoOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Netatmo options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) self.options.setdefault(CONF_WEATHER_AREAS, {}) diff --git a/homeassistant/components/netatmo/icons.json b/homeassistant/components/netatmo/icons.json index 70a51542126252..9f712e08f336a3 100644 --- 
a/homeassistant/components/netatmo/icons.json +++ b/homeassistant/components/netatmo/icons.json @@ -1,5 +1,18 @@ { "entity": { + "climate": { + "thermostat": { + "state_attributes": { + "preset_mode": { + "state": { + "frost_guard": "mdi:snowflake-thermometer", + "schedule": "mdi:clock-outline", + "manual": "mdi:gesture-tap" + } + } + } + } + }, "sensor": { "temp_trend": { "default": "mdi:trending-up" diff --git a/homeassistant/components/netatmo/strings.json b/homeassistant/components/netatmo/strings.json index 3c360634147f7b..6b91aa204b2302 100644 --- a/homeassistant/components/netatmo/strings.json +++ b/homeassistant/components/netatmo/strings.json @@ -168,6 +168,19 @@ } }, "entity": { + "climate": { + "thermostat": { + "state_attributes": { + "preset_mode": { + "state": { + "frost_guard": "Frost guard", + "schedule": "Schedule", + "manual": "Manual" + } + } + } + } + }, "sensor": { "temp_trend": { "name": "Temperature trend" diff --git a/homeassistant/components/netgear/__init__.py b/homeassistant/components/netgear/__init__.py index 58f63e5212a2bc..fa18c3510ba9be 100644 --- a/homeassistant/components/netgear/__init__.py +++ b/homeassistant/components/netgear/__init__.py @@ -93,6 +93,7 @@ async def async_check_link_status() -> dict[str, Any] | None: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Devices", update_method=async_update_devices, update_interval=SCAN_INTERVAL, @@ -100,6 +101,7 @@ async def async_check_link_status() -> dict[str, Any] | None: coordinator_traffic_meter = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Traffic meter", update_method=async_update_traffic_meter, update_interval=SCAN_INTERVAL, @@ -107,6 +109,7 @@ async def async_check_link_status() -> dict[str, Any] | None: coordinator_speed_test = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Speed test", update_method=async_update_speed_test, update_interval=SPEED_TEST_INTERVAL, @@ -114,6 +117,7 @@ async def async_check_link_status() -> dict[str, Any] | None: coordinator_firmware = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Firmware", update_method=async_check_firmware, update_interval=SCAN_INTERVAL_FIRMWARE, @@ -121,6 +125,7 @@ async def async_check_link_status() -> dict[str, Any] | None: coordinator_utilization = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Utilization", update_method=async_update_utilization, update_interval=SCAN_INTERVAL, @@ -128,6 +133,7 @@ async def async_check_link_status() -> dict[str, Any] | None: coordinator_link = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"{router.device_name} Ethernet Link Status", update_method=async_check_link_status, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/netgear/config_flow.py b/homeassistant/components/netgear/config_flow.py index fba934af38d1fa..965e3618645b1d 100644 --- a/homeassistant/components/netgear/config_flow.py +++ b/homeassistant/components/netgear/config_flow.py @@ -63,10 +63,6 @@ def _ordered_shared_schema(schema_input): class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: @@ -109,7 +105,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> 
OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def _show_setup_form( self, diff --git a/homeassistant/components/network/websocket.py b/homeassistant/components/network/websocket.py index b97bd2d58d1ab6..22f7dc23f1ed39 100644 --- a/homeassistant/components/network/websocket.py +++ b/homeassistant/components/network/websocket.py @@ -2,6 +2,7 @@ from __future__ import annotations +from contextlib import suppress from typing import Any import voluptuous as vol @@ -9,6 +10,7 @@ from homeassistant.components import websocket_api from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.network import NoURLAvailableError, get_url from .const import ATTR_ADAPTERS, ATTR_CONFIGURED_ADAPTERS, NETWORK_CONFIG_SCHEMA from .network import async_get_network @@ -19,6 +21,7 @@ def async_register_websocket_commands(hass: HomeAssistant) -> None: """Register network websocket commands.""" websocket_api.async_register_command(hass, websocket_network_adapters) websocket_api.async_register_command(hass, websocket_network_adapters_configure) + websocket_api.async_register_command(hass, websocket_network_url) @websocket_api.require_admin @@ -62,3 +65,40 @@ async def websocket_network_adapters_configure( msg["id"], {ATTR_CONFIGURED_ADAPTERS: network.configured_adapters}, ) + + +@callback +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "network/url", + } +) +def websocket_network_url( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get the internal, external, and cloud URLs.""" + internal_url = None + external_url = None + cloud_url = None + with suppress(NoURLAvailableError): + internal_url = get_url( + hass, allow_internal=True, allow_external=False, allow_cloud=False + ) + with suppress(NoURLAvailableError): + external_url = get_url( + hass, allow_internal=False, allow_external=True, prefer_external=True + ) + with suppress(NoURLAvailableError): + cloud_url = get_url(hass, allow_internal=False, require_cloud=True) + + connection.send_result( + msg["id"], + { + "internal": internal_url, + "external": external_url, + "cloud": cloud_url, + }, + ) diff --git a/homeassistant/components/nexia/switch.py b/homeassistant/components/nexia/switch.py index f92443517c8641..9505538e86a0e1 100644 --- a/homeassistant/components/nexia/switch.py +++ b/homeassistant/components/nexia/switch.py @@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import NexiaDataUpdateCoordinator -from .entity import NexiaThermostatZoneEntity +from .entity import NexiaThermostatEntity, NexiaThermostatZoneEntity from .types import NexiaConfigEntry @@ -28,11 +28,11 @@ async def async_setup_entry( entities: list[NexiaHoldSwitch | NexiaEmergencyHeatSwitch] = [] for thermostat_id in nexia_home.get_thermostat_ids(): thermostat: NexiaThermostat = nexia_home.get_thermostat_by_id(thermostat_id) + if thermostat.has_emergency_heat(): + entities.append(NexiaEmergencyHeatSwitch(coordinator, thermostat)) for zone_id in thermostat.get_zone_ids(): zone: NexiaThermostatZone = thermostat.get_zone_by_id(zone_id) entities.append(NexiaHoldSwitch(coordinator, zone)) - if thermostat.has_emergency_heat(): - entities.append(NexiaEmergencyHeatSwitch(coordinator, zone)) async_add_entities(entities) @@ -68,17 +68,20 @@ async def async_turn_off(self, **kwargs: Any) -> 
None: self._signal_zone_update() -class NexiaEmergencyHeatSwitch(NexiaThermostatZoneEntity, SwitchEntity): +class NexiaEmergencyHeatSwitch(NexiaThermostatEntity, SwitchEntity): """Provides Nexia emergency heat switch support.""" _attr_translation_key = "emergency_heat" def __init__( - self, coordinator: NexiaDataUpdateCoordinator, zone: NexiaThermostatZone + self, coordinator: NexiaDataUpdateCoordinator, thermostat: NexiaThermostat ) -> None: """Initialize the emergency heat mode switch.""" - zone_id = zone.zone_id - super().__init__(coordinator, zone, zone_id) + super().__init__( + coordinator, + thermostat, + unique_id=f"{thermostat.thermostat_id}_emergency_heat", + ) @property def is_on(self) -> bool: diff --git a/homeassistant/components/nextbus/__init__.py b/homeassistant/components/nextbus/__init__.py index 817990620fe75b..168488e19402cc 100644 --- a/homeassistant/components/nextbus/__init__.py +++ b/homeassistant/components/nextbus/__init__.py @@ -3,6 +3,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_STOP, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from .const import CONF_AGENCY, CONF_ROUTE, DOMAIN from .coordinator import NextBusDataUpdateCoordinator @@ -27,7 +28,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator.add_stop_route(entry_stop, entry.data[CONF_ROUTE]) - await coordinator.async_config_entry_first_refresh() + await coordinator.async_refresh() + if not coordinator.last_update_success: + raise ConfigEntryNotReady from coordinator.last_exception await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/nextbus/coordinator.py b/homeassistant/components/nextbus/coordinator.py index dcaafa9573b750..617669adf2f65e 100644 --- a/homeassistant/components/nextbus/coordinator.py +++ b/homeassistant/components/nextbus/coordinator.py @@ -24,6 +24,7 @@ def __init__(self, hass: HomeAssistant, agency: str) -> None: super().__init__( hass, _LOGGER, + config_entry=None, # It is shared between multiple entries name=DOMAIN, update_interval=timedelta(seconds=30), ) @@ -48,13 +49,6 @@ def has_routes(self) -> bool: """Check if this coordinator is tracking any routes.""" return len(self._route_stops) > 0 - async def async_shutdown(self) -> None: - """If there are no more routes, cancel any scheduled call, and ignore new runs.""" - if self.has_routes(): - return - - await super().async_shutdown() - async def _async_update_data(self) -> dict[str, Any]: """Fetch data from NextBus.""" diff --git a/homeassistant/components/nextcloud/config_flow.py b/homeassistant/components/nextcloud/config_flow.py index c469936ac487d6..6c59dd271d5736 100644 --- a/homeassistant/components/nextcloud/config_flow.py +++ b/homeassistant/components/nextcloud/config_flow.py @@ -13,7 +13,7 @@ ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL from .const import DEFAULT_VERIFY_SSL, DOMAIN @@ -39,8 +39,6 @@ class NextcloudConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _entry: ConfigEntry | None = None - def _try_connect_nc(self, user_input: dict) -> NextcloudMonitor: """Try to connect to nextcloud server.""" return NextcloudMonitor( @@ -79,7 +77,6 @@ async def async_step_reauth( self, entry_data: 
Mapping[str, Any] ) -> ConfigFlowResult: """Handle flow upon an API authentication error.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -87,32 +84,29 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Handle reauthorization flow.""" errors = {} - assert self._entry is not None + reauth_entry = self._get_reauth_entry() if user_input is not None: try: await self.hass.async_add_executor_job( - self._try_connect_nc, {**self._entry.data, **user_input} + self._try_connect_nc, {**reauth_entry.data, **user_input} ) except NextcloudMonitorAuthorizationError: errors["base"] = "invalid_auth" except (NextcloudMonitorConnectionError, NextcloudMonitorRequestError): errors["base"] = "connection_error" else: - self.hass.config_entries.async_update_entry( - self._entry, - data={**self._entry.data, **user_input}, + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - await self.hass.config_entries.async_reload(self._entry.entry_id) - return self.async_abort(reason="reauth_successful") data_schema = self.add_suggested_values_to_schema( DATA_SCHEMA_REAUTH, - {CONF_USERNAME: self._entry.data[CONF_USERNAME], **(user_input or {})}, + {CONF_USERNAME: reauth_entry.data[CONF_USERNAME], **(user_input or {})}, ) return self.async_show_form( step_id="reauth_confirm", data_schema=data_schema, - description_placeholders={"url": self._entry.data[CONF_URL]}, + description_placeholders={"url": reauth_entry.data[CONF_URL]}, errors=errors, ) diff --git a/homeassistant/components/nextdns/config_flow.py b/homeassistant/components/nextdns/config_flow.py index 80caba6ec7ed5b..d3327c4c08be83 100644 --- a/homeassistant/components/nextdns/config_flow.py +++ b/homeassistant/components/nextdns/config_flow.py @@ -3,14 +3,14 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any +from typing import Any from aiohttp.client_exceptions import ClientConnectorError from nextdns import ApiError, InvalidApiKeyError, NextDns from tenacity import RetryError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_PROFILE_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -36,7 +36,6 @@ def __init__(self) -> None: """Initialize the config flow.""" self.nextdns: NextDns self.api_key: str - self.entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -97,7 +96,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -116,11 +114,8 @@ async def async_step_reauth_confirm( except Exception: # noqa: BLE001 errors["base"] = "unknown" else: - if TYPE_CHECKING: - assert self.entry is not None - return self.async_update_reload_and_abort( - self.entry, data={**self.entry.data, **user_input} + self._get_reauth_entry(), data_updates=user_input ) return self.async_show_form( diff --git a/homeassistant/components/nice_go/config_flow.py b/homeassistant/components/nice_go/config_flow.py index 
94594bbd11fd5d..da3940117e9a90 100644 --- a/homeassistant/components/nice_go/config_flow.py +++ b/homeassistant/components/nice_go/config_flow.py @@ -5,7 +5,7 @@ from collections.abc import Mapping from datetime import datetime import logging -from typing import TYPE_CHECKING, Any +from typing import Any from nice_go import AuthFailedError, NiceGOApi import voluptuous as vol @@ -14,7 +14,6 @@ from homeassistant.const import CONF_EMAIL, CONF_NAME, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import NiceGOConfigEntry from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -31,7 +30,6 @@ class NiceGOConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Nice G.O.""" VERSION = 1 - reauth_entry: NiceGOConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -74,10 +72,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -86,9 +80,7 @@ async def async_step_reauth_confirm( """Confirm re-authentication.""" errors = {} - if TYPE_CHECKING: - assert self.reauth_entry is not None - + reauth_entry = self._get_reauth_entry() if user_input is not None: hub = NiceGOApi() @@ -105,7 +97,7 @@ async def async_step_reauth_confirm( errors["base"] = "unknown" else: return self.async_update_reload_and_abort( - self.reauth_entry, + reauth_entry, data={ **user_input, CONF_REFRESH_TOKEN: refresh_token, @@ -118,8 +110,8 @@ async def async_step_reauth_confirm( step_id="reauth_confirm", data_schema=self.add_suggested_values_to_schema( STEP_USER_DATA_SCHEMA, - user_input or {CONF_EMAIL: self.reauth_entry.data[CONF_EMAIL]}, + user_input or {CONF_EMAIL: reauth_entry.data[CONF_EMAIL]}, ), - description_placeholders={CONF_NAME: self.reauth_entry.title}, + description_placeholders={CONF_NAME: reauth_entry.title}, errors=errors, ) diff --git a/homeassistant/components/nice_go/const.py b/homeassistant/components/nice_go/const.py index c3caa92c8be7ac..a6635368f7babf 100644 --- a/homeassistant/components/nice_go/const.py +++ b/homeassistant/components/nice_go/const.py @@ -2,6 +2,8 @@ from datetime import timedelta +from homeassistant.const import Platform + DOMAIN = "nice_go" # Configuration @@ -11,3 +13,22 @@ CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" REFRESH_TOKEN_EXPIRY_TIME = timedelta(days=30) + +SUPPORTED_DEVICE_TYPES = { + Platform.LIGHT: ["WallStation"], + Platform.SWITCH: ["WallStation"], +} +KNOWN_UNSUPPORTED_DEVICE_TYPES = { + Platform.LIGHT: ["Mms100"], + Platform.SWITCH: ["Mms100"], +} + +UNSUPPORTED_DEVICE_WARNING = ( + "Device '%s' has unknown device type '%s', " + "which is not supported by this integration. " + "We try to support it with a cover and event entity, but nothing else. 
" + "Please create an issue with your device model in additional info" + " at https://github.com/home-assistant/core/issues/new" + "?assignees=&labels=&projects=&template=bug_report.yml" + "&title=New%%20Nice%%20G.O.%%20device%%20type%%20'%s'%%20found" +) diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py index dd2d7ccb45e06c..29c0d8233fe325 100644 --- a/homeassistant/components/nice_go/coordinator.py +++ b/homeassistant/components/nice_go/coordinator.py @@ -44,13 +44,14 @@ class NiceGODevice: """Nice G.O. device dataclass.""" + type: str id: str name: str barrier_status: str light_status: bool | None fw_version: str connected: bool - vacation_mode: bool + vacation_mode: bool | None class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): @@ -85,7 +86,9 @@ def async_ha_stop(self, event: Event) -> None: """Stop reconnecting if hass is stopping.""" self._hass_stopping = True - async def _parse_barrier(self, barrier_state: BarrierState) -> NiceGODevice | None: + async def _parse_barrier( + self, device_type: str, barrier_state: BarrierState + ) -> NiceGODevice | None: """Parse barrier data.""" device_id = barrier_state.deviceId @@ -121,11 +124,15 @@ async def _parse_barrier(self, barrier_state: BarrierState) -> NiceGODevice | No fw_version = barrier_state.reported["deviceFwVersion"] if barrier_state.connectionState: connected = barrier_state.connectionState.connected + elif device_type == "Mms100": + connected = barrier_state.reported.get("radioConnected", 0) == 1 else: - connected = False - vacation_mode = barrier_state.reported["vcnMode"] + # Assume connected + connected = True + vacation_mode = barrier_state.reported.get("vcnMode", None) return NiceGODevice( + type=device_type, id=device_id, name=name, barrier_status=barrier_status, @@ -156,7 +163,8 @@ async def _async_setup(self) -> None: barriers = await self.api.get_all_barriers() parsed_barriers = [ - await self._parse_barrier(barrier.state) for barrier in barriers + await self._parse_barrier(barrier.type, barrier.state) + for barrier in barriers ] # Parse the barriers and save them in a dictionary @@ -226,6 +234,9 @@ async def on_data(self, data: dict[str, Any]) -> None: _LOGGER.debug(data) raw_data = data["data"]["devicesStatesUpdateFeed"]["item"] parsed_data = await self._parse_barrier( + self.data[ + raw_data["deviceId"] + ].type, # Device type is not sent in device state update, and it can't change, so we just reuse the existing one BarrierState( deviceId=raw_data["deviceId"], desired=json.loads(raw_data["desired"]), @@ -238,7 +249,7 @@ async def on_data(self, data: dict[str, Any]) -> None: else None, version=raw_data["version"], timestamp=raw_data["timestamp"], - ) + ), ) if parsed_data is None: return diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py index 7ded43de165a0e..a823e93180411a 100644 --- a/homeassistant/components/nice_go/cover.py +++ b/homeassistant/components/nice_go/cover.py @@ -18,6 +18,10 @@ from .const import DOMAIN from .entity import NiceGOEntity +DEVICE_CLASSES = { + "WallStation": CoverDeviceClass.GARAGE, + "Mms100": CoverDeviceClass.GATE, +} PARALLEL_UPDATES = 1 @@ -40,7 +44,11 @@ class NiceGOCoverEntity(NiceGOEntity, CoverEntity): _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE _attr_name = None - _attr_device_class = CoverDeviceClass.GARAGE + + @property + def device_class(self) -> CoverDeviceClass: + """Return the class of this device, from 
component DEVICE_CLASSES.""" + return DEVICE_CLASSES.get(self.data.type, CoverDeviceClass.GARAGE) @property def is_closed(self) -> bool: diff --git a/homeassistant/components/nice_go/light.py b/homeassistant/components/nice_go/light.py index 6b5f5cd39eee82..abb192adde176b 100644 --- a/homeassistant/components/nice_go/light.py +++ b/homeassistant/components/nice_go/light.py @@ -1,19 +1,28 @@ """Nice G.O. light.""" +import logging from typing import TYPE_CHECKING, Any from aiohttp import ClientError from nice_go import ApiError from homeassistant.components.light import ColorMode, LightEntity +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import NiceGOConfigEntry -from .const import DOMAIN +from .const import ( + DOMAIN, + KNOWN_UNSUPPORTED_DEVICE_TYPES, + SUPPORTED_DEVICE_TYPES, + UNSUPPORTED_DEVICE_WARNING, +) from .entity import NiceGOEntity +_LOGGER = logging.getLogger(__name__) + async def async_setup_entry( hass: HomeAssistant, @@ -24,11 +33,20 @@ async def async_setup_entry( coordinator = config_entry.runtime_data - async_add_entities( - NiceGOLightEntity(coordinator, device_id, device_data.name) - for device_id, device_data in coordinator.data.items() - if device_data.light_status is not None - ) + entities = [] + + for device_id, device_data in coordinator.data.items(): + if device_data.type in SUPPORTED_DEVICE_TYPES[Platform.LIGHT]: + entities.append(NiceGOLightEntity(coordinator, device_id, device_data.name)) + elif device_data.type not in KNOWN_UNSUPPORTED_DEVICE_TYPES[Platform.LIGHT]: + _LOGGER.warning( + UNSUPPORTED_DEVICE_WARNING, + device_data.name, + device_data.type, + device_data.type, + ) + + async_add_entities(entities) class NiceGOLightEntity(NiceGOEntity, LightEntity): diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index d3f54e5e668449..817d7ef9bc9d43 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["nice_go"], - "requirements": ["nice-go==0.3.9"] + "requirements": ["nice-go==0.3.10"] } diff --git a/homeassistant/components/nice_go/switch.py b/homeassistant/components/nice_go/switch.py index a74a18328c9c08..e3b85528f3bd6a 100644 --- a/homeassistant/components/nice_go/switch.py +++ b/homeassistant/components/nice_go/switch.py @@ -3,18 +3,24 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp import ClientError from nice_go import ApiError from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import NiceGOConfigEntry -from .const import DOMAIN +from .const import ( + DOMAIN, + KNOWN_UNSUPPORTED_DEVICE_TYPES, + SUPPORTED_DEVICE_TYPES, + UNSUPPORTED_DEVICE_WARNING, +) from .entity import NiceGOEntity _LOGGER = logging.getLogger(__name__) @@ -28,10 +34,22 @@ async def async_setup_entry( """Set up Nice G.O. 
switch.""" coordinator = config_entry.runtime_data - async_add_entities( - NiceGOSwitchEntity(coordinator, device_id, device_data.name) - for device_id, device_data in coordinator.data.items() - ) + entities = [] + + for device_id, device_data in coordinator.data.items(): + if device_data.type in SUPPORTED_DEVICE_TYPES[Platform.SWITCH]: + entities.append( + NiceGOSwitchEntity(coordinator, device_id, device_data.name) + ) + elif device_data.type not in KNOWN_UNSUPPORTED_DEVICE_TYPES[Platform.SWITCH]: + _LOGGER.warning( + UNSUPPORTED_DEVICE_WARNING, + device_data.name, + device_data.type, + device_data.type, + ) + + async_add_entities(entities) class NiceGOSwitchEntity(NiceGOEntity, SwitchEntity): @@ -43,6 +61,8 @@ class NiceGOSwitchEntity(NiceGOEntity, SwitchEntity): @property def is_on(self) -> bool: """Return if switch is on.""" + if TYPE_CHECKING: + assert self.data.vacation_mode is not None return self.data.vacation_mode async def async_turn_on(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/nightscout/sensor.py b/homeassistant/components/nightscout/sensor.py index 92291bdc4f9957..620349ec3c3ff5 100644 --- a/homeassistant/components/nightscout/sensor.py +++ b/homeassistant/components/nightscout/sensor.py @@ -9,9 +9,9 @@ from aiohttp import ClientError from py_nightscout import Api as NightscoutAPI -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_DATE +from homeassistant.const import ATTR_DATE, UnitOfBloodGlucoseConcentration from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -37,7 +37,10 @@ async def async_setup_entry( class NightscoutSensor(SensorEntity): """Implementation of a Nightscout sensor.""" - _attr_native_unit_of_measurement = "mg/dL" + _attr_device_class = SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION + _attr_native_unit_of_measurement = ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER + ) _attr_icon = "mdi:cloud-question" def __init__(self, api: NightscoutAPI, name: str, unique_id: str | None) -> None: diff --git a/homeassistant/components/nina/config_flow.py b/homeassistant/components/nina/config_flow.py index e048ce81be3f2f..a1ba9ae0c61cfb 100644 --- a/homeassistant/components/nina/config_flow.py +++ b/homeassistant/components/nina/config_flow.py @@ -116,7 +116,7 @@ async def async_step_user( errors["base"] = "cannot_connect" except Exception as err: # noqa: BLE001 _LOGGER.exception("Unexpected exception: %s", err) - return self.async_abort(reason="unknown") + errors["base"] = "unknown" self.regions = split_regions(self._all_region_codes_sorted, self.regions) @@ -171,8 +171,7 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry - self.data = dict(self.config_entry.data) + self.data = dict(config_entry.data) self._all_region_codes_sorted: dict[str, str] = {} self.regions: dict[str, dict[str, Any]] = {} @@ -199,7 +198,7 @@ async def async_step_init( errors["base"] = "cannot_connect" except Exception as err: # noqa: BLE001 _LOGGER.exception("Unexpected exception: %s", err) - return self.async_abort(reason="unknown") + errors["base"] = "unknown" self.regions = split_regions(self._all_region_codes_sorted, self.regions) diff --git a/homeassistant/components/nmap_tracker/config_flow.py 
b/homeassistant/components/nmap_tracker/config_flow.py index b724dca1a81514..e05150995aa92b 100644 --- a/homeassistant/components/nmap_tracker/config_flow.py +++ b/homeassistant/components/nmap_tracker/config_flow.py @@ -213,6 +213,6 @@ def _async_is_unique_host_list(self, user_input: dict[str, Any]) -> bool: @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) diff --git a/homeassistant/components/nmap_tracker/manifest.json b/homeassistant/components/nmap_tracker/manifest.json index 08d9b94cf2d512..5b2dab50812ece 100644 --- a/homeassistant/components/nmap_tracker/manifest.json +++ b/homeassistant/components/nmap_tracker/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/nmap_tracker", "iot_class": "local_polling", "loggers": ["nmap"], - "requirements": ["netmap==0.7.0.2", "getmac==0.9.4", "aiooui==0.1.6"] + "requirements": ["netmap==0.7.0.2", "getmac==0.9.4", "aiooui==0.1.7"] } diff --git a/homeassistant/components/nobo_hub/config_flow.py b/homeassistant/components/nobo_hub/config_flow.py index 8aed520f21e7a7..7e1ae4c1d9b657 100644 --- a/homeassistant/components/nobo_hub/config_flow.py +++ b/homeassistant/components/nobo_hub/config_flow.py @@ -175,7 +175,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class NoboHubConnectError(HomeAssistantError): @@ -190,10 +190,6 @@ def __init__(self, msg) -> None: class OptionsFlowHandler(OptionsFlow): """Handles options flow for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" diff --git a/homeassistant/components/nordpool/__init__.py b/homeassistant/components/nordpool/__init__.py new file mode 100644 index 00000000000000..b688bf74a3705e --- /dev/null +++ b/homeassistant/components/nordpool/__init__.py @@ -0,0 +1,29 @@ +"""The Nord Pool component.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from .const import PLATFORMS +from .coordinator import NordPoolDataUpdateCoordinator + +type NordPoolConfigEntry = ConfigEntry[NordPoolDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: + """Set up Nord Pool from a config entry.""" + + coordinator = NordPoolDataUpdateCoordinator(hass, entry) + await coordinator.fetch_data(dt_util.utcnow()) + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: + """Unload Nord Pool config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/nordpool/config_flow.py b/homeassistant/components/nordpool/config_flow.py new file mode 100644 index 00000000000000..1d75d825e47679 --- /dev/null +++ b/homeassistant/components/nordpool/config_flow.py @@ -0,0 +1,115 @@ +"""Adds config flow for Nord Pool integration.""" + +from 
__future__ import annotations + +from typing import Any + +from pynordpool import ( + Currency, + NordPoolClient, + NordPoolEmptyResponseError, + NordPoolError, +) +from pynordpool.const import AREAS +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) +from homeassistant.util import dt as dt_util + +from .const import CONF_AREAS, DEFAULT_NAME, DOMAIN + +SELECT_AREAS = [ + SelectOptionDict(value=area, label=name) for area, name in AREAS.items() +] +SELECT_CURRENCY = [currency.value for currency in Currency] + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_AREAS, default=[]): SelectSelector( + SelectSelectorConfig( + options=SELECT_AREAS, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + sort=True, + ) + ), + vol.Required(CONF_CURRENCY, default="SEK"): SelectSelector( + SelectSelectorConfig( + options=SELECT_CURRENCY, + multiple=False, + mode=SelectSelectorMode.DROPDOWN, + sort=True, + ) + ), + } +) + + +async def test_api(hass: HomeAssistant, user_input: dict[str, Any]) -> dict[str, str]: + """Test fetch data from Nord Pool.""" + client = NordPoolClient(async_get_clientsession(hass)) + try: + await client.async_get_delivery_period( + dt_util.now(), + Currency(user_input[CONF_CURRENCY]), + user_input[CONF_AREAS], + ) + except NordPoolEmptyResponseError: + return {"base": "no_data"} + except NordPoolError: + return {"base": "cannot_connect"} + + return {} + + +class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Nord Pool integration.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input: + errors = await test_api(self.hass, user_input) + if not errors: + return self.async_create_entry( + title=DEFAULT_NAME, + data=user_input, + ) + + return self.async_show_form( + step_id="user", + data_schema=DATA_SCHEMA, + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the reconfiguration step.""" + errors: dict[str, str] = {} + if user_input: + errors = await test_api(self.hass, user_input) + reconfigure_entry = self._get_reconfigure_entry() + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/nordpool/const.py b/homeassistant/components/nordpool/const.py new file mode 100644 index 00000000000000..19a978d946cb47 --- /dev/null +++ b/homeassistant/components/nordpool/const.py @@ -0,0 +1,14 @@ +"""Constants for Nord Pool.""" + +import logging + +from homeassistant.const import Platform + +LOGGER = logging.getLogger(__package__) + +DEFAULT_SCAN_INTERVAL = 60 +DOMAIN = "nordpool" +PLATFORMS = [Platform.SENSOR] +DEFAULT_NAME = "Nord Pool" + +CONF_AREAS = "areas" diff --git a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py new file mode 100644 index 00000000000000..fa4e9ca2548101 --- /dev/null +++ 
b/homeassistant/components/nordpool/coordinator.py @@ -0,0 +1,91 @@ +"""DataUpdateCoordinator for the Nord Pool integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from datetime import datetime, timedelta +from typing import TYPE_CHECKING + +from pynordpool import ( + Currency, + DeliveryPeriodData, + NordPoolClient, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) + +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.event import async_track_point_in_utc_time +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util import dt as dt_util + +from .const import CONF_AREAS, DOMAIN, LOGGER + +if TYPE_CHECKING: + from . import NordPoolConfigEntry + + +class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): + """A Nord Pool Data Update Coordinator.""" + + config_entry: NordPoolConfigEntry + + def __init__(self, hass: HomeAssistant, config_entry: NordPoolConfigEntry) -> None: + """Initialize the Nord Pool coordinator.""" + super().__init__( + hass, + LOGGER, + config_entry=config_entry, + name=DOMAIN, + ) + self.client = NordPoolClient(session=async_get_clientsession(hass)) + self.unsub: Callable[[], None] | None = None + + def get_next_interval(self, now: datetime) -> datetime: + """Compute next time an update should occur.""" + next_hour = dt_util.utcnow() + timedelta(hours=1) + next_run = datetime( + next_hour.year, + next_hour.month, + next_hour.day, + next_hour.hour, + tzinfo=dt_util.UTC, + ) + LOGGER.debug("Next update at %s", next_run) + return next_run + + async def async_shutdown(self) -> None: + """Cancel any scheduled call, and ignore new runs.""" + await super().async_shutdown() + if self.unsub: + self.unsub() + self.unsub = None + + async def fetch_data(self, now: datetime) -> None: + """Fetch data from Nord Pool.""" + self.unsub = async_track_point_in_utc_time( + self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow()) + ) + try: + data = await self.client.async_get_delivery_period( + dt_util.now(), + Currency(self.config_entry.data[CONF_CURRENCY]), + self.config_entry.data[CONF_AREAS], + ) + except NordPoolEmptyResponseError as error: + LOGGER.debug("Empty response error: %s", error) + self.async_set_update_error(error) + return + except NordPoolResponseError as error: + LOGGER.debug("Response error: %s", error) + self.async_set_update_error(error) + return + except NordPoolError as error: + LOGGER.debug("Connection error: %s", error) + self.async_set_update_error(error) + return + + self.async_set_updated_data(data) diff --git a/homeassistant/components/nordpool/entity.py b/homeassistant/components/nordpool/entity.py new file mode 100644 index 00000000000000..32240aad12cc8f --- /dev/null +++ b/homeassistant/components/nordpool/entity.py @@ -0,0 +1,32 @@ +"""Base entity for Nord Pool.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import NordPoolDataUpdateCoordinator + + +class NordpoolBaseEntity(CoordinatorEntity[NordPoolDataUpdateCoordinator]): + """Representation of a Nord Pool base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + 
coordinator: NordPoolDataUpdateCoordinator, + entity_description: EntityDescription, + area: str, + ) -> None: + """Initiate Nord Pool base entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = f"{area}-{entity_description.key}" + self.area = area + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, area)}, + name=f"Nord Pool {area}", + ) diff --git a/homeassistant/components/nordpool/icons.json b/homeassistant/components/nordpool/icons.json new file mode 100644 index 00000000000000..85434a2d09b613 --- /dev/null +++ b/homeassistant/components/nordpool/icons.json @@ -0,0 +1,42 @@ +{ + "entity": { + "sensor": { + "updated_at": { + "default": "mdi:clock-outline" + }, + "currency": { + "default": "mdi:currency-usd" + }, + "exchange_rate": { + "default": "mdi:currency-usd" + }, + "current_price": { + "default": "mdi:cash" + }, + "last_price": { + "default": "mdi:cash" + }, + "next_price": { + "default": "mdi:cash" + }, + "block_average": { + "default": "mdi:cash-multiple" + }, + "block_min": { + "default": "mdi:cash-multiple" + }, + "block_max": { + "default": "mdi:cash-multiple" + }, + "block_start_time": { + "default": "mdi:clock-time-twelve-outline" + }, + "block_end_time": { + "default": "mdi:clock-time-two-outline" + }, + "daily_average": { + "default": "mdi:cash-multiple" + } + } + } +} diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json new file mode 100644 index 00000000000000..bf093eb3ee9282 --- /dev/null +++ b/homeassistant/components/nordpool/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "nordpool", + "name": "Nord Pool", + "codeowners": ["@gjohansson-ST"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/nordpool", + "integration_type": "hub", + "iot_class": "cloud_polling", + "loggers": ["pynordpool"], + "requirements": ["pynordpool==0.2.2"], + "single_config_entry": true +} diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py new file mode 100644 index 00000000000000..e7e655a66572f1 --- /dev/null +++ b/homeassistant/components/nordpool/sensor.py @@ -0,0 +1,328 @@ +"""Sensor platform for Nord Pool integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta + +from pynordpool import DeliveryPeriodData + +from homeassistant.components.sensor import ( + EntityCategory, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util, slugify + +from . import NordPoolConfigEntry +from .const import LOGGER +from .coordinator import NordPoolDataUpdateCoordinator +from .entity import NordpoolBaseEntity + +PARALLEL_UPDATES = 0 + + +def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float]]: + """Return previous, current and next prices. 
+ + Output: {"SE3": (10.0, 10.5, 12.1)} + """ + last_price_entries: dict[str, float] = {} + current_price_entries: dict[str, float] = {} + next_price_entries: dict[str, float] = {} + current_time = dt_util.utcnow() + previous_time = current_time - timedelta(hours=1) + next_time = current_time + timedelta(hours=1) + price_data = data.entries + for entry in price_data: + if entry.start <= current_time <= entry.end: + current_price_entries = entry.entry + if entry.start <= previous_time <= entry.end: + last_price_entries = entry.entry + if entry.start <= next_time <= entry.end: + next_price_entries = entry.entry + + result = {} + for area, price in current_price_entries.items(): + result[area] = (last_price_entries[area], price, next_price_entries[area]) + LOGGER.debug("Prices: %s", result) + return result + + +def get_blockprices( + data: DeliveryPeriodData, +) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: + """Return average, min and max for block prices. + + Output: {"SE3": {"Off-peak 1": (_datetime_, _datetime_, 9.3, 10.5, 12.1)}} + """ + result: dict[str, dict[str, tuple[datetime, datetime, float, float, float]]] = {} + block_prices = data.block_prices + for entry in block_prices: + for _area in entry.average: + if _area not in result: + result[_area] = {} + result[_area][entry.name] = ( + entry.start, + entry.end, + entry.average[_area]["average"], + entry.average[_area]["min"], + entry.average[_area]["max"], + ) + + LOGGER.debug("Block prices: %s", result) + return result + + +@dataclass(frozen=True, kw_only=True) +class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool default sensor entity.""" + + value_fn: Callable[[DeliveryPeriodData], str | float | datetime | None] + + +@dataclass(frozen=True, kw_only=True) +class NordpoolPricesSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool prices sensor entity.""" + + value_fn: Callable[[tuple[float, float, float]], float | None] + + +@dataclass(frozen=True, kw_only=True) +class NordpoolBlockPricesSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool block prices sensor entity.""" + + value_fn: Callable[ + [tuple[datetime, datetime, float, float, float]], float | datetime | None + ] + + +DEFAULT_SENSOR_TYPES: tuple[NordpoolDefaultSensorEntityDescription, ...] = ( + NordpoolDefaultSensorEntityDescription( + key="updated_at", + translation_key="updated_at", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.updated_at, + entity_category=EntityCategory.DIAGNOSTIC, + ), + NordpoolDefaultSensorEntityDescription( + key="currency", + translation_key="currency", + value_fn=lambda data: data.currency, + entity_category=EntityCategory.DIAGNOSTIC, + ), + NordpoolDefaultSensorEntityDescription( + key="exchange_rate", + translation_key="exchange_rate", + value_fn=lambda data: data.exchange_rate, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) +PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] 
= ( + NordpoolPricesSensorEntityDescription( + key="current_price", + translation_key="current_price", + value_fn=lambda data: data[1] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="last_price", + translation_key="last_price", + value_fn=lambda data: data[0] / 1000, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="next_price", + translation_key="next_price", + value_fn=lambda data: data[2] / 1000, + suggested_display_precision=2, + ), +) +BLOCK_PRICES_SENSOR_TYPES: tuple[NordpoolBlockPricesSensorEntityDescription, ...] = ( + NordpoolBlockPricesSensorEntityDescription( + key="block_average", + translation_key="block_average", + value_fn=lambda data: data[2] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_min", + translation_key="block_min", + value_fn=lambda data: data[3] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_max", + translation_key="block_max", + value_fn=lambda data: data[4] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_start_time", + translation_key="block_start_time", + value_fn=lambda data: data[0], + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_end_time", + translation_key="block_end_time", + value_fn=lambda data: data[1], + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + ), +) +DAILY_AVERAGE_PRICES_SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( + SensorEntityDescription( + key="daily_average", + translation_key="daily_average", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: NordPoolConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nord Pool sensor platform.""" + + coordinator = entry.runtime_data + + entities: list[NordpoolBaseEntity] = [] + currency = entry.runtime_data.data.currency + + for area in get_prices(entry.runtime_data.data): + LOGGER.debug("Setting up base sensors for area %s", area) + entities.extend( + NordpoolSensor(coordinator, description, area) + for description in DEFAULT_SENSOR_TYPES + ) + LOGGER.debug( + "Setting up price sensors for area %s with currency %s", area, currency + ) + entities.extend( + NordpoolPriceSensor(coordinator, description, area, currency) + for description in PRICES_SENSOR_TYPES + ) + entities.extend( + NordpoolDailyAveragePriceSensor(coordinator, description, area, currency) + for description in DAILY_AVERAGE_PRICES_SENSOR_TYPES + ) + for block_name in get_blockprices(coordinator.data)[area]: + LOGGER.debug( + "Setting up block price sensors for area %s with currency %s in block %s", + area, + currency, + block_name, + ) + entities.extend( + NordpoolBlockPriceSensor( + coordinator, description, area, currency, block_name + ) + for description in BLOCK_PRICES_SENSOR_TYPES + ) + async_add_entities(entities) + + +class NordpoolSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool sensor.""" + + entity_description: NordpoolDefaultSensorEntityDescription + + @property + def native_value(self) -> str | float | datetime | None: + """Return value of sensor.""" + return self.entity_description.value_fn(self.coordinator.data) + + +class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool price sensor.""" + + entity_description: NordpoolPricesSensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: NordpoolPricesSensorEntityDescription, + area: str, + currency: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + self._attr_native_unit_of_measurement = f"{currency}/kWh" + + @property + def native_value(self) -> float | None: + """Return value of sensor.""" + return self.entity_description.value_fn( + get_prices(self.coordinator.data)[self.area] + ) + + +class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool block price sensor.""" + + entity_description: NordpoolBlockPricesSensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: NordpoolBlockPricesSensorEntityDescription, + area: str, + currency: str, + block_name: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + if entity_description.device_class is not SensorDeviceClass.TIMESTAMP: + self._attr_native_unit_of_measurement = f"{currency}/kWh" + self._attr_unique_id = f"{slugify(block_name)}-{area}-{entity_description.key}" + self.block_name = block_name + self._attr_translation_placeholders = {"block": block_name} + + @property + def native_value(self) -> float | datetime | None: + """Return value of sensor.""" + return self.entity_description.value_fn( + get_blockprices(self.coordinator.data)[self.area][self.block_name] + ) + 
+ +class NordpoolDailyAveragePriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool daily average price sensor.""" + + entity_description: SensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: SensorEntityDescription, + area: str, + currency: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + self._attr_native_unit_of_measurement = f"{currency}/kWh" + + @property + def native_value(self) -> float | None: + """Return value of sensor.""" + return self.coordinator.data.area_average[self.area] / 1000 diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json new file mode 100644 index 00000000000000..59ba009eb900fa --- /dev/null +++ b/homeassistant/components/nordpool/strings.json @@ -0,0 +1,65 @@ +{ + "config": { + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "no_data": "API connected but the response was empty" + }, + "step": { + "user": { + "data": { + "currency": "Currency", + "areas": "Areas" + } + }, + "reconfigure": { + "data": { + "currency": "[%key:component::nordpool::config::step::user::data::currency%]", + "areas": "[%key:component::nordpool::config::step::user::data::areas%]" + } + } + } + }, + "entity": { + "sensor": { + "updated_at": { + "name": "Last updated" + }, + "currency": { + "name": "Currency" + }, + "exchange_rate": { + "name": "Exchange rate" + }, + "current_price": { + "name": "Current price" + }, + "last_price": { + "name": "Previous price" + }, + "next_price": { + "name": "Next price" + }, + "block_average": { + "name": "{block} average" + }, + "block_min": { + "name": "{block} lowest price" + }, + "block_max": { + "name": "{block} highest price" + }, + "block_start_time": { + "name": "{block} time from" + }, + "block_end_time": { + "name": "{block} time until" + }, + "daily_average": { + "name": "Daily average" + } + } + } +} diff --git a/homeassistant/components/notify/strings.json b/homeassistant/components/notify/strings.json index d1deca0a6c4a3e..b7d4ec1ad256d2 100644 --- a/homeassistant/components/notify/strings.json +++ b/homeassistant/components/notify/strings.json @@ -74,7 +74,7 @@ } }, "migrate_notify_service": { - "title": "Legacy action notify.{service_name} stll being used", + "title": "Legacy action notify.{service_name} still being used", "fix_flow": { "step": { "confirm": { diff --git a/homeassistant/components/notion/config_flow.py b/homeassistant/components/notion/config_flow.py index c803992c2e2394..f7347a8f595f67 100644 --- a/homeassistant/components/notion/config_flow.py +++ b/homeassistant/components/notion/config_flow.py @@ -9,7 +9,7 @@ from aionotion.errors import InvalidCredentialsError, NotionError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -68,36 +68,29 @@ class NotionFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - 
self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle re-auth completion.""" - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if not user_input: return self.async_show_form( step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME] }, ) credentials_validation_result = await async_validate_credentials( - self.hass, self._reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD] + self.hass, reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD] ) if credentials_validation_result.errors: @@ -106,19 +99,16 @@ async def async_step_reauth_confirm( data_schema=REAUTH_SCHEMA, errors=credentials_validation_result.errors, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME] }, ) - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data=self._reauth_entry.data - | {CONF_REFRESH_TOKEN: credentials_validation_result.refresh_token}, - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={ + CONF_REFRESH_TOKEN: credentials_validation_result.refresh_token + }, ) - return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, str] | None = None diff --git a/homeassistant/components/nsw_fuel_station/__init__.py b/homeassistant/components/nsw_fuel_station/__init__.py index 76dc9d4c6ffabe..85e204b6f5145a 100644 --- a/homeassistant/components/nsw_fuel_station/__init__.py +++ b/homeassistant/components/nsw_fuel_station/__init__.py @@ -33,6 +33,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name="sensor", update_interval=SCAN_INTERVAL, update_method=async_update_data, diff --git a/homeassistant/components/nuheat/__init__.py b/homeassistant/components/nuheat/__init__.py index fdb49688ebadf7..fb17e6b45bf4b3 100644 --- a/homeassistant/components/nuheat/__init__.py +++ b/homeassistant/components/nuheat/__init__.py @@ -60,6 +60,7 @@ async def _async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"nuheat {serial_number}", update_method=_async_update_data, update_interval=timedelta(minutes=5), diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index ad95c9b5358b7b..23e3ce0910bc33 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -17,6 +17,7 @@ SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -109,6 +110,12 @@ class NumberDeviceClass(StrEnum): Unit of measurement: `%` """ + BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" + """Blood glucose concentration. + + Unit of measurement: `mg/dL`, `mmol/L` + """ + CO = "carbon_monoxide" """Carbon Monoxide gas concentration. @@ -162,7 +169,7 @@ class NumberDeviceClass(StrEnum): ENERGY = "energy" """Energy. 
- Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` """ ENERGY_STORAGE = "energy_storage" @@ -171,7 +178,7 @@ class NumberDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` """ FREQUENCY = "frequency" @@ -279,7 +286,7 @@ class NumberDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW` + Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` """ PRECIPITATION = "precipitation" @@ -429,6 +436,7 @@ class NumberDeviceClass(StrEnum): NumberDeviceClass.AQI: {None}, NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure), NumberDeviceClass.BATTERY: {PERCENTAGE}, + NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration), NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity), diff --git a/homeassistant/components/number/icons.json b/homeassistant/components/number/icons.json index a122aaecb09465..5e0fc6e44d2612 100644 --- a/homeassistant/components/number/icons.json +++ b/homeassistant/components/number/icons.json @@ -15,6 +15,9 @@ "battery": { "default": "mdi:battery" }, + "blood_glucose_concentration": { + "default": "mdi:spoon-sugar" + }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, diff --git a/homeassistant/components/number/strings.json b/homeassistant/components/number/strings.json index 580385172e38a3..b9aec880ecc22f 100644 --- a/homeassistant/components/number/strings.json +++ b/homeassistant/components/number/strings.json @@ -43,6 +43,9 @@ "battery": { "name": "[%key:component::sensor::entity_component::battery::name%]" }, + "blood_glucose_concentration": { + "name": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]" + }, "carbon_dioxide": { "name": "[%key:component::sensor::entity_component::carbon_dioxide::name%]" }, diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index 2ce67c76649367..169dbbbff5db1b 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -86,6 +86,7 @@ async def async_update_data() -> dict[str, str]: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="NUT resource status", update_method=async_update_data, update_interval=timedelta(seconds=scan_interval), @@ -129,7 +130,10 @@ async def async_update_data() -> dict[str, str]: name=data.name.title(), manufacturer=data.device_info.manufacturer, model=data.device_info.model, + model_id=data.device_info.model_id, sw_version=data.device_info.firmware, + serial_number=data.device_info.serial, + suggested_area=data.device_info.device_location, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -207,7 +211,10 @@ class NUTDeviceInfo: manufacturer: str | None = None model: str | None = None + model_id: str | None = None firmware: str | None = None + serial: str | None = None + device_location: str | None = None class PyNUTData: @@ -266,8 +273,13 @@ def _get_device_info(self) -> NUTDeviceInfo | None: manufacturer = _manufacturer_from_status(self._status) model = _model_from_status(self._status) + model_id: str | None = self._status.get("device.part") firmware = 
_firmware_from_status(self._status) - return NUTDeviceInfo(manufacturer, model, firmware) + serial = _serial_from_status(self._status) + device_location: str | None = self._status.get("device.location") + return NUTDeviceInfo( + manufacturer, model, model_id, firmware, serial, device_location + ) async def _async_get_status(self) -> dict[str, str]: """Get the ups status from NUT.""" diff --git a/homeassistant/components/nut/config_flow.py b/homeassistant/components/nut/config_flow.py index d0a2da124a66f7..966c51e98e9d03 100644 --- a/homeassistant/components/nut/config_flow.py +++ b/homeassistant/components/nut/config_flow.py @@ -235,16 +235,12 @@ async def async_step_reauth_confirm( @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for nut.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/nut/sensor.py b/homeassistant/components/nut/sensor.py index 7f211d5452b0ec..bb702873052242 100644 --- a/homeassistant/components/nut/sensor.py +++ b/homeassistant/components/nut/sensor.py @@ -15,6 +15,7 @@ from homeassistant.const import ( ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_SERIAL_NUMBER, ATTR_SW_VERSION, PERCENTAGE, STATE_UNKNOWN, @@ -42,6 +43,7 @@ "manufacturer": ATTR_MANUFACTURER, "model": ATTR_MODEL, "firmware": ATTR_SW_VERSION, + "serial": ATTR_SERIAL_NUMBER, } _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nws/__init__.py b/homeassistant/components/nws/__init__.py index 2e643d7dbc60ac..c700476ed3d5e9 100644 --- a/homeassistant/components/nws/__init__.py +++ b/homeassistant/components/nws/__init__.py @@ -110,6 +110,7 @@ async def update_forecast_hourly() -> None: coordinator_forecast = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"NWS forecast station {station}", update_method=async_setup_update_forecast(0, 0), update_interval=DEFAULT_SCAN_INTERVAL, @@ -121,6 +122,7 @@ async def update_forecast_hourly() -> None: coordinator_forecast_hourly = TimestampDataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=f"NWS forecast hourly station {station}", update_method=async_setup_update_forecast_hourly(0, 0), update_interval=DEFAULT_SCAN_INTERVAL, diff --git a/homeassistant/components/nx584/alarm_control_panel.py b/homeassistant/components/nx584/alarm_control_panel.py index 61de4f611b82aa..6622eec530f290 100644 --- a/homeassistant/components/nx584/alarm_control_panel.py +++ b/homeassistant/components/nx584/alarm_control_panel.py @@ -13,17 +13,10 @@ PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PORT, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import config_validation as cv, entity_platform @@ -95,7 +88,6 @@ class NX584Alarm(AlarmControlPanelEntity): """Representation of a NX584-based alarm panel.""" 
_attr_code_format = CodeFormat.NUMBER - _attr_state: str | None _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME | AlarmControlPanelEntityFeature.ARM_AWAY @@ -118,11 +110,11 @@ def update(self) -> None: "Unable to connect to %(host)s: %(reason)s", {"host": self._url, "reason": ex}, ) - self._attr_state = None + self._attr_alarm_state = None zones = [] except IndexError: _LOGGER.error("NX584 reports no partitions") - self._attr_state = None + self._attr_alarm_state = None zones = [] bypassed = False @@ -136,15 +128,15 @@ def update(self) -> None: break if not part["armed"]: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif bypassed: - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME else: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY for flag in part["condition_flags"]: if flag == "Siren on": - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED def alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" diff --git a/homeassistant/components/nyt_games/manifest.json b/homeassistant/components/nyt_games/manifest.json index a2cd5629ed1c96..c32de754782208 100644 --- a/homeassistant/components/nyt_games/manifest.json +++ b/homeassistant/components/nyt_games/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nyt_games", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["nyt_games==0.4.3"] + "requirements": ["nyt_games==0.4.4"] } diff --git a/homeassistant/components/nyt_games/sensor.py b/homeassistant/components/nyt_games/sensor.py index 57759fb354d6bb..01b2db4620becb 100644 --- a/homeassistant/components/nyt_games/sensor.py +++ b/homeassistant/components/nyt_games/sensor.py @@ -139,7 +139,7 @@ class NYTGamesConnectionsSensorEntityDescription(SensorEntityDescription): state_class=SensorStateClass.TOTAL_INCREASING, native_unit_of_measurement=UnitOfTime.DAYS, device_class=SensorDeviceClass.DURATION, - value_fn=lambda connections: connections.current_streak, + value_fn=lambda connections: connections.max_streak, ), ) diff --git a/homeassistant/components/nzbget/config_flow.py b/homeassistant/components/nzbget/config_flow.py index 47d35f32f9f013..a99d3d3f328b0e 100644 --- a/homeassistant/components/nzbget/config_flow.py +++ b/homeassistant/components/nzbget/config_flow.py @@ -50,9 +50,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: diff --git a/homeassistant/components/nzbget/manifest.json b/homeassistant/components/nzbget/manifest.json index 34f6f37873b94a..60e90e372ffed1 100644 --- a/homeassistant/components/nzbget/manifest.json +++ b/homeassistant/components/nzbget/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/nzbget", "iot_class": "local_polling", "loggers": ["pynzbgetapi"], - "requirements": ["pynzbgetapi==0.2.0"] + "requirements": ["pynzbgetapi==0.2.0"], + "single_config_entry": true } diff --git a/homeassistant/components/nzbget/strings.json b/homeassistant/components/nzbget/strings.json index 4da9a0b505ede3..84a2ed0b821be6 100644 --- a/homeassistant/components/nzbget/strings.json +++ 
b/homeassistant/components/nzbget/strings.json @@ -19,7 +19,6 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "unknown": "[%key:common::config_flow::error::unknown%]" } }, diff --git a/homeassistant/components/ollama/config_flow.py b/homeassistant/components/ollama/config_flow.py index 65b8efaf525c63..1024a824c25bf6 100644 --- a/homeassistant/components/ollama/config_flow.py +++ b/homeassistant/components/ollama/config_flow.py @@ -207,9 +207,8 @@ class OllamaOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry - self.url: str = self.config_entry.data[CONF_URL] - self.model: str = self.config_entry.data[CONF_MODEL] + self.url: str = config_entry.data[CONF_URL] + self.model: str = config_entry.data[CONF_MODEL] async def async_step_init( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py index 6152b223d6d2ba..69c0a3d62965f8 100644 --- a/homeassistant/components/ollama/const.py +++ b/homeassistant/components/ollama/const.py @@ -24,8 +24,12 @@ MODEL_NAMES = [ # https://ollama.com/library "alfred", "all-minilm", + "aya-expanse", "aya", "bakllava", + "bespoke-minicheck", + "bge-large", + "bge-m3", "codebooga", "codegeex4", "codegemma", @@ -33,18 +37,19 @@ "codeqwen", "codestral", "codeup", - "command-r", "command-r-plus", + "command-r", "dbrx", - "deepseek-coder", "deepseek-coder-v2", + "deepseek-coder", "deepseek-llm", + "deepseek-v2.5", "deepseek-v2", - "dolphincoder", "dolphin-llama3", "dolphin-mistral", "dolphin-mixtral", "dolphin-phi", + "dolphincoder", "duckdb-nsql", "everythinglm", "falcon", @@ -55,74 +60,97 @@ "glm4", "goliath", "granite-code", + "granite3-dense", + "granite3-guardian" "granite3-moe", + "hermes3", "internlm2", - "llama2", + "llama-guard3", + "llama-pro", "llama2-chinese", "llama2-uncensored", - "llama3", + "llama2", "llama3-chatqa", "llama3-gradient", "llama3-groq-tool-use", - "llama-pro", - "llava", + "llama3.1", + "llama3.2", + "llama3", "llava-llama3", "llava-phi3", + "llava", "magicoder", "mathstral", "meditron", "medllama2", "megadolphin", - "mistral", - "mistrallite", + "minicpm-v", + "mistral-large", "mistral-nemo", "mistral-openorca", + "mistral-small", + "mistral", + "mistrallite", "mixtral", "moondream", "mxbai-embed-large", + "nemotron-mini", + "nemotron", "neural-chat", "nexusraven", "nomic-embed-text", "notus", "notux", "nous-hermes", - "nous-hermes2", "nous-hermes2-mixtral", + "nous-hermes2", "nuextract", + "open-orca-platypus2", "openchat", "openhermes", - "open-orca-platypus2", - "orca2", "orca-mini", + "orca2", + "paraphrase-multilingual", "phi", + "phi3.5", "phi3", "phind-codellama", "qwen", + "qwen2-math", + "qwen2.5-coder", + "qwen2.5", "qwen2", + "reader-lm", + "reflection", "samantha-mistral", + "shieldgemma", + "smollm", + "smollm2", "snowflake-arctic-embed", + "solar-pro", "solar", "sqlcoder", "stable-beluga", "stable-code", - "stablelm2", "stablelm-zephyr", + "stablelm2", "starcoder", "starcoder2", "starling-lm", "tinydolphin", "tinyllama", "vicuna", + "wizard-math", + "wizard-vicuna-uncensored", + "wizard-vicuna", "wizardcoder", + "wizardlm-uncensored", "wizardlm", "wizardlm2", - "wizardlm-uncensored", - "wizard-math", - "wizard-vicuna", - "wizard-vicuna-uncensored", "xwinlm", "yarn-llama2", "yarn-mistral", + "yi-coder", "yi", 
"zephyr", ] -DEFAULT_MODEL = "llama3.1:latest" +DEFAULT_MODEL = "llama3.2:latest" diff --git a/homeassistant/components/ollama/strings.json b/homeassistant/components/ollama/strings.json index c307f160228d47..248cac34f115b5 100644 --- a/homeassistant/components/ollama/strings.json +++ b/homeassistant/components/ollama/strings.json @@ -11,9 +11,11 @@ "title": "Downloading model" } }, + "abort": { + "download_failed": "Model downloading failed" + }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "download_failed": "Model downloading failed", "unknown": "[%key:common::config_flow::error::unknown%]" }, "progress": { diff --git a/homeassistant/components/omnilogic/config_flow.py b/homeassistant/components/omnilogic/config_flow.py index 77bca0039a9633..dfbd010ea98f61 100644 --- a/homeassistant/components/omnilogic/config_flow.py +++ b/homeassistant/components/omnilogic/config_flow.py @@ -34,7 +34,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -42,12 +42,6 @@ async def async_step_user( """Handle the initial step.""" errors: dict[str, str] = {} - config_entry = self._async_current_entries() - if config_entry: - return self.async_abort(reason="single_instance_allowed") - - errors = {} - if user_input is not None: username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] @@ -84,10 +78,6 @@ async def async_step_user( class OptionsFlowHandler(OptionsFlow): """Handle Omnilogic client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/omnilogic/manifest.json b/homeassistant/components/omnilogic/manifest.json index 252718d2c21b78..361a15e2d9cb58 100644 --- a/homeassistant/components/omnilogic/manifest.json +++ b/homeassistant/components/omnilogic/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/omnilogic", "iot_class": "cloud_polling", "loggers": ["config", "omnilogic"], - "requirements": ["omnilogic==0.4.5"] + "requirements": ["omnilogic==0.4.5"], + "single_config_entry": true } diff --git a/homeassistant/components/omnilogic/strings.json b/homeassistant/components/omnilogic/strings.json index 454644be244329..5b193b7f5ba69d 100644 --- a/homeassistant/components/omnilogic/strings.json +++ b/homeassistant/components/omnilogic/strings.json @@ -14,8 +14,7 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" } }, "options": { diff --git a/homeassistant/components/onboarding/views.py b/homeassistant/components/onboarding/views.py index 1ecfc10d974c48..b33440a9eb7d32 100644 --- a/homeassistant/components/onboarding/views.py +++ b/homeassistant/components/onboarding/views.py @@ -20,6 +20,7 @@ from homeassistant.components.http.view import HomeAssistantView from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import area_registry as ar +from 
homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.system_info import async_get_system_info from homeassistant.helpers.translation import async_get_translations from homeassistant.setup import async_setup_component @@ -216,7 +217,7 @@ async def post(self, request: web.Request) -> web.Response: from homeassistant.components import hassio if ( - hassio.is_hassio(hass) + is_hassio(hass) and (core_info := hassio.get_core_info(hass)) and "raspberrypi" in core_info["machine"] ): diff --git a/homeassistant/components/oncue/__init__.py b/homeassistant/components/oncue/__init__.py index 53443b9ed81c2b..19d134a398fbba 100644 --- a/homeassistant/components/oncue/__init__.py +++ b/homeassistant/components/oncue/__init__.py @@ -43,6 +43,7 @@ async def _async_update() -> dict[str, OncueDevice]: coordinator = DataUpdateCoordinator[dict[str, OncueDevice]]( hass, _LOGGER, + config_entry=entry, name=f"Oncue {entry.data[CONF_USERNAME]}", update_interval=timedelta(minutes=10), update_method=_async_update, diff --git a/homeassistant/components/oncue/config_flow.py b/homeassistant/components/oncue/config_flow.py index 92cd037734ef03..872fe84350bfb7 100644 --- a/homeassistant/components/oncue/config_flow.py +++ b/homeassistant/components/oncue/config_flow.py @@ -9,7 +9,7 @@ from aiooncue import LoginFailedException, Oncue import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -23,10 +23,6 @@ class OncueConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the oncue config flow.""" - self.reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -80,8 +76,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth.""" - entry_id = self.context["entry_id"] - self.reauth_entry = self.hass.config_entries.async_get_entry(entry_id) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -89,18 +83,15 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Handle reauth input.""" errors: dict[str, str] = {} - existing_entry = self.reauth_entry - assert existing_entry - existing_data = existing_entry.data + reauth_entry = self._get_reauth_entry() + existing_data = reauth_entry.data description_placeholders: dict[str, str] = { CONF_USERNAME: existing_data[CONF_USERNAME] } if user_input is not None: new_config = {**existing_data, CONF_PASSWORD: user_input[CONF_PASSWORD]} if not (errors := await self._async_validate_or_error(new_config)): - return self.async_update_reload_and_abort( - existing_entry, data=new_config - ) + return self.async_update_reload_and_abort(reauth_entry, data=new_config) return self.async_show_form( description_placeholders=description_placeholders, diff --git a/homeassistant/components/ondilo_ico/config_flow.py b/homeassistant/components/ondilo_ico/config_flow.py index d65c1b15e2ae87..fe0b89e725873c 100644 --- a/homeassistant/components/ondilo_ico/config_flow.py +++ b/homeassistant/components/ondilo_ico/config_flow.py @@ -21,9 +21,6 @@ async def async_step_user( """Handle a flow initialized by the user.""" await self.async_set_unique_id(DOMAIN) - if self._async_current_entries(): - return 
self.async_abort(reason="single_instance_allowed") - self.async_register_implementation( self.hass, OndiloOauth2Implementation(self.hass), diff --git a/homeassistant/components/ondilo_ico/manifest.json b/homeassistant/components/ondilo_ico/manifest.json index 2f522f1b77c821..84862a89fbb478 100644 --- a/homeassistant/components/ondilo_ico/manifest.json +++ b/homeassistant/components/ondilo_ico/manifest.json @@ -8,5 +8,6 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["ondilo"], - "requirements": ["ondilo==0.5.0"] + "requirements": ["ondilo==0.5.0"], + "single_config_entry": true } diff --git a/homeassistant/components/onewire/config_flow.py b/homeassistant/components/onewire/config_flow.py index a217674e3b4298..abb4c8849747ff 100644 --- a/homeassistant/components/onewire/config_flow.py +++ b/homeassistant/components/onewire/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from copy import deepcopy from typing import Any import voluptuous as vol @@ -10,7 +11,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback @@ -100,12 +101,14 @@ async def async_step_user( @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OnewireOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OnewireOptionsFlowHandler: """Get the options flow for this handler.""" return OnewireOptionsFlowHandler(config_entry) -class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry): +class OnewireOptionsFlowHandler(OptionsFlow): """Handle OneWire Config options.""" configurable_devices: dict[str, str] @@ -123,6 +126,10 @@ class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry): current_device: str """Friendly name of the currently selected device.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.options = deepcopy(dict(config_entry.options)) + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/onkyo/__init__.py b/homeassistant/components/onkyo/__init__.py index 02c026d1973c3c..fd5c0ba634a3a4 100644 --- a/homeassistant/components/onkyo/__init__.py +++ b/homeassistant/components/onkyo/__init__.py @@ -1 +1,76 @@ """The onkyo component.""" + +from dataclasses import dataclass + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType + +from .const import DOMAIN, OPTION_INPUT_SOURCES, InputSource +from .receiver import Receiver, async_interview +from .services import DATA_MP_ENTITIES, async_register_services + +PLATFORMS = [Platform.MEDIA_PLAYER] + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +@dataclass +class OnkyoData: + """Config Entry data.""" + + receiver: Receiver + sources: dict[InputSource, str] + + +type OnkyoConfigEntry = ConfigEntry[OnkyoData] + + +async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool: + """Set up Onkyo component.""" + await async_register_services(hass) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: OnkyoConfigEntry) -> bool: + """Set up the Onkyo config entry.""" + 
entry.async_on_unload(entry.add_update_listener(update_listener)) + + host = entry.data[CONF_HOST] + + info = await async_interview(host) + if info is None: + raise ConfigEntryNotReady(f"Unable to connect to: {host}") + + receiver = await Receiver.async_create(info) + + sources_store: dict[str, str] = entry.options[OPTION_INPUT_SOURCES] + sources = {InputSource(k): v for k, v in sources_store.items()} + + entry.runtime_data = OnkyoData(receiver, sources) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + await receiver.conn.connect() + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: OnkyoConfigEntry) -> bool: + """Unload Onkyo config entry.""" + del hass.data[DATA_MP_ENTITIES][entry.entry_id] + + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + receiver = entry.runtime_data.receiver + receiver.conn.close() + + return unload_ok + + +async def update_listener(hass: HomeAssistant, entry: OnkyoConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/onkyo/config_flow.py b/homeassistant/components/onkyo/config_flow.py new file mode 100644 index 00000000000000..a8ced6fae640c4 --- /dev/null +++ b/homeassistant/components/onkyo/config_flow.py @@ -0,0 +1,371 @@ +"""Config flow for Onkyo.""" + +import logging +from typing import Any + +import voluptuous as vol + +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import callback +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + Selector, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, + TextSelector, +) + +from .const import ( + CONF_RECEIVER_MAX_VOLUME, + CONF_SOURCES, + DOMAIN, + OPTION_INPUT_SOURCES, + OPTION_MAX_VOLUME, + OPTION_MAX_VOLUME_DEFAULT, + OPTION_VOLUME_RESOLUTION, + OPTION_VOLUME_RESOLUTION_DEFAULT, + VOLUME_RESOLUTION_ALLOWED, + InputSource, +) +from .receiver import ReceiverInfo, async_discover, async_interview + +_LOGGER = logging.getLogger(__name__) + +CONF_DEVICE = "device" + +INPUT_SOURCES_ALL_MEANINGS = [ + input_source.value_meaning for input_source in InputSource +] +STEP_MANUAL_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) +STEP_CONFIGURE_SCHEMA = vol.Schema( + { + vol.Required(OPTION_VOLUME_RESOLUTION): vol.In(VOLUME_RESOLUTION_ALLOWED), + vol.Required(OPTION_INPUT_SOURCES): SelectSelector( + SelectSelectorConfig( + options=INPUT_SOURCES_ALL_MEANINGS, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + } +) + + +class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): + """Onkyo config flow.""" + + _receiver_info: ReceiverInfo + _discovered_infos: dict[str, ReceiverInfo] + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + return self.async_show_menu( + step_id="user", menu_options=["manual", "eiscp_discovery"] + ) + + async def async_step_manual( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle manual device entry.""" + errors = {} + + if user_input is not None: + host = user_input[CONF_HOST] + _LOGGER.debug("Config flow start manual: %s", host) + try: + info = await async_interview(host) + except Exception: + _LOGGER.exception("Unexpected exception") + 
errors["base"] = "unknown" + else: + if info is None: + errors["base"] = "cannot_connect" + else: + self._receiver_info = info + + await self.async_set_unique_id( + info.identifier, raise_on_progress=False + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() + else: + self._abort_if_unique_id_configured() + + return await self.async_step_configure_receiver() + + suggested_values = user_input + if suggested_values is None and self.source == SOURCE_RECONFIGURE: + suggested_values = { + CONF_HOST: self._get_reconfigure_entry().data[CONF_HOST] + } + + return self.async_show_form( + step_id="manual", + data_schema=self.add_suggested_values_to_schema( + STEP_MANUAL_SCHEMA, suggested_values + ), + errors=errors, + ) + + async def async_step_eiscp_discovery( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Start eiscp discovery and handle user device selection.""" + if user_input is not None: + self._receiver_info = self._discovered_infos[user_input[CONF_DEVICE]] + await self.async_set_unique_id( + self._receiver_info.identifier, raise_on_progress=False + ) + self._abort_if_unique_id_configured( + updates={CONF_HOST: self._receiver_info.host} + ) + return await self.async_step_configure_receiver() + + _LOGGER.debug("Config flow start eiscp discovery") + + try: + infos = await async_discover() + except Exception: + _LOGGER.exception("Unexpected exception") + return self.async_abort(reason="unknown") + + _LOGGER.debug("Discovered devices: %s", infos) + + self._discovered_infos = {} + discovered_names = {} + current_unique_ids = self._async_current_ids() + for info in infos: + if info.identifier in current_unique_ids: + continue + self._discovered_infos[info.identifier] = info + device_name = f"{info.model_name} ({info.host})" + discovered_names[info.identifier] = device_name + + _LOGGER.debug("Discovered new devices: %s", self._discovered_infos) + + if not discovered_names: + return self.async_abort(reason="no_devices_found") + + return self.async_show_form( + step_id="eiscp_discovery", + data_schema=vol.Schema( + {vol.Required(CONF_DEVICE): vol.In(discovered_names)} + ), + ) + + async def async_step_configure_receiver( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the configuration of a single receiver.""" + errors = {} + + entry = None + entry_options = None + if self.source == SOURCE_RECONFIGURE: + entry = self._get_reconfigure_entry() + entry_options = entry.options + + if user_input is not None: + source_meanings: list[str] = user_input[OPTION_INPUT_SOURCES] + if not source_meanings: + errors[OPTION_INPUT_SOURCES] = "empty_input_source_list" + else: + sources_store: dict[str, str] = {} + for source_meaning in source_meanings: + source = InputSource.from_meaning(source_meaning) + + source_name = source_meaning + if entry_options is not None: + source_name = entry_options[OPTION_INPUT_SOURCES].get( + source.value, source_name + ) + sources_store[source.value] = source_name + + volume_resolution = user_input[OPTION_VOLUME_RESOLUTION] + + if entry_options is None: + result = self.async_create_entry( + title=self._receiver_info.model_name, + data={ + CONF_HOST: self._receiver_info.host, + }, + options={ + OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: OPTION_MAX_VOLUME_DEFAULT, + OPTION_INPUT_SOURCES: sources_store, + }, + ) + else: + assert entry is not None + result = self.async_update_reload_and_abort( + entry, + data={ + CONF_HOST: self._receiver_info.host, + }, + options={ + 
OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: entry_options[OPTION_MAX_VOLUME], + OPTION_INPUT_SOURCES: sources_store, + }, + ) + + _LOGGER.debug("Configured receiver, result: %s", result) + return result + + _LOGGER.debug("Configuring receiver, info: %s", self._receiver_info) + + suggested_values = user_input + if suggested_values is None: + if entry_options is None: + suggested_values = { + OPTION_VOLUME_RESOLUTION: OPTION_VOLUME_RESOLUTION_DEFAULT, + OPTION_INPUT_SOURCES: [], + } + else: + suggested_values = { + OPTION_VOLUME_RESOLUTION: entry_options[OPTION_VOLUME_RESOLUTION], + OPTION_INPUT_SOURCES: [ + InputSource(input_source).value_meaning + for input_source in entry_options[OPTION_INPUT_SOURCES] + ], + } + + return self.async_show_form( + step_id="configure_receiver", + data_schema=self.add_suggested_values_to_schema( + STEP_CONFIGURE_SCHEMA, suggested_values + ), + errors=errors, + description_placeholders={ + "name": f"{self._receiver_info.model_name} ({self._receiver_info.host})" + }, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the receiver.""" + return await self.async_step_manual() + + async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + """Import the yaml config.""" + _LOGGER.debug("Import flow user input: %s", user_input) + + host: str = user_input[CONF_HOST] + name: str | None = user_input.get(CONF_NAME) + user_max_volume: int = user_input[OPTION_MAX_VOLUME] + user_volume_resolution: int = user_input[CONF_RECEIVER_MAX_VOLUME] + user_sources: dict[InputSource, str] = user_input[CONF_SOURCES] + + info: ReceiverInfo | None = user_input.get("info") + if info is None: + try: + info = await async_interview(host) + except Exception: + _LOGGER.exception("Import flow interview error for host %s", host) + return self.async_abort(reason="cannot_connect") + + if info is None: + _LOGGER.error("Import flow interview error for host %s", host) + return self.async_abort(reason="cannot_connect") + + unique_id = info.identifier + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + name = name or info.model_name + + volume_resolution = VOLUME_RESOLUTION_ALLOWED[-1] + for volume_resolution_allowed in VOLUME_RESOLUTION_ALLOWED: + if user_volume_resolution <= volume_resolution_allowed: + volume_resolution = volume_resolution_allowed + break + + max_volume = min( + 100, user_max_volume * user_volume_resolution / volume_resolution + ) + + sources_store: dict[str, str] = {} + for source, source_name in user_sources.items(): + sources_store[source.value] = source_name + + return self.async_create_entry( + title=name, + data={ + CONF_HOST: host, + }, + options={ + OPTION_VOLUME_RESOLUTION: volume_resolution, + OPTION_MAX_VOLUME: max_volume, + OPTION_INPUT_SOURCES: sources_store, + }, + ) + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: + """Return the options flow.""" + return OnkyoOptionsFlowHandler(config_entry) + + +class OnkyoOptionsFlowHandler(OptionsFlow): + """Handle an options flow for Onkyo.""" + + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + sources_store: dict[str, str] = config_entry.options[OPTION_INPUT_SOURCES] + self._input_sources = {InputSource(k): v for k, v in sources_store.items()} + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + 
"""Manage the options.""" + if user_input is not None: + sources_store: dict[str, str] = {} + for source_meaning, source_name in user_input.items(): + if source_meaning in INPUT_SOURCES_ALL_MEANINGS: + source = InputSource.from_meaning(source_meaning) + sources_store[source.value] = source_name + + return self.async_create_entry( + data={ + OPTION_VOLUME_RESOLUTION: self.config_entry.options[ + OPTION_VOLUME_RESOLUTION + ], + OPTION_MAX_VOLUME: user_input[OPTION_MAX_VOLUME], + OPTION_INPUT_SOURCES: sources_store, + } + ) + + schema_dict: dict[Any, Selector] = {} + + max_volume: float = self.config_entry.options[OPTION_MAX_VOLUME] + schema_dict[vol.Required(OPTION_MAX_VOLUME, default=max_volume)] = ( + NumberSelector( + NumberSelectorConfig(min=1, max=100, mode=NumberSelectorMode.BOX) + ) + ) + + for source, source_name in self._input_sources.items(): + schema_dict[vol.Required(source.value_meaning, default=source_name)] = ( + TextSelector() + ) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema(schema_dict), + ) diff --git a/homeassistant/components/onkyo/const.py b/homeassistant/components/onkyo/const.py new file mode 100644 index 00000000000000..bd4fe98ae7d511 --- /dev/null +++ b/homeassistant/components/onkyo/const.py @@ -0,0 +1,141 @@ +"""Constants for the Onkyo integration.""" + +from enum import Enum +import typing +from typing import ClassVar, Literal, Self + +import pyeiscp + +DOMAIN = "onkyo" + +DEVICE_INTERVIEW_TIMEOUT = 5 +DEVICE_DISCOVERY_TIMEOUT = 5 + +CONF_SOURCES = "sources" +CONF_RECEIVER_MAX_VOLUME = "receiver_max_volume" + +type VolumeResolution = Literal[50, 80, 100, 200] +OPTION_VOLUME_RESOLUTION = "volume_resolution" +OPTION_VOLUME_RESOLUTION_DEFAULT: VolumeResolution = 50 +VOLUME_RESOLUTION_ALLOWED: tuple[VolumeResolution, ...] 
= typing.get_args( + VolumeResolution.__value__ +) + +OPTION_MAX_VOLUME = "max_volume" +OPTION_MAX_VOLUME_DEFAULT = 100.0 + +OPTION_INPUT_SOURCES = "input_sources" + +_INPUT_SOURCE_MEANINGS = { + "00": "VIDEO1 ··· VCR/DVR ··· STB/DVR", + "01": "VIDEO2 ··· CBL/SAT", + "02": "VIDEO3 ··· GAME/TV ··· GAME", + "03": "VIDEO4 ··· AUX", + "04": "VIDEO5 ··· AUX2 ··· GAME2", + "05": "VIDEO6 ··· PC", + "06": "VIDEO7", + "07": "HIDDEN1 ··· EXTRA1", + "08": "HIDDEN2 ··· EXTRA2", + "09": "HIDDEN3 ··· EXTRA3", + "10": "DVD ··· BD/DVD", + "11": "STRM BOX", + "12": "TV", + "20": "TAPE ··· TV/TAPE", + "21": "TAPE2", + "22": "PHONO", + "23": "CD ··· TV/CD", + "24": "FM", + "25": "AM", + "26": "TUNER", + "27": "MUSIC SERVER ··· P4S ··· DLNA", + "28": "INTERNET RADIO ··· IRADIO FAVORITE", + "29": "USB ··· USB(FRONT)", + "2A": "USB(REAR)", + "2B": "NETWORK ··· NET", + "2D": "AIRPLAY", + "2E": "BLUETOOTH", + "2F": "USB DAC IN", + "30": "MULTI CH", + "31": "XM", + "32": "SIRIUS", + "33": "DAB", + "40": "UNIVERSAL PORT", + "41": "LINE", + "42": "LINE2", + "44": "OPTICAL", + "45": "COAXIAL", + "55": "HDMI 5", + "56": "HDMI 6", + "57": "HDMI 7", + "80": "MAIN SOURCE", +} + + +class InputSource(Enum): + """Receiver input source.""" + + DVR = "00" + CBL = "01" + GAME = "02" + AUX = "03" + GAME2 = "04" + PC = "05" + VIDEO7 = "06" + EXTRA1 = "07" + EXTRA2 = "08" + EXTRA3 = "09" + DVD = "10" + STRM_BOX = "11" + TV = "12" + TAPE = "20" + TAPE2 = "21" + PHONO = "22" + CD = "23" + FM = "24" + AM = "25" + TUNER = "26" + MUSIC_SERVER = "27" + INTERNET_RADIO = "28" + USB = "29" + USB_REAR = "2A" + NETWORK = "2B" + AIRPLAY = "2D" + BLUETOOTH = "2E" + USB_DAC_IN = "2F" + MULTI_CH = "30" + XM = "31" + SIRIUS = "32" + DAB = "33" + UNIVERSAL_PORT = "40" + LINE = "41" + LINE2 = "42" + OPTICAL = "44" + COAXIAL = "45" + HDMI_5 = "55" + HDMI_6 = "56" + HDMI_7 = "57" + MAIN_SOURCE = "80" + + __meaning_mapping: ClassVar[dict[str, Self]] = {} # type: ignore[misc] + + value_meaning: str + + def __new__(cls, value: str) -> Self: + """Create InputSource enum.""" + obj = object.__new__(cls) + obj._value_ = value + obj.value_meaning = _INPUT_SOURCE_MEANINGS[value] + + cls.__meaning_mapping[obj.value_meaning] = obj + + return obj + + @classmethod + def from_meaning(cls, meaning: str) -> Self: + """Get InputSource enum from its meaning.""" + return cls.__meaning_mapping[meaning] + + +ZONES = {"main": "Main", "zone2": "Zone 2", "zone3": "Zone 3", "zone4": "Zone 4"} + +PYEISCP_COMMANDS = pyeiscp.commands.COMMANDS diff --git a/homeassistant/components/onkyo/manifest.json b/homeassistant/components/onkyo/manifest.json index 072dc9f9e3b8d9..0e75404b3ebaf6 100644 --- a/homeassistant/components/onkyo/manifest.json +++ b/homeassistant/components/onkyo/manifest.json @@ -2,7 +2,9 @@ "domain": "onkyo", "name": "Onkyo", "codeowners": ["@arturpragacz"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/onkyo", + "integration_type": "device", "iot_class": "local_push", "loggers": ["pyeiscp"], "requirements": ["pyeiscp==0.0.7"] diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index af4285e2abd7cf..41e36a7f23793d 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -6,62 +6,44 @@ import logging from typing import Any, Literal -import pyeiscp import voluptuous as vol from homeassistant.components.media_player import ( - DOMAIN as MEDIA_PLAYER_DOMAIN, PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, 
MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, MediaType, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_HOST, - CONF_NAME, - EVENT_HOMEASSISTANT_STOP, -) -from homeassistant.core import Event, HomeAssistant, ServiceCall, callback -from homeassistant.helpers import config_validation as cv +from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.hass_dict import HassKey - -from .receiver import Receiver, ReceiverInfo - -_LOGGER = logging.getLogger(__name__) - -DOMAIN = "onkyo" - -DATA_MP_ENTITIES: HassKey[list[dict[str, OnkyoMediaPlayer]]] = HassKey(DOMAIN) -CONF_SOURCES = "sources" -CONF_MAX_VOLUME = "max_volume" -CONF_RECEIVER_MAX_VOLUME = "receiver_max_volume" - -DEFAULT_NAME = "Onkyo Receiver" -SUPPORTED_MAX_VOLUME = 100 -DEFAULT_RECEIVER_MAX_VOLUME = 80 -ZONES = {"zone2": "Zone 2", "zone3": "Zone 3", "zone4": "Zone 4"} - -SUPPORT_ONKYO_WO_VOLUME = ( - MediaPlayerEntityFeature.TURN_ON - | MediaPlayerEntityFeature.TURN_OFF - | MediaPlayerEntityFeature.SELECT_SOURCE - | MediaPlayerEntityFeature.PLAY_MEDIA -) -SUPPORT_ONKYO = ( - SUPPORT_ONKYO_WO_VOLUME - | MediaPlayerEntityFeature.VOLUME_SET - | MediaPlayerEntityFeature.VOLUME_MUTE - | MediaPlayerEntityFeature.VOLUME_STEP +from . import OnkyoConfigEntry +from .const import ( + CONF_RECEIVER_MAX_VOLUME, + CONF_SOURCES, + DOMAIN, + OPTION_MAX_VOLUME, + OPTION_VOLUME_RESOLUTION, + PYEISCP_COMMANDS, + ZONES, + InputSource, + VolumeResolution, ) +from .receiver import Receiver, async_discover +from .services import DATA_MP_ENTITIES -KNOWN_HOSTS: list[str] = [] +_LOGGER = logging.getLogger(__name__) -DEFAULT_SOURCES = { +CONF_MAX_VOLUME_DEFAULT = 100 +CONF_RECEIVER_MAX_VOLUME_DEFAULT = 80 +CONF_SOURCES_DEFAULT = { "tv": "TV", "bd": "Bluray", "game": "Game", @@ -75,23 +57,42 @@ "video7": "Video 7", "fm": "Radio", } -DEFAULT_PLAYABLE_SOURCES = ("fm", "am", "tuner") PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_HOST): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_MAX_VOLUME, default=SUPPORTED_MAX_VOLUME): vol.All( + vol.Optional(CONF_NAME): cv.string, + vol.Optional(OPTION_MAX_VOLUME, default=CONF_MAX_VOLUME_DEFAULT): vol.All( vol.Coerce(int), vol.Range(min=1, max=100) ), vol.Optional( - CONF_RECEIVER_MAX_VOLUME, default=DEFAULT_RECEIVER_MAX_VOLUME + CONF_RECEIVER_MAX_VOLUME, default=CONF_RECEIVER_MAX_VOLUME_DEFAULT ): cv.positive_int, - vol.Optional(CONF_SOURCES, default=DEFAULT_SOURCES): {cv.string: cv.string}, + vol.Optional(CONF_SOURCES, default=CONF_SOURCES_DEFAULT): { + cv.string: cv.string + }, } ) -ATTR_HDMI_OUTPUT = "hdmi_output" +SUPPORT_ONKYO_WO_VOLUME = ( + MediaPlayerEntityFeature.TURN_ON + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.PLAY_MEDIA +) +SUPPORT_ONKYO = ( + SUPPORT_ONKYO_WO_VOLUME + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.VOLUME_STEP +) + +DEFAULT_PLAYABLE_SOURCES = ( + 
InputSource.from_meaning("FM"), + InputSource.from_meaning("AM"), + InputSource.from_meaning("TUNER"), +) + ATTR_PRESET = "preset" ATTR_AUDIO_INFORMATION = "audio_information" ATTR_VIDEO_INFORMATION = "video_information" @@ -123,52 +124,31 @@ "output_color_depth", "picture_mode", ] +ISSUE_URL_PLACEHOLDER = "/config/integrations/dashboard/add?domain=onkyo" -ACCEPTED_VALUES = [ - "no", - "analog", - "yes", - "out", - "out-sub", - "sub", - "hdbaset", - "both", - "up", -] -ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema( - { - vol.Required(ATTR_ENTITY_ID): cv.entity_ids, - vol.Required(ATTR_HDMI_OUTPUT): vol.In(ACCEPTED_VALUES), - } -) -SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" - +type InputLibValue = str | tuple[str, ...] -async def async_register_services(hass: HomeAssistant) -> None: - """Register Onkyo services.""" - async def async_service_handle(service: ServiceCall) -> None: - """Handle for services.""" - entity_ids = service.data[ATTR_ENTITY_ID] +def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: + match zone: + case "main": + cmds = PYEISCP_COMMANDS["main"]["SLI"] + case "zone2": + cmds = PYEISCP_COMMANDS["zone2"]["SLZ"] + case "zone3": + cmds = PYEISCP_COMMANDS["zone3"]["SL3"] + case "zone4": + cmds = PYEISCP_COMMANDS["zone4"]["SL4"] - targets: list[OnkyoMediaPlayer] = [] - for receiver_entities in hass.data[DATA_MP_ENTITIES]: - targets.extend( - entity - for entity in receiver_entities.values() - if entity.entity_id in entity_ids - ) + result: dict[InputSource, InputLibValue] = {} + for k, v in cmds["values"].items(): + try: + source = InputSource(k) + except ValueError: + continue + result[source] = v["name"] - for target in targets: - if service.service == SERVICE_SELECT_HDMI_OUTPUT: - await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) - - hass.services.async_register( - MEDIA_PLAYER_DOMAIN, - SERVICE_SELECT_HDMI_OUTPUT, - async_service_handle, - schema=ONKYO_SELECT_OUTPUT_SCHEMA, - ) + return result async def async_setup_platform( @@ -177,130 +157,167 @@ async def async_setup_platform( async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: - """Set up the Onkyo platform.""" - await async_register_services(hass) - - receivers: dict[str, Receiver] = {} # indexed by host - all_entities = hass.data.setdefault(DATA_MP_ENTITIES, []) - + """Import config from yaml.""" host = config.get(CONF_HOST) - name = config.get(CONF_NAME) - max_volume = config[CONF_MAX_VOLUME] - receiver_max_volume = config[CONF_RECEIVER_MAX_VOLUME] - sources = config[CONF_SOURCES] - async def async_setup_receiver( - info: ReceiverInfo, discovered: bool, name: str | None - ) -> None: - entities: dict[str, OnkyoMediaPlayer] = {} - all_entities.append(entities) - - @callback - def async_onkyo_update_callback( - message: tuple[str, str, Any], origin: str - ) -> None: - """Process new message from receiver.""" - receiver = receivers[origin] - _LOGGER.debug( - "Received update callback from %s: %s", receiver.name, message - ) + source_mapping: dict[str, InputSource] = {} + for zone in ZONES: + for source, source_lib in _input_lib_cmds(zone).items(): + if isinstance(source_lib, str): + source_mapping.setdefault(source_lib, source) + else: + for source_lib_single in source_lib: + source_mapping.setdefault(source_lib_single, source) - zone, _, value = message - entity = entities.get(zone) - if entity is not None: - if entity.enabled: - entity.process_update(message) - elif zone in ZONES and value != "N/A": - # When we receive the status for 
a zone, and the value is not "N/A", - # then zone is available on the receiver, so we create the entity for it. - _LOGGER.debug("Discovered %s on %s", ZONES[zone], receiver.name) - zone_entity = OnkyoMediaPlayer( - receiver, sources, zone, max_volume, receiver_max_volume - ) - entities[zone] = zone_entity - async_add_entities([zone_entity]) + sources: dict[InputSource, str] = {} + for source_lib_single, source_name in config[CONF_SOURCES].items(): + user_source = source_mapping.get(source_lib_single.lower()) + if user_source is not None: + sources[user_source] = source_name - @callback - def async_onkyo_connect_callback(origin: str) -> None: - """Receiver (re)connected.""" - receiver = receivers[origin] - _LOGGER.debug( - "Receiver (re)connected: %s (%s)", receiver.name, receiver.conn.host - ) + config[CONF_SOURCES] = sources - for entity in entities.values(): - entity.backfill_state() - - _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) - connection = await pyeiscp.Connection.create( - host=info.host, - port=info.port, - update_callback=async_onkyo_update_callback, - connect_callback=async_onkyo_connect_callback, + results = [] + if host is not None: + _LOGGER.debug("Importing yaml single: %s", host) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config ) + results.append((host, result)) + else: + for info in await async_discover(): + host = info.host + + # Migrate legacy entities. + registry = er.async_get(hass) + old_unique_id = f"{info.model_name}_{info.identifier}" + new_unique_id = f"{info.identifier}_main" + entity_id = registry.async_get_entity_id( + "media_player", DOMAIN, old_unique_id + ) + if entity_id is not None: + _LOGGER.debug( + "Migrating unique_id from [%s] to [%s] for entity %s", + old_unique_id, + new_unique_id, + entity_id, + ) + registry.async_update_entity(entity_id, new_unique_id=new_unique_id) - receiver = Receiver( - conn=connection, - model_name=info.model_name, - identifier=info.identifier, - name=name or info.model_name, - discovered=discovered, + _LOGGER.debug("Importing yaml discover: %s", info.host) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config | {CONF_HOST: info.host} | {"info": info}, + ) + results.append((host, result)) + + _LOGGER.debug("Importing yaml results: %s", results) + if not results: + async_create_issue( + hass, + DOMAIN, + "deprecated_yaml_import_issue_no_discover", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml_import_issue_no_discover", + translation_placeholders={"url": ISSUE_URL_PLACEHOLDER}, ) - receivers[connection.host] = receiver - - # Discover what zones are available for the receiver by querying the power. - # If we get a response for the specific zone, it means it is available. 
- for zone in ZONES: - receiver.conn.query_property(zone, "power") + all_successful = True + for host, result in results: + if ( + result.get("type") == FlowResultType.CREATE_ENTRY + or result.get("reason") == "already_configured" + ): + continue + if error := result.get("reason"): + all_successful = False + async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{host}_{error}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{error}", + translation_placeholders={ + "host": host, + "url": ISSUE_URL_PLACEHOLDER, + }, + ) - # Add the main zone to entities, since it is always active. - _LOGGER.debug("Adding Main Zone on %s", receiver.name) - main_entity = OnkyoMediaPlayer( - receiver, sources, "main", max_volume, receiver_max_volume + if all_successful: + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + breaks_in_ha_version="2025.5.0", + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "onkyo", + }, ) - entities["main"] = main_entity - async_add_entities([main_entity]) - if host is not None: - if host in KNOWN_HOSTS: - return - _LOGGER.debug("Manually creating receiver: %s (%s)", name, host) +async def async_setup_entry( + hass: HomeAssistant, + entry: OnkyoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up MediaPlayer for config entry.""" + data = entry.runtime_data - async def async_onkyo_interview_callback(conn: pyeiscp.Connection) -> None: - """Receiver interviewed, connection not yet active.""" - info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) - _LOGGER.debug("Receiver interviewed: %s (%s)", info.model_name, info.host) - if info.host not in KNOWN_HOSTS: - KNOWN_HOSTS.append(info.host) - await async_setup_receiver(info, False, name) + receiver = data.receiver + all_entities = hass.data[DATA_MP_ENTITIES] - await pyeiscp.Connection.discover( - host=host, - discovery_callback=async_onkyo_interview_callback, - ) - else: - _LOGGER.debug("Discovering receivers") - - async def async_onkyo_discovery_callback(conn: pyeiscp.Connection) -> None: - """Receiver discovered, connection not yet active.""" - info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) - _LOGGER.debug("Receiver discovered: %s (%s)", info.model_name, info.host) - if info.host not in KNOWN_HOSTS: - KNOWN_HOSTS.append(info.host) - await async_setup_receiver(info, True, None) - - await pyeiscp.Connection.discover( - discovery_callback=async_onkyo_discovery_callback, - ) + entities: dict[str, OnkyoMediaPlayer] = {} + all_entities[entry.entry_id] = entities - @callback - def close_receiver(_event: Event) -> None: - for receiver in receivers.values(): - receiver.conn.close() + volume_resolution: VolumeResolution = entry.options[OPTION_VOLUME_RESOLUTION] + max_volume: float = entry.options[OPTION_MAX_VOLUME] + sources = data.sources - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_receiver) + def connect_callback(receiver: Receiver) -> None: + if not receiver.first_connect: + for entity in entities.values(): + if entity.enabled: + entity.backfill_state() + + def update_callback(receiver: Receiver, message: tuple[str, str, Any]) -> None: + zone, _, value = message + entity = entities.get(zone) + if entity is not None: + if entity.enabled: + 
entity.process_update(message) + elif zone in ZONES and value != "N/A": + # When we receive the status for a zone, and the value is not "N/A", + # then zone is available on the receiver, so we create the entity for it. + _LOGGER.debug( + "Discovered %s on %s (%s)", + ZONES[zone], + receiver.model_name, + receiver.host, + ) + zone_entity = OnkyoMediaPlayer( + receiver, + zone, + volume_resolution=volume_resolution, + max_volume=max_volume, + sources=sources, + ) + entities[zone] = zone_entity + async_add_entities([zone_entity]) + + receiver.callbacks.connect.append(connect_callback) + receiver.callbacks.update.append(update_callback) class OnkyoMediaPlayer(MediaPlayerEntity): @@ -316,27 +333,30 @@ class OnkyoMediaPlayer(MediaPlayerEntity): def __init__( self, receiver: Receiver, - sources: dict[str, str], zone: str, - max_volume: int, - volume_resolution: int, + *, + volume_resolution: VolumeResolution, + max_volume: float, + sources: dict[InputSource, str], ) -> None: """Initialize the Onkyo Receiver.""" self._receiver = receiver - name = receiver.name + name = receiver.model_name identifier = receiver.identifier self._attr_name = f"{name}{' ' + ZONES[zone] if zone != 'main' else ''}" - if receiver.discovered and zone == "main": - # keep legacy unique_id - self._attr_unique_id = f"{name}_{identifier}" - else: - self._attr_unique_id = f"{identifier}_{zone}" + self._attr_unique_id = f"{identifier}_{zone}" self._zone = zone - self._source_mapping = sources - self._reverse_mapping = {value: key for key, value in sources.items()} - self._max_volume = max_volume + self._volume_resolution = volume_resolution + self._max_volume = max_volume + + self._name_mapping = sources + self._reverse_name_mapping = {value: key for key, value in sources.items()} + self._lib_mapping = _input_lib_cmds(zone) + self._reverse_lib_mapping = { + value: key for key, value in self._lib_mapping.items() + } self._attr_source_list = list(sources.values()) self._attr_extra_state_attributes = {} @@ -408,9 +428,13 @@ async def async_mute_volume(self, mute: bool) -> None: async def async_select_source(self, source: str) -> None: """Select input source.""" if self.source_list and source in self.source_list: - source = self._reverse_mapping[source] + source_lib = self._lib_mapping[self._reverse_name_mapping[source]] + if isinstance(source_lib, str): + source_lib_single = source_lib + else: + source_lib_single = source_lib[0] self._update_receiver( - "input-selector" if self._zone == "main" else "selector", source + "input-selector" if self._zone == "main" else "selector", source_lib_single ) async def async_select_output(self, hdmi_output: str) -> None: @@ -422,7 +446,7 @@ async def async_play_media( ) -> None: """Play radio station by preset number.""" if self.source is not None: - source = self._reverse_mapping[self.source] + source = self._reverse_name_mapping[self.source] if media_type.lower() == "radio" and source in DEFAULT_PLAYABLE_SOURCES: self._update_receiver("preset", media_id) @@ -466,9 +490,10 @@ def process_update(self, update: tuple[str, str, Any]) -> None: elif command in ["volume", "master-volume"] and value != "N/A": self._supports_volume = True # AMP_VOL / (VOL_RESOLUTION * (MAX_VOL / 100)) - self._attr_volume_level = value / ( + volume_level: float = value / ( self._volume_resolution * self._max_volume / 100 ) + self._attr_volume_level = min(1, volume_level) elif command in ["muting", "audio-muting"]: self._attr_is_volume_muted = bool(value == "on") elif command in ["selector", "input-selector"]: @@ -493,18 
+518,17 @@ def process_update(self, update: tuple[str, str, Any]) -> None: self.async_write_ha_state() @callback - def _parse_source(self, source_raw: str | int | tuple[str]) -> None: - # source is either a tuple of values or a single value, - # so we convert to a tuple, when it is a single value. - if isinstance(source_raw, str | int): - source = (str(source_raw),) - else: - source = source_raw - for value in source: - if value in self._source_mapping: - self._attr_source = self._source_mapping[value] - return - self._attr_source = "_".join(source) + def _parse_source(self, source_lib: InputLibValue) -> None: + source = self._reverse_lib_mapping[source_lib] + if source in self._name_mapping: + self._attr_source = self._name_mapping[source] + return + + source_meaning = source.value_meaning + _LOGGER.error( + 'Input source "%s" not in source list: %s', source_meaning, self.entity_id + ) + self._attr_source = source_meaning @callback def _parse_audio_information( diff --git a/homeassistant/components/onkyo/receiver.py b/homeassistant/components/onkyo/receiver.py index eb20f327b6937f..cc6cbbc95fbf12 100644 --- a/homeassistant/components/onkyo/receiver.py +++ b/homeassistant/components/onkyo/receiver.py @@ -2,10 +2,29 @@ from __future__ import annotations -from dataclasses import dataclass +import asyncio +from collections.abc import Callable, Iterable +import contextlib +from dataclasses import dataclass, field +import logging +from typing import Any import pyeiscp +from .const import DEVICE_DISCOVERY_TIMEOUT, DEVICE_INTERVIEW_TIMEOUT, ZONES + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class Callbacks: + """Onkyo Receiver Callbacks.""" + + connect: list[Callable[[Receiver], None]] = field(default_factory=list) + update: list[Callable[[Receiver, tuple[str, str, Any]], None]] = field( + default_factory=list + ) + @dataclass class Receiver: @@ -14,8 +33,62 @@ class Receiver: conn: pyeiscp.Connection model_name: str identifier: str - name: str - discovered: bool + host: str + first_connect: bool = True + callbacks: Callbacks = field(default_factory=Callbacks) + + @classmethod + async def async_create(cls, info: ReceiverInfo) -> Receiver: + """Set up Onkyo Receiver.""" + + receiver: Receiver | None = None + + def on_connect(_origin: str) -> None: + assert receiver is not None + receiver.on_connect() + + def on_update(message: tuple[str, str, Any], _origin: str) -> None: + assert receiver is not None + receiver.on_update(message) + + _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) + + connection = await pyeiscp.Connection.create( + host=info.host, + port=info.port, + connect_callback=on_connect, + update_callback=on_update, + auto_connect=False, + ) + + return ( + receiver := cls( + conn=connection, + model_name=info.model_name, + identifier=info.identifier, + host=info.host, + ) + ) + + def on_connect(self) -> None: + """Receiver (re)connected.""" + _LOGGER.debug("Receiver (re)connected: %s (%s)", self.model_name, self.host) + + # Discover what zones are available for the receiver by querying the power. + # If we get a response for the specific zone, it means it is available. 
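Receiver.on_connect above and on_update just below do little more than fan events out to the subscriber lists held in the Callbacks dataclass, which keeps the receiver decoupled from the Home Assistant entities. A self-contained sketch of that dispatch pattern (the (zone, command, value) message shape follows the diff; the demo receiver class is a simplified stand-in):

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass, field
from typing import Any


@dataclass
class Callbacks:
    """Subscriber lists for connect and update events."""

    connect: list[Callable[[], None]] = field(default_factory=list)
    update: list[Callable[[tuple[str, str, Any]], None]] = field(default_factory=list)


@dataclass
class DemoReceiver:
    callbacks: Callbacks = field(default_factory=Callbacks)

    def on_connect(self) -> None:
        for callback in self.callbacks.connect:
            callback()

    def on_update(self, message: tuple[str, str, Any]) -> None:
        for callback in self.callbacks.update:
            callback(message)


receiver = DemoReceiver()
receiver.callbacks.connect.append(lambda: print("connected"))
receiver.callbacks.update.append(lambda msg: print("update:", msg))
receiver.on_connect()
receiver.on_update(("main", "master-volume", 40))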
+ for zone in ZONES: + self.conn.query_property(zone, "power") + + for callback in self.callbacks.connect: + callback(self) + + self.first_connect = False + + def on_update(self, message: tuple[str, str, Any]) -> None: + """Process new message from the receiver.""" + _LOGGER.debug("Received update callback from %s: %s", self.model_name, message) + for callback in self.callbacks.update: + callback(self, message) @dataclass @@ -26,3 +99,53 @@ class ReceiverInfo: port: int model_name: str identifier: str + + +async def async_interview(host: str) -> ReceiverInfo | None: + """Interview Onkyo Receiver.""" + _LOGGER.debug("Interviewing receiver: %s", host) + + receiver_info: ReceiverInfo | None = None + + event = asyncio.Event() + + async def _callback(conn: pyeiscp.Connection) -> None: + """Receiver interviewed, connection not yet active.""" + nonlocal receiver_info + if receiver_info is None: + info = ReceiverInfo(host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver interviewed: %s (%s)", info.model_name, info.host) + receiver_info = info + event.set() + + timeout = DEVICE_INTERVIEW_TIMEOUT + + await pyeiscp.Connection.discover( + host=host, discovery_callback=_callback, timeout=timeout + ) + + with contextlib.suppress(asyncio.TimeoutError): + await asyncio.wait_for(event.wait(), timeout) + + return receiver_info + + +async def async_discover() -> Iterable[ReceiverInfo]: + """Discover Onkyo Receivers.""" + _LOGGER.debug("Discovering receivers") + + receiver_infos: list[ReceiverInfo] = [] + + async def _callback(conn: pyeiscp.Connection) -> None: + """Receiver discovered, connection not yet active.""" + info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver discovered: %s (%s)", info.model_name, info.host) + receiver_infos.append(info) + + timeout = DEVICE_DISCOVERY_TIMEOUT + + await pyeiscp.Connection.discover(discovery_callback=_callback, timeout=timeout) + + await asyncio.sleep(timeout) + + return receiver_infos diff --git a/homeassistant/components/onkyo/services.py b/homeassistant/components/onkyo/services.py new file mode 100644 index 00000000000000..d875d8287fe412 --- /dev/null +++ b/homeassistant/components/onkyo/services.py @@ -0,0 +1,69 @@ +"""Onkyo services.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import voluptuous as vol + +from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv +from homeassistant.util.hass_dict import HassKey + +from .const import DOMAIN + +if TYPE_CHECKING: + from .media_player import OnkyoMediaPlayer + +DATA_MP_ENTITIES: HassKey[dict[str, dict[str, OnkyoMediaPlayer]]] = HassKey(DOMAIN) + +ATTR_HDMI_OUTPUT = "hdmi_output" +ACCEPTED_VALUES = [ + "no", + "analog", + "yes", + "out", + "out-sub", + "sub", + "hdbaset", + "both", + "up", +] +ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_ENTITY_ID): cv.entity_ids, + vol.Required(ATTR_HDMI_OUTPUT): vol.In(ACCEPTED_VALUES), + } +) +SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" + + +async def async_register_services(hass: HomeAssistant) -> None: + """Register Onkyo services.""" + + hass.data.setdefault(DATA_MP_ENTITIES, {}) + + async def async_service_handle(service: ServiceCall) -> None: + """Handle for services.""" + entity_ids = service.data[ATTR_ENTITY_ID] + + targets: list[OnkyoMediaPlayer] = [] + for 
receiver_entities in hass.data[DATA_MP_ENTITIES].values(): + targets.extend( + entity + for entity in receiver_entities.values() + if entity.entity_id in entity_ids + ) + + for target in targets: + if service.service == SERVICE_SELECT_HDMI_OUTPUT: + await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) + + hass.services.async_register( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_HDMI_OUTPUT, + async_service_handle, + schema=ONKYO_SELECT_OUTPUT_SCHEMA, + ) diff --git a/homeassistant/components/onkyo/strings.json b/homeassistant/components/onkyo/strings.json new file mode 100644 index 00000000000000..1b0eadcc45e00e --- /dev/null +++ b/homeassistant/components/onkyo/strings.json @@ -0,0 +1,60 @@ +{ + "config": { + "step": { + "user": { + "menu_options": { + "manual": "Manual entry", + "eiscp_discovery": "Onkyo discovery" + } + }, + "manual": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + } + }, + "eiscp_discovery": { + "data": { + "device": "[%key:common::config_flow::data::device%]" + } + }, + "configure_receiver": { + "description": "Configure {name}", + "data": { + "volume_resolution": "Number of steps it takes for the receiver to go from the lowest to the highest possible volume", + "input_sources": "List of input sources supported by the receiver" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "empty_input_source_list": "Input source list cannot be empty", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The serial number of the device does not match the previous serial number", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "options": { + "step": { + "init": { + "data": { + "max_volume": "Maximum volume limit (%)" + } + } + } + }, + "issues": { + "deprecated_yaml_import_issue_no_discover": { + "title": "The Onkyo YAML configuration import failed", + "description": "Configuring Onkyo using YAML is being removed but no receivers were discovered when importing your YAML configuration.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + }, + "deprecated_yaml_import_issue_cannot_connect": { + "title": "The Onkyo YAML configuration import failed", + "description": "Configuring Onkyo using YAML is being removed but there was a connection error when importing your YAML configuration for host {host}.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
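The new services.py shown above registers the single onkyo_select_hdmi_output service and resolves its entity_id targets against the per-config-entry entity maps stored under DATA_MP_ENTITIES. A rough sketch of that fan-out, with plain dicts standing in for hass.data and the media player entities (class and function names here are illustrative):

import asyncio


class DemoPlayer:
    """Stand-in for OnkyoMediaPlayer with just the service target method."""

    def __init__(self, entity_id: str) -> None:
        self.entity_id = entity_id
        self.hdmi_output: str | None = None

    async def async_select_output(self, hdmi_output: str) -> None:
        self.hdmi_output = hdmi_output


async def select_hdmi_output(
    mp_entities: dict[str, dict[str, DemoPlayer]],
    entity_ids: list[str],
    hdmi_output: str,
) -> None:
    # Gather every registered entity whose entity_id was targeted by the call.
    targets = [
        entity
        for receiver_entities in mp_entities.values()
        for entity in receiver_entities.values()
        if entity.entity_id in entity_ids
    ]
    for target in targets:
        await target.async_select_output(hdmi_output)


players = {"entry_1": {"main": DemoPlayer("media_player.onkyo_main")}}
asyncio.run(select_hdmi_output(players, ["media_player.onkyo_main"], "out-sub"))
assert players["entry_1"]["main"].hdmi_output == "out-sub"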
+ } + } +} diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index f4e3f11d0b72c8..66e566af0bf906 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -102,7 +102,6 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a ONVIF config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry @staticmethod @callback @@ -136,30 +135,28 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication of an existing config entry.""" - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert reauth_entry is not None - self._reauth_entry = reauth_entry return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm reauth.""" - entry = self._reauth_entry errors: dict[str, str] | None = {} + reauth_entry = self._get_reauth_entry() description_placeholders: dict[str, str] | None = None if user_input is not None: - entry_data = entry.data - self.onvif_config = entry_data | user_input + self.onvif_config = reauth_entry.data | user_input errors, description_placeholders = await self.async_setup_profiles( configure_unique_id=False ) if not errors: - return self.async_update_reload_and_abort(entry, data=self.onvif_config) + return self.async_update_reload_and_abort( + reauth_entry, data=self.onvif_config + ) - username = (user_input or {}).get(CONF_USERNAME) or entry.data[CONF_USERNAME] + username = (user_input or {}).get(CONF_USERNAME) or reauth_entry.data[ + CONF_USERNAME + ] return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( @@ -394,7 +391,6 @@ class OnvifOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize ONVIF options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: diff --git a/homeassistant/components/open_meteo/__init__.py b/homeassistant/components/open_meteo/__init__.py index e3bf763f4291a7..6deb63904ff8e6 100644 --- a/homeassistant/components/open_meteo/__init__.py +++ b/homeassistant/components/open_meteo/__init__.py @@ -62,6 +62,7 @@ async def async_update_forecast() -> Forecast: coordinator: DataUpdateCoordinator[Forecast] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_ZONE]}", update_interval=SCAN_INTERVAL, update_method=async_update_forecast, diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py index 9a2b1b6fa795cb..2a1764e6b5e94e 100644 --- a/homeassistant/components/openai_conversation/config_flow.py +++ b/homeassistant/components/openai_conversation/config_flow.py @@ -26,6 +26,7 @@ SelectSelectorConfig, TemplateSelector, ) +from homeassistant.helpers.typing import VolDictType from .const import ( CONF_CHAT_MODEL, @@ -79,7 +80,7 @@ async def async_step_user( step_id="user", data_schema=STEP_USER_DATA_SCHEMA ) - errors = {} + errors: dict[str, str] = {} try: await validate_input(self.hass, user_input) @@ -114,7 +115,6 @@ class OpenAIOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) @@ 
-150,7 +150,7 @@ async def async_step_init( def openai_config_option_schema( hass: HomeAssistant, options: dict[str, Any] | MappingProxyType[str, Any], -) -> dict: +) -> VolDictType: """Return a schema for OpenAI completion options.""" hass_apis: list[SelectOptionDict] = [ SelectOptionDict( @@ -166,7 +166,7 @@ def openai_config_option_schema( for api in llm.async_get_apis(hass) ) - schema = { + schema: VolDictType = { vol.Optional( CONF_PROMPT, description={ diff --git a/homeassistant/components/openexchangerates/config_flow.py b/homeassistant/components/openexchangerates/config_flow.py index df83690d2e3a66..ffcc60bfa26087 100644 --- a/homeassistant/components/openexchangerates/config_flow.py +++ b/homeassistant/components/openexchangerates/config_flow.py @@ -13,7 +13,7 @@ ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_BASE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import AbortFlow @@ -54,7 +54,6 @@ class OpenExchangeRatesConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" self.currencies: dict[str, str] = {} - self._reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -63,9 +62,9 @@ async def async_step_user( currencies = await self.async_get_currencies() if user_input is None: - existing_data: Mapping[str, str] | dict[str, str] = ( - self._reauth_entry.data if self._reauth_entry else {} - ) + existing_data: Mapping[str, Any] = {} + if self.source == SOURCE_REAUTH: + existing_data = self._get_reauth_entry().data return self.async_show_form( step_id="user", data_schema=get_data_schema(currencies, existing_data), @@ -95,12 +94,10 @@ async def async_step_user( } ) - if self._reauth_entry is not None: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=self._reauth_entry.data | user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_create_entry(title=info["title"], data=user_input) @@ -115,9 +112,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() async def async_get_currencies(self) -> dict[str, str]: diff --git a/homeassistant/components/openexchangerates/manifest.json b/homeassistant/components/openexchangerates/manifest.json index cce90d0fb12279..9e5cd95a93d150 100644 --- a/homeassistant/components/openexchangerates/manifest.json +++ b/homeassistant/components/openexchangerates/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/openexchangerates", "iot_class": "cloud_polling", - "requirements": ["aioopenexchangerates==0.6.2"] + "requirements": ["aioopenexchangerates==0.6.8"] } diff --git a/homeassistant/components/opengarage/cover.py b/homeassistant/components/opengarage/cover.py index a165fcc4785135..9623050c09030f 100644 --- a/homeassistant/components/opengarage/cover.py +++ b/homeassistant/components/opengarage/cover.py @@ -9,9 +9,9 @@ 
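The opengarage cover.py hunk that starts here replaces the STATE_* string constants with the CoverState enum while keeping the same small state machine: the device's door status maps to CLOSED/OPEN, and a transitional CLOSING/OPENING state is set while a button push is in flight. A simplified stand-in for that pattern (not the integration's actual entity class; the enum below only mirrors the members used here):

from enum import StrEnum


class CoverState(StrEnum):
    # Stand-in for the relevant members of Home Assistant's CoverState enum.
    OPEN = "open"
    OPENING = "opening"
    CLOSED = "closed"
    CLOSING = "closing"


STATES_MAP = {0: CoverState.CLOSED, 1: CoverState.OPEN}


class DemoGarageCover:
    def __init__(self, door_status: int) -> None:
        self._state = STATES_MAP.get(door_status)

    @property
    def is_closed(self) -> bool | None:
        return None if self._state is None else self._state == CoverState.CLOSED

    def close_cover(self) -> None:
        if self._state in (CoverState.CLOSED, CoverState.CLOSING):
            return  # nothing to do
        self._state = CoverState.CLOSING  # cleared when the device reports back

    def open_cover(self) -> None:
        if self._state in (CoverState.OPEN, CoverState.OPENING):
            return
        self._state = CoverState.OPENING


cover = DemoGarageCover(door_status=0)
cover.open_cover()
assert cover.is_closed is False  # transitional OPENING state, not CLOSED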
CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -21,7 +21,7 @@ _LOGGER = logging.getLogger(__name__) -STATES_MAP = {0: STATE_CLOSED, 1: STATE_OPEN} +STATES_MAP = {0: CoverState.CLOSED, 1: CoverState.OPEN} async def async_setup_entry( @@ -54,36 +54,36 @@ def is_closed(self) -> bool | None: """Return if the cover is closed.""" if self._state is None: return None - return self._state == STATE_CLOSED + return self._state == CoverState.CLOSED @property def is_closing(self) -> bool | None: """Return if the cover is closing.""" if self._state is None: return None - return self._state == STATE_CLOSING + return self._state == CoverState.CLOSING @property def is_opening(self) -> bool | None: """Return if the cover is opening.""" if self._state is None: return None - return self._state == STATE_OPENING + return self._state == CoverState.OPENING async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" - if self._state in [STATE_CLOSED, STATE_CLOSING]: + if self._state in [CoverState.CLOSED, CoverState.CLOSING]: return self._state_before_move = self._state - self._state = STATE_CLOSING + self._state = CoverState.CLOSING await self._push_button() async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" - if self._state in [STATE_OPEN, STATE_OPENING]: + if self._state in [CoverState.OPEN, CoverState.OPENING]: return self._state_before_move = self._state - self._state = STATE_OPENING + self._state = CoverState.OPENING await self._push_button() @callback diff --git a/homeassistant/components/opensky/config_flow.py b/homeassistant/components/opensky/config_flow.py index 3cfd1ad30a0788..867a4781265c44 100644 --- a/homeassistant/components/opensky/config_flow.py +++ b/homeassistant/components/opensky/config_flow.py @@ -13,12 +13,11 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, - CONF_NAME, CONF_PASSWORD, CONF_RADIUS, CONF_USERNAME, @@ -45,7 +44,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OpenSkyOptionsFlowHandler: """Get the options flow for this handler.""" - return OpenSkyOptionsFlowHandler(config_entry) + return OpenSkyOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,7 +82,7 @@ async def async_step_user( ) -class OpenSkyOptionsFlowHandler(OptionsFlowWithConfigEntry): +class OpenSkyOptionsFlowHandler(OptionsFlow): """OpenSky Options flow handler.""" async def async_step_init( @@ -112,10 +111,7 @@ async def async_step_init( except OpenSkyUnauthenticatedError: errors["base"] = "invalid_auth" if not errors: - return self.async_create_entry( - title=self.options.get(CONF_NAME, "OpenSky"), - data=user_input, - ) + return self.async_create_entry(data=user_input) return self.async_show_form( step_id="init", @@ -130,6 +126,6 @@ async def async_step_init( vol.Optional(CONF_CONTRIBUTING_USER, default=False): bool, } ), - user_input or self.options, + user_input or self.config_entry.options, ), ) diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index 1f52b47cbad0a8..80c16ee88e1f84 100644 --- 
a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -49,7 +49,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OpenThermGwOptionsFlow: """Get the options flow for this handler.""" - return OpenThermGwOptionsFlow(config_entry) + return OpenThermGwOptionsFlow() async def async_step_init( self, info: dict[str, Any] | None = None @@ -132,10 +132,6 @@ def _create_entry(self, gw_id, name, device): class OpenThermGwOptionsFlow(OptionsFlow): """Handle opentherm_gw options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/opentherm_gw/manifest.json b/homeassistant/components/opentherm_gw/manifest.json index 927f9c9ca3e1a7..ecd0a6b99d5d73 100644 --- a/homeassistant/components/opentherm_gw/manifest.json +++ b/homeassistant/components/opentherm_gw/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/opentherm_gw", "iot_class": "local_push", "loggers": ["pyotgw"], - "requirements": ["pyotgw==2.2.1"] + "requirements": ["pyotgw==2.2.2"] } diff --git a/homeassistant/components/openweathermap/config_flow.py b/homeassistant/components/openweathermap/config_flow.py index 5fe06ea2dcd374..8d33e1172879c8 100644 --- a/homeassistant/components/openweathermap/config_flow.py +++ b/homeassistant/components/openweathermap/config_flow.py @@ -44,7 +44,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OpenWeatherMapOptionsFlow: """Get the options flow for this handler.""" - return OpenWeatherMapOptionsFlow(config_entry) + return OpenWeatherMapOptionsFlow() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle a flow initialized by the user.""" @@ -97,10 +97,6 @@ async def async_step_user(self, user_input=None) -> ConfigFlowResult: class OpenWeatherMapOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/openweathermap/coordinator.py b/homeassistant/components/openweathermap/coordinator.py index f7672a1290bc51..3ef0eda0c8fbd7 100644 --- a/homeassistant/components/openweathermap/coordinator.py +++ b/homeassistant/components/openweathermap/coordinator.py @@ -192,12 +192,13 @@ def _calc_precipitation_kind(rain, snow): @staticmethod def _get_precipitation_value(precipitation): """Get precipitation value from weather data.""" - if "all" in precipitation: - return round(precipitation["all"], 2) - if "3h" in precipitation: - return round(precipitation["3h"], 2) - if "1h" in precipitation: - return round(precipitation["1h"], 2) + if precipitation is not None: + if "all" in precipitation: + return round(precipitation["all"], 2) + if "3h" in precipitation: + return round(precipitation["3h"], 2) + if "1h" in precipitation: + return round(precipitation["1h"], 2) return 0 def _get_condition(self, weather_code, timestamp=None): diff --git a/homeassistant/components/openweathermap/manifest.json b/homeassistant/components/openweathermap/manifest.json index 199e750ad4f3fa..14313a5a77e846 100644 --- a/homeassistant/components/openweathermap/manifest.json +++ 
b/homeassistant/components/openweathermap/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/openweathermap", "iot_class": "cloud_polling", "loggers": ["pyopenweathermap"], - "requirements": ["pyopenweathermap==0.1.1"] + "requirements": ["pyopenweathermap==0.2.1"] } diff --git a/homeassistant/components/opower/config_flow.py b/homeassistant/components/opower/config_flow.py index a9162b060a2ed4..6396ba24a15e5b 100644 --- a/homeassistant/components/opower/config_flow.py +++ b/homeassistant/components/opower/config_flow.py @@ -15,7 +15,7 @@ ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_create_clientsession @@ -49,8 +49,12 @@ async def _validate_login( try: await api.async_login() except InvalidAuth: + _LOGGER.exception( + "Invalid auth when connecting to %s", login_data[CONF_UTILITY] + ) errors["base"] = "invalid_auth" except CannotConnect: + _LOGGER.exception("Could not connect to %s", login_data[CONF_UTILITY]) errors["base"] = "cannot_connect" return errors @@ -62,7 +66,6 @@ class OpowerConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize a new OpowerConfigFlow.""" - self.reauth_entry: ConfigEntry | None = None self.utility_info: dict[str, Any] | None = None async def async_step_user( @@ -131,35 +134,29 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self.reauth_entry errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: - data = {**self.reauth_entry.data, **user_input} + data = {**reauth_entry.data, **user_input} errors = await _validate_login(self.hass, data) if not errors: - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=data - ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) + schema: VolDictType = { - vol.Required(CONF_USERNAME): self.reauth_entry.data[CONF_USERNAME], + vol.Required(CONF_USERNAME): reauth_entry.data[CONF_USERNAME], vol.Required(CONF_PASSWORD): str, } - if select_utility(self.reauth_entry.data[CONF_UTILITY]).accepts_mfa(): + if select_utility(reauth_entry.data[CONF_UTILITY]).accepts_mfa(): schema[vol.Optional(CONF_TOTP_SECRET)] = str return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema(schema), errors=errors, - description_placeholders={CONF_NAME: self.reauth_entry.title}, + description_placeholders={CONF_NAME: reauth_entry.title}, ) diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index cd2e28ed63850f..629dce0823c400 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -130,20 +130,32 @@ async def _insert_statistics(self) -> None: continue 
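The _insert_statistics hunk that begins here replaces the single lookup with a two-pass one: it first asks statistics_during_period for a one-second window at start (the common, cheap case) and only repeats the query without an end bound when nothing comes back. A standalone sketch of that fallback loop, with a plain callable standing in for the recorder call and an illustrative statistic id:

from datetime import datetime, timedelta, timezone
from typing import Any


def fetch_with_fallback(fetch, start: datetime) -> dict[str, Any]:
    """Try the narrow one-second window first, then an open-ended query."""
    for end in (start + timedelta(seconds=1), None):
        stats = fetch(start, end)
        if stats:
            return stats
    return {}


def fake_statistics_during_period(
    start: datetime, end: datetime | None
) -> dict[str, Any]:
    if end is not None:
        return {}  # nothing found in the narrow window
    return {"opower:elec_cost": [{"start": start.timestamp(), "sum": 1.0}]}


start = datetime(2024, 11, 1, tzinfo=timezone.utc)
print(fetch_with_fallback(fake_statistics_during_period, start))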
start = cost_reads[0].start_time _LOGGER.debug("Getting statistics at: %s", start) - stats = await get_instance(self.hass).async_add_executor_job( - statistics_during_period, - self.hass, - start, - start + timedelta(seconds=1), - {cost_statistic_id, consumption_statistic_id}, - "hour", - None, - {"sum"}, - ) + # In the common case there should be a previous statistic at start time + # so we only need to fetch one statistic. If there isn't any, fetch all. + for end in (start + timedelta(seconds=1), None): + stats = await get_instance(self.hass).async_add_executor_job( + statistics_during_period, + self.hass, + start, + end, + {cost_statistic_id, consumption_statistic_id}, + "hour", + None, + {"sum"}, + ) + if stats: + break + if end: + _LOGGER.debug( + "Not found. Trying to find the oldest statistic after %s", + start, + ) + # We are in this code path only if get_last_statistics found a stat + # so statistics_during_period should also have found at least one. + assert stats cost_sum = cast(float, stats[cost_statistic_id][0]["sum"]) consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"]) last_stats_time = stats[consumption_statistic_id][0]["start"] - assert last_stats_time == start.timestamp() cost_statistics = [] consumption_statistics = [] diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index 23386a777d2560..593e4cf34b8822 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.8.2"] + "requirements": ["opower==0.8.6"] } diff --git a/homeassistant/components/osoenergy/config_flow.py b/homeassistant/components/osoenergy/config_flow.py index 0642250e9ed8f7..a47f90e3c04379 100644 --- a/homeassistant/components/osoenergy/config_flow.py +++ b/homeassistant/components/osoenergy/config_flow.py @@ -7,12 +7,7 @@ from apyosoenergyapi import OSOEnergy import voluptuous as vol -from homeassistant.config_entries import ( - SOURCE_REAUTH, - ConfigEntry, - ConfigFlow, - ConfigFlowResult, -) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.helpers import aiohttp_client @@ -27,10 +22,6 @@ class OSOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self.entry: ConfigEntry | None = None - async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle a flow initialized by the user.""" errors = {} @@ -40,12 +31,10 @@ async def async_step_user(self, user_input=None) -> ConfigFlowResult: if user_email := await self.get_user_email(user_input[CONF_API_KEY]): await self.async_set_unique_id(user_email) - if self.context["source"] == SOURCE_REAUTH and self.entry: - self.hass.config_entries.async_update_entry( - self.entry, title=user_email, data=user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), title=user_email, data=user_input ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") self._abort_if_unique_id_configured() return self.async_create_entry(title=user_email, data=user_input) @@ -72,6 +61,9 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Re Authenticate a 
user.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - data = {CONF_API_KEY: entry_data[CONF_API_KEY]} - return await self.async_step_user(data) + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + _SCHEMA_STEP_USER, self._get_reauth_entry().data + ), + ) diff --git a/homeassistant/components/osoenergy/icons.json b/homeassistant/components/osoenergy/icons.json index 60b2d257b8aa20..42d1f2cc480ae8 100644 --- a/homeassistant/components/osoenergy/icons.json +++ b/homeassistant/components/osoenergy/icons.json @@ -11,5 +11,22 @@ "default": "mdi:water-boiler" } } + }, + "services": { + "get_profile": { + "service": "mdi:thermometer-lines" + }, + "set_profile": { + "service": "mdi:thermometer-lines" + }, + "set_v40_min": { + "service": "mdi:car-coolant-level" + }, + "turn_off": { + "service": "mdi:water-boiler-off" + }, + "turn_on": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/osoenergy/services.yaml b/homeassistant/components/osoenergy/services.yaml new file mode 100644 index 00000000000000..6c8f5512215b26 --- /dev/null +++ b/homeassistant/components/osoenergy/services.yaml @@ -0,0 +1,261 @@ +get_profile: + target: + entity: + domain: water_heater +set_profile: + target: + entity: + domain: water_heater + fields: + hour_00: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_01: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_02: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_03: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_04: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_05: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_06: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_07: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_08: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_09: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_10: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_11: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_12: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_13: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_14: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_15: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_16: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_17: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_18: + required: 
false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_19: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_20: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_21: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_22: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C + hour_23: + required: false + example: 75 + selector: + number: + min: 10 + max: 75 + step: 1 + unit_of_measurement: °C +set_v40_min: + target: + entity: + domain: water_heater + fields: + v40_min: + required: true + example: 240 + selector: + number: + min: 200 + max: 550 + step: 1 + unit_of_measurement: L +turn_off: + target: + entity: + domain: water_heater + fields: + until_temp_limit: + required: true + default: false + example: false + selector: + boolean: +turn_on: + target: + entity: + domain: water_heater + fields: + until_temp_limit: + required: true + default: false + example: false + selector: + boolean: diff --git a/homeassistant/components/osoenergy/strings.json b/homeassistant/components/osoenergy/strings.json index a7963bfa436223..b8f95c021faa03 100644 --- a/homeassistant/components/osoenergy/strings.json +++ b/homeassistant/components/osoenergy/strings.json @@ -91,5 +91,143 @@ "name": "Temperature one" } } + }, + "services": { + "get_profile": { + "name": "Get heater profile", + "description": "Get the temperature profile of water heater" + }, + "set_profile": { + "name": "Set heater profile", + "description": "Set the temperature profile of water heater", + "fields": { + "hour_00": { + "name": "00:00", + "description": "00:00 hour" + }, + "hour_01": { + "name": "01:00", + "description": "01:00 hour" + }, + "hour_02": { + "name": "02:00", + "description": "02:00 hour" + }, + "hour_03": { + "name": "03:00", + "description": "03:00 hour" + }, + "hour_04": { + "name": "04:00", + "description": "04:00 hour" + }, + "hour_05": { + "name": "05:00", + "description": "05:00 hour" + }, + "hour_06": { + "name": "06:00", + "description": "06:00 hour" + }, + "hour_07": { + "name": "07:00", + "description": "07:00 hour" + }, + "hour_08": { + "name": "08:00", + "description": "08:00 hour" + }, + "hour_09": { + "name": "09:00", + "description": "09:00 hour" + }, + "hour_10": { + "name": "10:00", + "description": "10:00 hour" + }, + "hour_11": { + "name": "11:00", + "description": "11:00 hour" + }, + "hour_12": { + "name": "12:00", + "description": "12:00 hour" + }, + "hour_13": { + "name": "13:00", + "description": "13:00 hour" + }, + "hour_14": { + "name": "14:00", + "description": "14:00 hour" + }, + "hour_15": { + "name": "15:00", + "description": "15:00 hour" + }, + "hour_16": { + "name": "16:00", + "description": "16:00 hour" + }, + "hour_17": { + "name": "17:00", + "description": "17:00 hour" + }, + "hour_18": { + "name": "18:00", + "description": "18:00 hour" + }, + "hour_19": { + "name": "19:00", + "description": "19:00 hour" + }, + "hour_20": { + "name": "20:00", + "description": "20:00 hour" + }, + "hour_21": { + "name": "21:00", + "description": "21:00 hour" + }, + "hour_22": { + "name": "22:00", + "description": "22:00 hour" + }, + "hour_23": { + "name": "23:00", + "description": "23:00 hour" + } + } + }, + "set_v40_min": { + "name": "Set v40 min", + "description": "Set the minimum quantity of water at 
40°C for a heater", + "fields": { + "v40_min": { + "name": "V40 Min", + "description": "Minimum quantity of water at 40°C (200-350 for SAGA S200, 300-550 for SAGA S300)" + } + } + }, + "turn_off": { + "name": "Turn off heating", + "description": "Turn off heating for one hour or until min temperature is reached", + "fields": { + "until_temp_limit": { + "name": "Until temperature limit", + "description": "Choose if heating should be off until min temperature (True) is reached or for one hour (False)" + } + } + }, + "turn_on": { + "name": "Turn on heating", + "description": "Turn on heating for one hour or until max temperature is reached", + "fields": { + "until_temp_limit": { + "name": "Until temperature limit", + "description": "Choose if heating should be on until max temperature (True) is reached or for one hour (False)" + } + } + } } } diff --git a/homeassistant/components/osoenergy/water_heater.py b/homeassistant/components/osoenergy/water_heater.py index 55229e42c2fce3..ff117d6577dc1d 100644 --- a/homeassistant/components/osoenergy/water_heater.py +++ b/homeassistant/components/osoenergy/water_heater.py @@ -1,9 +1,11 @@ """Support for OSO Energy water heaters.""" +import datetime as dt from typing import Any from apyosoenergyapi import OSOEnergy from apyosoenergyapi.helper.const import OSOEnergyWaterHeaterData +import voluptuous as vol from homeassistant.components.water_heater import ( STATE_ECO, @@ -15,12 +17,17 @@ ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse +from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.util.dt as dt_util +from homeassistant.util.json import JsonValueType from .const import DOMAIN from .entity import OSOEnergyEntity +ATTR_UNTIL_TEMP_LIMIT = "until_temp_limit" +ATTR_V40MIN = "v40_min" CURRENT_OPERATION_MAP: dict[str, Any] = { "default": { "off": STATE_OFF, @@ -34,6 +41,11 @@ "extraenergy": STATE_HIGH_DEMAND, }, } +SERVICE_GET_PROFILE = "get_profile" +SERVICE_SET_PROFILE = "set_profile" +SERVICE_SET_V40MIN = "set_v40_min" +SERVICE_TURN_OFF = "turn_off" +SERVICE_TURN_ON = "turn_on" async def async_setup_entry( @@ -46,6 +58,102 @@ async def async_setup_entry( return async_add_entities((OSOEnergyWaterHeater(osoenergy, dev) for dev in devices), True) + platform = entity_platform.async_get_current_platform() + + platform.async_register_entity_service( + SERVICE_GET_PROFILE, + {}, + OSOEnergyWaterHeater.async_get_profile.__name__, + supports_response=SupportsResponse.ONLY, + ) + + service_set_profile_schema = cv.make_entity_service_schema( + { + vol.Optional(f"hour_{hour:02d}"): vol.All( + vol.Coerce(int), vol.Range(min=10, max=75) + ) + for hour in range(24) + } + ) + + platform.async_register_entity_service( + SERVICE_SET_PROFILE, + service_set_profile_schema, + OSOEnergyWaterHeater.async_set_profile.__name__, + ) + + platform.async_register_entity_service( + SERVICE_SET_V40MIN, + { + vol.Required(ATTR_V40MIN): vol.All( + vol.Coerce(float), vol.Range(min=200, max=550) + ), + }, + OSOEnergyWaterHeater.async_set_v40_min.__name__, + ) + + platform.async_register_entity_service( + SERVICE_TURN_OFF, + {vol.Required(ATTR_UNTIL_TEMP_LIMIT): vol.All(cv.boolean)}, + OSOEnergyWaterHeater.async_oso_turn_off.__name__, + ) + + platform.async_register_entity_service( + 
SERVICE_TURN_ON, + {vol.Required(ATTR_UNTIL_TEMP_LIMIT): vol.All(cv.boolean)}, + OSOEnergyWaterHeater.async_oso_turn_on.__name__, + ) + + +def _get_utc_hour(local_hour: int) -> dt.datetime: + """Convert the requested local hour to a utc hour for the day. + + Args: + local_hour: the local hour (0-23) for the current day to be converted. + + Returns: + Datetime representation for the requested hour in utc time for the day. + + """ + now = dt_util.now() + local_time = now.replace(hour=local_hour, minute=0, second=0, microsecond=0) + return dt_util.as_utc(local_time) + + +def _get_local_hour(utc_hour: int) -> dt.datetime: + """Convert the requested utc hour to a local hour for the day. + + Args: + utc_hour: the utc hour (0-23) for the current day to be converted. + + Returns: + Datetime representation for the requested hour in local time for the day. + + """ + utc_now = dt_util.utcnow() + utc_time = utc_now.replace(hour=utc_hour, minute=0, second=0, microsecond=0) + return dt_util.as_local(utc_time) + + +def _convert_profile_to_local(values: list[float]) -> list[JsonValueType]: + """Convert UTC profile to local. + + Receives a device temperature schedule - 24 values for the day where the index represents the hour of the day in UTC. + Converts the schedule to local time. + + Args: + values: list of floats representing the 24 hour temperature schedule for the device + Returns: + The device temperature schedule in local time. + + """ + profile: list[JsonValueType] = [0.0] * 24 + for hour in range(24): + local_hour = _get_local_hour(hour) + profile[local_hour.hour] = float(values[hour]) + + return profile + class OSOEnergyWaterHeater( OSOEnergyEntity[OSOEnergyWaterHeaterData], WaterHeaterEntity @@ -53,7 +161,9 @@ class OSOEnergyWaterHeater( """OSO Energy Water Heater Device.""" _attr_name = None - _attr_supported_features = WaterHeaterEntityFeature.TARGET_TEMPERATURE + _attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE | WaterHeaterEntityFeature.ON_OFF + ) _attr_temperature_unit = UnitOfTemperature.CELSIUS def __init__( @@ -131,6 +241,36 @@ async def async_set_temperature(self, **kwargs: Any) -> None: await self.osoenergy.hotwater.set_profile(self.entity_data, profile) + async def async_get_profile(self) -> ServiceResponse: + """Return the current temperature profile of the device.""" + + profile = self.entity_data.profile + return {"profile": _convert_profile_to_local(profile)} + + async def async_set_profile(self, **kwargs: Any) -> None: + """Handle the service call.""" + profile = self.entity_data.profile + + for hour in range(24): + hour_key = f"hour_{hour:02d}" + + if hour_key in kwargs: + profile[_get_utc_hour(hour).hour] = kwargs[hour_key] + + await self.osoenergy.hotwater.set_profile(self.entity_data, profile) + + async def async_set_v40_min(self, v40_min) -> None: + """Handle the service call.""" + await self.osoenergy.hotwater.set_v40_min(self.entity_data, v40_min) + + async def async_oso_turn_off(self, until_temp_limit) -> None: + """Handle the service call.""" + await self.osoenergy.hotwater.turn_off(self.entity_data, until_temp_limit) + + async def async_oso_turn_on(self, until_temp_limit) -> None: + """Handle the service call.""" + await self.osoenergy.hotwater.turn_on(self.entity_data, until_temp_limit) + async def async_update(self) -> None: """Update all Node data from Hive.""" await self.osoenergy.session.update_data() diff --git a/homeassistant/components/otbr/config_flow.py b/homeassistant/components/otbr/config_flow.py index 
f24d141247db32..aff79ca4651c7f 100644 --- a/homeassistant/components/otbr/config_flow.py +++ b/homeassistant/components/otbr/config_flow.py @@ -13,7 +13,7 @@ import voluptuous as vol import yarl -from homeassistant.components.hassio import AddonError, AddonManager, HassioServiceInfo +from homeassistant.components.hassio import AddonError, AddonManager from homeassistant.components.homeassistant_yellow import hardware as yellow_hardware from homeassistant.components.thread import async_get_preferred_dataset from homeassistant.config_entries import SOURCE_HASSIO, ConfigFlow, ConfigFlowResult @@ -21,6 +21,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DEFAULT_CHANNEL, DOMAIN from .util import ( diff --git a/homeassistant/components/otbr/strings.json b/homeassistant/components/otbr/strings.json index bc7812c1db777a..e1afa5b89099b6 100644 --- a/homeassistant/components/otbr/strings.json +++ b/homeassistant/components/otbr/strings.json @@ -13,7 +13,9 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "already_configured": "The Thread border router is already configured", + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "issues": { diff --git a/homeassistant/components/overkiz/alarm_control_panel.py b/homeassistant/components/overkiz/alarm_control_panel.py index 151f91790cfaa3..bdbf4d0cc8d7d3 100644 --- a/homeassistant/components/overkiz/alarm_control_panel.py +++ b/homeassistant/components/overkiz/alarm_control_panel.py @@ -14,18 +14,10 @@ AlarmControlPanelEntity, AlarmControlPanelEntityDescription, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -41,7 +33,7 @@ class OverkizAlarmDescription(AlarmControlPanelEntityDescription): """Class to describe an Overkiz alarm control panel.""" supported_features: AlarmControlPanelEntityFeature - fn_state: Callable[[Callable[[str], OverkizStateType]], str] + fn_state: Callable[[Callable[[str], OverkizStateType]], AlarmControlPanelState] alarm_disarm: str | None = None alarm_disarm_args: OverkizStateType | list[OverkizStateType] = None @@ -55,42 +47,44 @@ class OverkizAlarmDescription(AlarmControlPanelEntityDescription): alarm_trigger_args: OverkizStateType | list[OverkizStateType] = None -MAP_INTERNAL_STATUS_STATE: dict[str, str] = { - OverkizCommandParam.OFF: STATE_ALARM_DISARMED, - OverkizCommandParam.ZONE_1: STATE_ALARM_ARMED_HOME, - OverkizCommandParam.ZONE_2: STATE_ALARM_ARMED_NIGHT, - OverkizCommandParam.TOTAL: STATE_ALARM_ARMED_AWAY, +MAP_INTERNAL_STATUS_STATE: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.OFF: AlarmControlPanelState.DISARMED, + 
OverkizCommandParam.ZONE_1: AlarmControlPanelState.ARMED_HOME, + OverkizCommandParam.ZONE_2: AlarmControlPanelState.ARMED_NIGHT, + OverkizCommandParam.TOTAL: AlarmControlPanelState.ARMED_AWAY, } -def _state_tsk_alarm_controller(select_state: Callable[[str], OverkizStateType]) -> str: +def _state_tsk_alarm_controller( + select_state: Callable[[str], OverkizStateType], +) -> AlarmControlPanelState: """Return the state of the device.""" if ( cast(str, select_state(OverkizState.INTERNAL_INTRUSION_DETECTED)) == OverkizCommandParam.DETECTED ): - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED if cast(str, select_state(OverkizState.INTERNAL_CURRENT_ALARM_MODE)) != cast( str, select_state(OverkizState.INTERNAL_TARGET_ALARM_MODE) ): - return STATE_ALARM_PENDING + return AlarmControlPanelState.PENDING return MAP_INTERNAL_STATUS_STATE[ cast(str, select_state(OverkizState.INTERNAL_TARGET_ALARM_MODE)) ] -MAP_CORE_ACTIVE_ZONES: dict[str, str] = { - OverkizCommandParam.A: STATE_ALARM_ARMED_HOME, - f"{OverkizCommandParam.A},{OverkizCommandParam.B}": STATE_ALARM_ARMED_NIGHT, - f"{OverkizCommandParam.A},{OverkizCommandParam.B},{OverkizCommandParam.C}": STATE_ALARM_ARMED_AWAY, +MAP_CORE_ACTIVE_ZONES: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.A: AlarmControlPanelState.ARMED_HOME, + f"{OverkizCommandParam.A},{OverkizCommandParam.B}": AlarmControlPanelState.ARMED_NIGHT, + f"{OverkizCommandParam.A},{OverkizCommandParam.B},{OverkizCommandParam.C}": AlarmControlPanelState.ARMED_AWAY, } def _state_stateful_alarm_controller( select_state: Callable[[str], OverkizStateType], -) -> str: +) -> AlarmControlPanelState: """Return the state of the device.""" if state := cast(str, select_state(OverkizState.CORE_ACTIVE_ZONES)): # The Stateful Alarm Controller has 3 zones with the following options: @@ -99,44 +93,44 @@ def _state_stateful_alarm_controller( if state in MAP_CORE_ACTIVE_ZONES: return MAP_CORE_ACTIVE_ZONES[state] - return STATE_ALARM_ARMED_CUSTOM_BYPASS + return AlarmControlPanelState.ARMED_CUSTOM_BYPASS - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED -MAP_MYFOX_STATUS_STATE: dict[str, str] = { - OverkizCommandParam.ARMED: STATE_ALARM_ARMED_AWAY, - OverkizCommandParam.DISARMED: STATE_ALARM_DISARMED, - OverkizCommandParam.PARTIAL: STATE_ALARM_ARMED_NIGHT, +MAP_MYFOX_STATUS_STATE: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.ARMED: AlarmControlPanelState.ARMED_AWAY, + OverkizCommandParam.DISARMED: AlarmControlPanelState.DISARMED, + OverkizCommandParam.PARTIAL: AlarmControlPanelState.ARMED_NIGHT, } def _state_myfox_alarm_controller( select_state: Callable[[str], OverkizStateType], -) -> str: +) -> AlarmControlPanelState: """Return the state of the device.""" if ( cast(str, select_state(OverkizState.CORE_INTRUSION)) == OverkizCommandParam.DETECTED ): - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED return MAP_MYFOX_STATUS_STATE[ cast(str, select_state(OverkizState.MYFOX_ALARM_STATUS)) ] -MAP_ARM_TYPE: dict[str, str] = { - OverkizCommandParam.DISARMED: STATE_ALARM_DISARMED, - OverkizCommandParam.ARMED_DAY: STATE_ALARM_ARMED_HOME, - OverkizCommandParam.ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT, - OverkizCommandParam.ARMED: STATE_ALARM_ARMED_AWAY, +MAP_ARM_TYPE: dict[str, AlarmControlPanelState] = { + OverkizCommandParam.DISARMED: AlarmControlPanelState.DISARMED, + OverkizCommandParam.ARMED_DAY: AlarmControlPanelState.ARMED_HOME, + OverkizCommandParam.ARMED_NIGHT: AlarmControlPanelState.ARMED_NIGHT, + 
OverkizCommandParam.ARMED: AlarmControlPanelState.ARMED_AWAY, } def _state_alarm_panel_controller( select_state: Callable[[str], OverkizStateType], -) -> str: +) -> AlarmControlPanelState: """Return the state of the device.""" return MAP_ARM_TYPE[ cast(str, select_state(OverkizState.VERISURE_ALARM_PANEL_MAIN_ARM_TYPE)) @@ -254,7 +248,7 @@ def __init__( self._attr_supported_features = self.entity_description.supported_features @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return the state of the device.""" return self.entity_description.fn_state(self.executor.select_state) diff --git a/homeassistant/components/overkiz/climate/__init__.py b/homeassistant/components/overkiz/climate/__init__.py index f05a716031eaac..97840df7a41a68 100644 --- a/homeassistant/components/overkiz/climate/__init__.py +++ b/homeassistant/components/overkiz/climate/__init__.py @@ -96,7 +96,7 @@ async def async_setup_entry( # ie Atlantic APC entities_based_on_widget_and_controllable: list[Entity] = [ WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY[device.widget][ - device.controllable_name + device.controllable_name # type: ignore[index] ](device.device_url, data.coordinator) for device in data.platforms[Platform.CLIMATE] if device.widget in WIDGET_AND_CONTROLLABLE_TO_CLIMATE_ENTITY diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py index 4b88cd4a3e84ae..471a13d0de2e6d 100644 --- a/homeassistant/components/overkiz/config_flow.py +++ b/homeassistant/components/overkiz/config_flow.py @@ -24,7 +24,7 @@ import voluptuous as vol from homeassistant.components import dhcp, zeroconf -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -47,7 +47,6 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None _api_type: APIType = APIType.CLOUD _user: str | None = None _server: str = DEFAULT_SERVER @@ -174,27 +173,13 @@ async def async_step_cloud( errors["base"] = "unknown" LOGGER.exception("Unknown error") else: - if self._reauth_entry: - if self._reauth_entry.unique_id != self.unique_id: - return self.async_abort(reason="reauth_wrong_account") - - # Update existing entry during reauth - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={ - **self._reauth_entry.data, - **user_input, - }, - ) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_wrong_account") - self.hass.async_create_task( - self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - return self.async_abort(reason="reauth_successful") - # Create new entry self._abort_if_unique_id_configured() @@ -257,27 +242,13 @@ async def async_step_local( errors["base"] = "unknown" LOGGER.exception("Unknown error") else: - if self._reauth_entry: - if self._reauth_entry.unique_id != self.unique_id: - return self.async_abort(reason="reauth_wrong_account") - - # Update existing entry during reauth - self.hass.config_entries.async_update_entry( - self._reauth_entry, - data={ - **self._reauth_entry.data, - **user_input, - }, - ) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_wrong_account") - self.hass.async_create_task( - 
self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - return self.async_abort(reason="reauth_successful") - # Create new entry self._abort_if_unique_id_configured() @@ -346,22 +317,15 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth.""" - self._reauth_entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) - # overkiz entries always have unique IDs - self.context["title_placeholders"] = { - "gateway_id": cast(str, self._reauth_entry.unique_id) - } + self.context["title_placeholders"] = {"gateway_id": cast(str, self.unique_id)} - self._user = self._reauth_entry.data[CONF_USERNAME] - self._server = self._reauth_entry.data[CONF_HUB] - self._api_type = self._reauth_entry.data.get(CONF_API_TYPE, APIType.CLOUD) + self._user = entry_data[CONF_USERNAME] + self._server = entry_data[CONF_HUB] + self._api_type = entry_data.get(CONF_API_TYPE, APIType.CLOUD) if self._api_type == APIType.LOCAL: - self._host = self._reauth_entry.data[CONF_HOST] + self._host = entry_data[CONF_HOST] return await self.async_step_user(dict(entry_data)) diff --git a/homeassistant/components/ovo_energy/__init__.py b/homeassistant/components/ovo_energy/__init__.py index 7cce25d08d5dfd..436180407f49ad 100644 --- a/homeassistant/components/ovo_energy/__init__.py +++ b/homeassistant/components/ovo_energy/__init__.py @@ -32,7 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client_session=async_get_clientsession(hass), ) - if custom_account := entry.data.get(CONF_ACCOUNT) is not None: + if (custom_account := entry.data.get(CONF_ACCOUNT)) is not None: client.custom_account_id = custom_account try: @@ -49,7 +49,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_update_data() -> OVODailyUsage: """Fetch data from OVO Energy.""" - if custom_account := entry.data.get(CONF_ACCOUNT) is not None: + if (custom_account := entry.data.get(CONF_ACCOUNT)) is not None: client.custom_account_id = custom_account async with asyncio.timeout(10): @@ -67,6 +67,7 @@ async def async_update_data() -> OVODailyUsage: coordinator = DataUpdateCoordinator[OVODailyUsage]( hass, _LOGGER, + config_entry=entry, # Name of the data. For logging purposes. 
name="sensor", update_method=async_update_data, diff --git a/homeassistant/components/ovo_energy/config_flow.py b/homeassistant/components/ovo_energy/config_flow.py index 2dee284e1b19b0..53fc4f8eff6331 100644 --- a/homeassistant/components/ovo_energy/config_flow.py +++ b/homeassistant/components/ovo_energy/config_flow.py @@ -46,7 +46,7 @@ async def async_step_user( client_session=async_get_clientsession(self.hass), ) - if custom_account := user_input.get(CONF_ACCOUNT) is not None: + if (custom_account := user_input.get(CONF_ACCOUNT)) is not None: client.custom_account_id = custom_account try: @@ -79,22 +79,26 @@ async def async_step_user( async def async_step_reauth( self, - user_input: Mapping[str, Any], + entry_data: Mapping[str, Any], ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - errors = {} - - if user_input and user_input.get(CONF_USERNAME): - self.username = user_input[CONF_USERNAME] - - if user_input and user_input.get(CONF_ACCOUNT): - self.account = user_input[CONF_ACCOUNT] + self.username = entry_data.get(CONF_USERNAME) + self.account = entry_data.get(CONF_ACCOUNT) if self.username: # If we have a username, use it as flow title self.context["title_placeholders"] = {CONF_USERNAME: self.username} - if user_input is not None and user_input.get(CONF_PASSWORD) is not None: + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, + user_input: Mapping[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + errors = {} + + if user_input is not None: client = OVOEnergy( client_session=async_get_clientsession(self.hass), ) @@ -111,19 +115,13 @@ async def async_step_reauth( errors["base"] = "connection_error" else: if authenticated: - entry = await self.async_set_unique_id(self.username) - if entry: - self.hass.config_entries.async_update_entry( - entry, - data={ - CONF_USERNAME: self.username, - CONF_PASSWORD: user_input[CONF_PASSWORD], - }, - ) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, + ) errors["base"] = "authorization_error" return self.async_show_form( - step_id="reauth", data_schema=REAUTH_SCHEMA, errors=errors + step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, errors=errors ) diff --git a/homeassistant/components/ovo_energy/strings.json b/homeassistant/components/ovo_energy/strings.json index fda0c2996dcbf1..3dc11e3a6017c0 100644 --- a/homeassistant/components/ovo_energy/strings.json +++ b/homeassistant/components/ovo_energy/strings.json @@ -1,10 +1,15 @@ { "config": { "flow_title": "{username}", + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "connection_error": "[%key:common::config_flow::error::cannot_connect%]", + "authorization_error": "[%key:common::config_flow::error::invalid_auth%]" }, "step": { "user": { @@ -16,7 +21,7 @@ "description": "Set up an OVO Energy instance to access your energy usage.", "title": "Add OVO Energy Account" }, - "reauth": { + "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" }, diff --git 
a/homeassistant/components/owntracks/config_flow.py b/homeassistant/components/owntracks/config_flow.py index 390cc880c1ec9e..b92f5d7ce0691e 100644 --- a/homeassistant/components/owntracks/config_flow.py +++ b/homeassistant/components/owntracks/config_flow.py @@ -23,9 +23,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a user initiated set up flow to create OwnTracks webhook.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form(step_id="user") diff --git a/homeassistant/components/owntracks/manifest.json b/homeassistant/components/owntracks/manifest.json index 79af00627a411c..7ff5a14345165e 100644 --- a/homeassistant/components/owntracks/manifest.json +++ b/homeassistant/components/owntracks/manifest.json @@ -8,5 +8,6 @@ "documentation": "https://www.home-assistant.io/integrations/owntracks", "iot_class": "local_push", "loggers": ["nacl"], - "requirements": ["PyNaCl==1.5.0"] + "requirements": ["PyNaCl==1.5.0"], + "single_config_entry": true } diff --git a/homeassistant/components/owntracks/strings.json b/homeassistant/components/owntracks/strings.json index 8fdd771b95e571..3c08550dab7f12 100644 --- a/homeassistant/components/owntracks/strings.json +++ b/homeassistant/components/owntracks/strings.json @@ -7,8 +7,7 @@ } }, "abort": { - "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]" }, "create_entry": { "default": "On Android, open [the OwnTracks app]({android_url}), go to Preferences > Connection. Change the following settings:\n - Mode: HTTP\n - Host: {webhook_url}\n - Identification:\n - Username: `'(Your name)'`\n - Device ID: `'(Your device name)'`\n\nOn iOS, open [the OwnTracks app]({ios_url}), tap (i) icon in top left > Settings. Change the following settings:\n - Mode: HTTP\n - URL: {webhook_url}\n - Turn on authentication\n - UserID: `'(Your name)'`\n\n{secret}\n\nSee [the documentation]({docs_url}) for more information." 
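
A minimal standalone sketch of the operator-precedence pitfall addressed by the parenthesized `:=` assignments in the ovo_energy hunks above (plain Python, not Home Assistant code; the dict and key below are illustrative only):

    data = {"account": "A-123"}

    # Without parentheses, ":=" binds more loosely than "is not None",
    # so custom_account is set to the boolean result of the comparison.
    if custom_account := data.get("account") is not None:
        print(custom_account)  # True

    # With parentheses, custom_account is set to the looked-up value itself,
    # and the "is not None" check applies to that value.
    if (custom_account := data.get("account")) is not None:
        print(custom_account)  # A-123

Assignment expressions have lower precedence than comparisons (PEP 572), which is why the unparenthesized form stored `True`/`False` instead of the account ID; the added parentheses restore the intended behavior in both the ovo_energy `__init__.py` and `config_flow.py` hunks.
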
diff --git a/homeassistant/components/p1_monitor/__init__.py b/homeassistant/components/p1_monitor/__init__.py index 8125e9f7a551b2..3361506dafb84d 100644 --- a/homeassistant/components/p1_monitor/__init__.py +++ b/homeassistant/components/p1_monitor/__init__.py @@ -3,11 +3,11 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN +from .const import DOMAIN, LOGGER from .coordinator import P1MonitorDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] @@ -30,6 +30,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + LOGGER.debug("Migrating from version %s", config_entry.version) + + if config_entry.version == 1: + # Migrate to split host and port + host = config_entry.data[CONF_HOST] + if ":" in host: + host, port = host.split(":") + else: + port = 80 + + new_data = { + **config_entry.data, + CONF_HOST: host, + CONF_PORT: int(port), + } + + hass.config_entries.async_update_entry(config_entry, data=new_data, version=2) + LOGGER.debug("Migration to version %s successful", config_entry.version) + return True + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload P1 Monitor config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/p1_monitor/config_flow.py b/homeassistant/components/p1_monitor/config_flow.py index 9c039d06b94191..a7ede186d727ec 100644 --- a/homeassistant/components/p1_monitor/config_flow.py +++ b/homeassistant/components/p1_monitor/config_flow.py @@ -8,9 +8,14 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.selector import TextSelector +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + TextSelector, +) from .const import DOMAIN @@ -18,7 +23,7 @@ class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for P1 Monitor.""" - VERSION = 1 + VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -31,7 +36,9 @@ async def async_step_user( session = async_get_clientsession(self.hass) try: async with P1Monitor( - host=user_input[CONF_HOST], session=session + host=user_input[CONF_HOST], + port=user_input[CONF_PORT], + session=session, ) as client: await client.smartmeter() except P1MonitorError: @@ -41,6 +48,7 @@ async def async_step_user( title="P1 Monitor", data={ CONF_HOST: user_input[CONF_HOST], + CONF_PORT: user_input[CONF_PORT], }, ) @@ -49,6 +57,14 @@ async def async_step_user( data_schema=vol.Schema( { vol.Required(CONF_HOST): TextSelector(), + vol.Required(CONF_PORT, default=80): vol.All( + NumberSelector( + NumberSelectorConfig( + min=1, max=65535, mode=NumberSelectorMode.BOX + ), + ), + vol.Coerce(int), + ), } ), errors=errors, diff --git a/homeassistant/components/p1_monitor/coordinator.py b/homeassistant/components/p1_monitor/coordinator.py index 49844adf39b952..5459f88c38893c 100644 --- 
a/homeassistant/components/p1_monitor/coordinator.py +++ b/homeassistant/components/p1_monitor/coordinator.py @@ -15,7 +15,7 @@ ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -59,7 +59,9 @@ def __init__( ) self.p1monitor = P1Monitor( - self.config_entry.data[CONF_HOST], session=async_get_clientsession(hass) + host=self.config_entry.data[CONF_HOST], + port=self.config_entry.data[CONF_PORT], + session=async_get_clientsession(hass), ) async def _async_update_data(self) -> P1MonitorData: diff --git a/homeassistant/components/p1_monitor/diagnostics.py b/homeassistant/components/p1_monitor/diagnostics.py index 5fb8cb472e8e38..c8b4e99099e8da 100644 --- a/homeassistant/components/p1_monitor/diagnostics.py +++ b/homeassistant/components/p1_monitor/diagnostics.py @@ -7,7 +7,7 @@ from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from .const import ( @@ -22,9 +22,7 @@ if TYPE_CHECKING: from _typeshed import DataclassInstance -TO_REDACT = { - CONF_HOST, -} +TO_REDACT = {CONF_HOST, CONF_PORT} async def async_get_config_entry_diagnostics( diff --git a/homeassistant/components/p1_monitor/strings.json b/homeassistant/components/p1_monitor/strings.json index 781ca109235196..b64f1dcc291548 100644 --- a/homeassistant/components/p1_monitor/strings.json +++ b/homeassistant/components/p1_monitor/strings.json @@ -4,10 +4,12 @@ "user": { "description": "Set up P1 Monitor to integrate with Home Assistant.", "data": { - "host": "[%key:common::config_flow::data::host%]" + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" }, "data_description": { - "host": "The IP address or hostname of your P1 Monitor installation." + "host": "The IP address or hostname of your P1 Monitor installation.", + "port": "The port of your P1 Monitor installation." 
} } }, diff --git a/homeassistant/components/palazzetti/__init__.py b/homeassistant/components/palazzetti/__init__.py new file mode 100644 index 00000000000000..ecaa80890977ec --- /dev/null +++ b/homeassistant/components/palazzetti/__init__.py @@ -0,0 +1,27 @@ +"""The Palazzetti integration.""" + +from __future__ import annotations + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import PalazzettiConfigEntry, PalazzettiDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.CLIMATE] + + +async def async_setup_entry(hass: HomeAssistant, entry: PalazzettiConfigEntry) -> bool: + """Set up Palazzetti from a config entry.""" + + coordinator = PalazzettiDataUpdateCoordinator(hass) + + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: PalazzettiConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/palazzetti/climate.py b/homeassistant/components/palazzetti/climate.py new file mode 100644 index 00000000000000..aff988051f3384 --- /dev/null +++ b/homeassistant/components/palazzetti/climate.py @@ -0,0 +1,160 @@ +"""Support for Palazzetti climates.""" + +from typing import Any + +from pypalazzetti.exceptions import CommunicationError, ValidationError + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import PalazzettiConfigEntry +from .const import DOMAIN, FAN_AUTO, FAN_HIGH, FAN_MODES, FAN_SILENT, PALAZZETTI +from .coordinator import PalazzettiDataUpdateCoordinator + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PalazzettiConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Palazzetti climates based on a config entry.""" + async_add_entities([PalazzettiClimateEntity(entry.runtime_data)]) + + +class PalazzettiClimateEntity( + CoordinatorEntity[PalazzettiDataUpdateCoordinator], ClimateEntity +): + """Defines a Palazzetti climate.""" + + _attr_has_entity_name = True + _attr_name = None + _attr_translation_key = DOMAIN + _attr_target_temperature_step = 1.0 + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + ) + + def __init__(self, coordinator: PalazzettiDataUpdateCoordinator) -> None: + """Initialize Palazzetti climate.""" + super().__init__(coordinator) + client = coordinator.client + mac = coordinator.config_entry.unique_id + assert mac is not None + self._attr_unique_id = mac + self._attr_device_info = dr.DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, mac)}, + name=client.name, + manufacturer=PALAZZETTI, + sw_version=client.sw_version, + hw_version=client.hw_version, + ) + self._attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF] + self._attr_min_temp = client.target_temperature_min + self._attr_max_temp = client.target_temperature_max + self._attr_fan_modes = list( + map(str, range(client.fan_speed_min, client.fan_speed_max + 1)) + ) + if client.has_fan_silent: + self._attr_fan_modes.insert(0, FAN_SILENT) + if client.has_fan_high: + self._attr_fan_modes.append(FAN_HIGH) + if client.has_fan_auto: + self._attr_fan_modes.append(FAN_AUTO) + + @property + def available(self) -> bool: + """Is the entity available.""" + return super().available and self.coordinator.client.connected + + @property + def hvac_mode(self) -> HVACMode: + """Return hvac operation ie. 
heat or off mode.""" + is_heating = bool(self.coordinator.client.is_heating) + return HVACMode.HEAT if is_heating else HVACMode.OFF + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + try: + await self.coordinator.client.set_on(hvac_mode != HVACMode.OFF) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="on_off_not_available" + ) from err + await self.coordinator.async_refresh() + + @property + def current_temperature(self) -> float | None: + """Return current temperature.""" + return self.coordinator.client.room_temperature + + @property + def target_temperature(self) -> int | None: + """Return the temperature.""" + return self.coordinator.client.target_temperature + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new temperature.""" + temperature = int(kwargs[ATTR_TEMPERATURE]) + try: + await self.coordinator.client.set_target_temperature(temperature) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_target_temperature", + translation_placeholders={ + "value": str(temperature), + }, + ) from err + await self.coordinator.async_refresh() + + @property + def fan_mode(self) -> str | None: + """Return the fan mode.""" + api_state = self.coordinator.client.fan_speed + return FAN_MODES[api_state] + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set new fan mode.""" + try: + if fan_mode == FAN_SILENT: + await self.coordinator.client.set_fan_silent() + elif fan_mode == FAN_HIGH: + await self.coordinator.client.set_fan_high() + elif fan_mode == FAN_AUTO: + await self.coordinator.client.set_fan_auto() + else: + await self.coordinator.client.set_fan_speed(FAN_MODES.index(fan_mode)) + except CommunicationError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="cannot_connect" + ) from err + except ValidationError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_fan_mode", + translation_placeholders={ + "value": fan_mode, + }, + ) from err + await self.coordinator.async_refresh() diff --git a/homeassistant/components/palazzetti/config_flow.py b/homeassistant/components/palazzetti/config_flow.py new file mode 100644 index 00000000000000..fe892b6624dd72 --- /dev/null +++ b/homeassistant/components/palazzetti/config_flow.py @@ -0,0 +1,91 @@ +"""Config flow for Palazzetti.""" + +from typing import Any + +from pypalazzetti.client import PalazzettiClient +from pypalazzetti.exceptions import CommunicationError +import voluptuous as vol + +from homeassistant.components import dhcp +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN, LOGGER + + +class PalazzettiConfigFlow(ConfigFlow, domain=DOMAIN): + """Palazzetti config flow.""" + + _discovered_device: PalazzettiClient + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """User configuration step.""" + errors: dict[str, str] = {} + if user_input is not None: + host = 
user_input[CONF_HOST] + client = PalazzettiClient(hostname=host) + try: + await client.connect() + except CommunicationError: + LOGGER.exception("Communication error") + errors["base"] = "cannot_connect" + else: + formatted_mac = dr.format_mac(client.mac) + + # Assign a unique ID to the flow + await self.async_set_unique_id(formatted_mac) + + # Abort the flow if a config entry with the same unique ID exists + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=client.name, + data=user_input, + ) + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_HOST): str}), + errors=errors, + ) + + async def async_step_dhcp( + self, discovery_info: dhcp.DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle DHCP discovery.""" + + LOGGER.debug( + "DHCP discovery detected Palazzetti: %s", discovery_info.macaddress + ) + + await self.async_set_unique_id(dr.format_mac(discovery_info.macaddress)) + self._abort_if_unique_id_configured() + self._discovered_device = PalazzettiClient(hostname=discovery_info.ip) + try: + await self._discovered_device.connect() + except CommunicationError: + return self.async_abort(reason="cannot_connect") + + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + if user_input is not None: + return self.async_create_entry( + title=self._discovered_device.name, + data={CONF_HOST: self._discovered_device.host}, + ) + + self._set_confirm_only() + return self.async_show_form( + step_id="discovery_confirm", + description_placeholders={ + "name": self._discovered_device.name, + "host": self._discovered_device.host, + }, + ) diff --git a/homeassistant/components/palazzetti/const.py b/homeassistant/components/palazzetti/const.py new file mode 100644 index 00000000000000..4cb8b1f14a64ba --- /dev/null +++ b/homeassistant/components/palazzetti/const.py @@ -0,0 +1,19 @@ +"""Constants for the Palazzetti integration.""" + +from datetime import timedelta +import logging +from typing import Final + +DOMAIN: Final = "palazzetti" +PALAZZETTI: Final = "Palazzetti" +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(seconds=30) +ON_OFF_NOT_AVAILABLE = "on_off_not_available" +ERROR_INVALID_FAN_MODE = "invalid_fan_mode" +ERROR_INVALID_TARGET_TEMPERATURE = "invalid_target_temperature" +ERROR_CANNOT_CONNECT = "cannot_connect" + +FAN_SILENT: Final = "silent" +FAN_HIGH: Final = "high" +FAN_AUTO: Final = "auto" +FAN_MODES: Final = [FAN_SILENT, "1", "2", "3", "4", "5", FAN_HIGH, FAN_AUTO] diff --git a/homeassistant/components/palazzetti/coordinator.py b/homeassistant/components/palazzetti/coordinator.py new file mode 100644 index 00000000000000..d992bd3fb62490 --- /dev/null +++ b/homeassistant/components/palazzetti/coordinator.py @@ -0,0 +1,47 @@ +"""Helpers to help coordinate updates.""" + +from pypalazzetti.client import PalazzettiClient +from pypalazzetti.exceptions import CommunicationError, ValidationError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + +type PalazzettiConfigEntry = ConfigEntry[PalazzettiDataUpdateCoordinator] + + +class PalazzettiDataUpdateCoordinator(DataUpdateCoordinator[None]): + """Class to manage fetching Palazzetti 
data from a Palazzetti hub.""" + + config_entry: PalazzettiConfigEntry + client: PalazzettiClient + + def __init__( + self, + hass: HomeAssistant, + ) -> None: + """Initialize global Palazzetti data updater.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = PalazzettiClient(self.config_entry.data[CONF_HOST]) + + async def _async_setup(self) -> None: + try: + await self.client.connect() + await self.client.update_state() + except (CommunicationError, ValidationError) as err: + raise UpdateFailed(f"Error communicating with the API: {err}") from err + + async def _async_update_data(self) -> None: + """Fetch data from Palazzetti.""" + try: + await self.client.update_state() + except (CommunicationError, ValidationError) as err: + raise UpdateFailed(f"Error communicating with the API: {err}") from err diff --git a/homeassistant/components/palazzetti/manifest.json b/homeassistant/components/palazzetti/manifest.json new file mode 100644 index 00000000000000..aff82275e2efc7 --- /dev/null +++ b/homeassistant/components/palazzetti/manifest.json @@ -0,0 +1,19 @@ +{ + "domain": "palazzetti", + "name": "Palazzetti", + "codeowners": ["@dotvav"], + "config_flow": true, + "dhcp": [ + { + "hostname": "connbox*", + "macaddress": "40F3857*" + }, + { + "registered_devices": true + } + ], + "documentation": "https://www.home-assistant.io/integrations/palazzetti", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": ["pypalazzetti==0.1.11"] +} diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json new file mode 100644 index 00000000000000..cc10c8ed5c63a9 --- /dev/null +++ b/homeassistant/components/palazzetti/strings.json @@ -0,0 +1,52 @@ +{ + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The host name or the IP address of the Palazzetti CBox" + } + }, + "discovery_confirm": { + "description": "Do you want to add {name} ({host}) to Home Assistant?" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + } + }, + "exceptions": { + "on_off_not_available": { + "message": "The appliance cannot be turned on or off." + }, + "invalid_fan_mode": { + "message": "Fan mode {value} is invalid." + }, + "invalid_target_temperatures": { + "message": "Target temperature {value} is invalid." + }, + "cannot_connect": { + "message": "Could not connect to the device." 
+ } + }, + "entity": { + "climate": { + "palazzetti": { + "state_attributes": { + "fan_mode": { + "state": { + "silent": "Silent", + "auto": "Auto", + "high": "High" + } + } + } + } + } + } +} diff --git a/homeassistant/components/panel_iframe/__init__.py b/homeassistant/components/panel_iframe/__init__.py deleted file mode 100644 index 1b6dfebd6b05b5..00000000000000 --- a/homeassistant/components/panel_iframe/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Register an iFrame front end panel.""" - -import voluptuous as vol - -from homeassistant.components import lovelace -from homeassistant.components.lovelace import dashboard -from homeassistant.const import CONF_ICON, CONF_URL -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.storage import Store -from homeassistant.helpers.typing import ConfigType - -DOMAIN = "panel_iframe" - -CONF_TITLE = "title" - -CONF_RELATIVE_URL_ERROR_MSG = "Invalid relative URL. Absolute path required." -CONF_RELATIVE_URL_REGEX = r"\A/" -CONF_REQUIRE_ADMIN = "require_admin" - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: cv.schema_with_slug_keys( - vol.Schema( - { - vol.Optional(CONF_TITLE): cv.string, - vol.Optional(CONF_ICON): cv.icon, - vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, - vol.Required(CONF_URL): vol.Any( - vol.Match( - CONF_RELATIVE_URL_REGEX, msg=CONF_RELATIVE_URL_ERROR_MSG - ), - vol.Url(), - ), - } - ) - ) - }, - extra=vol.ALLOW_EXTRA, -) - -STORAGE_KEY = DOMAIN -STORAGE_VERSION_MAJOR = 1 - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the iFrame frontend panels.""" - async_create_issue( - hass, - DOMAIN, - "deprecated_yaml", - breaks_in_ha_version="2024.10.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "iframe Panel", - }, - ) - - store: Store[dict[str, bool]] = Store( - hass, - STORAGE_VERSION_MAJOR, - STORAGE_KEY, - ) - data = await store.async_load() - if data: - return True - - dashboards_collection: dashboard.DashboardsCollection = hass.data[lovelace.DOMAIN][ - "dashboards_collection" - ] - - for url_path, info in config[DOMAIN].items(): - dashboard_create_data = { - lovelace.CONF_ALLOW_SINGLE_WORD: True, - lovelace.CONF_URL_PATH: url_path, - } - for key in (CONF_ICON, CONF_REQUIRE_ADMIN, CONF_TITLE): - if key in info: - dashboard_create_data[key] = info[key] - - await dashboards_collection.async_create_item(dashboard_create_data) - - dashboard_store: dashboard.LovelaceStorage = hass.data[lovelace.DOMAIN][ - "dashboards" - ][url_path] - await dashboard_store.async_save( - {"strategy": {"type": "iframe", "url": info[CONF_URL]}} - ) - - await store.async_save({"migrated": True}) - - return True diff --git a/homeassistant/components/panel_iframe/manifest.json b/homeassistant/components/panel_iframe/manifest.json deleted file mode 100644 index 7a39e0ba17dc3e..00000000000000 --- a/homeassistant/components/panel_iframe/manifest.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "domain": "panel_iframe", - "name": "iframe Panel", - "codeowners": ["@home-assistant/frontend"], - "dependencies": ["frontend", "lovelace"], - "documentation": "https://www.home-assistant.io/integrations/panel_iframe", - "quality_scale": "internal" -} diff --git 
a/homeassistant/components/panel_iframe/strings.json b/homeassistant/components/panel_iframe/strings.json deleted file mode 100644 index 595b1f04818c78..00000000000000 --- a/homeassistant/components/panel_iframe/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "deprecated_yaml": { - "title": "The {integration_title} YAML configuration is being removed", - "description": "Configuring {integration_title} using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically as a regular dashboard.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." - } - } -} diff --git a/homeassistant/components/peco/__init__.py b/homeassistant/components/peco/__init__.py index 12979f27793d3c..1de5d4bb6a20e5 100644 --- a/homeassistant/components/peco/__init__.py +++ b/homeassistant/components/peco/__init__.py @@ -68,6 +68,7 @@ async def async_update_outage_data() -> PECOCoordinatorData: outage_coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name="PECO Outage Count", update_method=async_update_outage_data, update_interval=timedelta(minutes=OUTAGE_SCAN_INTERVAL), @@ -97,6 +98,7 @@ async def async_update_meter_data() -> bool: meter_coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name="PECO Smart Meter", update_method=async_update_meter_data, update_interval=timedelta(minutes=SMART_METER_SCAN_INTERVAL), diff --git a/homeassistant/components/pegel_online/diagnostics.py b/homeassistant/components/pegel_online/diagnostics.py new file mode 100644 index 00000000000000..b68437c5ee77a8 --- /dev/null +++ b/homeassistant/components/pegel_online/diagnostics.py @@ -0,0 +1,21 @@ +"""Diagnostics support for pegel_online.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import PegelOnlineConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PegelOnlineConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + + return { + "entry": entry.as_dict(), + "data": coordinator.data, + } diff --git a/homeassistant/components/pegel_online/entity.py b/homeassistant/components/pegel_online/entity.py index 4ad12f12913006..4e157a5f63b997 100644 --- a/homeassistant/components/pegel_online/entity.py +++ b/homeassistant/components/pegel_online/entity.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN @@ -29,4 +29,5 @@ def device_info(self) -> DeviceInfo: name=f"{self.station.name} {self.station.water_name}", manufacturer=self.station.agency, configuration_url=self.station.base_data_url, + entry_type=DeviceEntryType.SERVICE, ) diff --git a/homeassistant/components/permobil/strings.json b/homeassistant/components/permobil/strings.json index cbce8d5d86f97d..0b55162b53e02f 100644 --- a/homeassistant/components/permobil/strings.json +++ b/homeassistant/components/permobil/strings.json @@ -15,13 +15,14 @@ "region": { "description": "Select the region of your account.", "data": { - "code": "Region" + "region": "Region" } } }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "error": { "unknown": "Unexpected error, more information in the logs", diff --git a/homeassistant/components/persistent_notification/__init__.py b/homeassistant/components/persistent_notification/__init__.py index a785d015ffb620..a5eb8bb4f4d0df 100644 --- a/homeassistant/components/persistent_notification/__init__.py +++ b/homeassistant/components/persistent_notification/__init__.py @@ -184,8 +184,8 @@ def dismiss_all_service(call: ServiceCall) -> None: create_service, vol.Schema( { - vol.Required(ATTR_MESSAGE): vol.Any(cv.dynamic_template, cv.string), - vol.Optional(ATTR_TITLE): vol.Any(cv.dynamic_template, cv.string), + vol.Required(ATTR_MESSAGE): cv.string, + vol.Optional(ATTR_TITLE): cv.string, vol.Optional(ATTR_NOTIFICATION_ID): cv.string, } ), diff --git a/homeassistant/components/philips_js/config_flow.py b/homeassistant/components/philips_js/config_flow.py index a73145f7c1c8ff..66b4439acd8fa5 100644 --- a/homeassistant/components/philips_js/config_flow.py +++ b/homeassistant/components/philips_js/config_flow.py @@ -9,7 +9,12 @@ from haphilipsjs import ConnectionFailure, PairingFailure, PhilipsTV import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + ConfigEntry, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import ( CONF_API_VERSION, CONF_HOST, @@ -75,18 +80,13 @@ def __init__(self) -> None: self._current: dict[str, Any] = {} self._hub: PhilipsTV | None = None self._pair_state: Any = None - self._entry: ConfigEntry | None = None async def _async_create_current(self) -> ConfigFlowResult: system = self._current[CONF_SYSTEM] - if self._entry: - 
self.hass.config_entries.async_update_entry( - self._entry, data=self._entry.data | self._current - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._entry.entry_id) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=self._current ) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=f"{system['name']} ({system['serialnumber']})", @@ -150,7 +150,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) self._current[CONF_HOST] = entry_data[CONF_HOST] self._current[CONF_API_VERSION] = entry_data[CONF_API_VERSION] return await self.async_step_user() @@ -175,7 +174,7 @@ async def async_step_user( else: if serialnumber := hub.system.get("serialnumber"): await self.async_set_unique_id(serialnumber) - if self._entry is None: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() self._current[CONF_SYSTEM] = hub.system diff --git a/homeassistant/components/pi_hole/__init__.py b/homeassistant/components/pi_hole/__init__.py index 64e73a20c59eae..5cc21cef3a9359 100644 --- a/homeassistant/components/pi_hole/__init__.py +++ b/homeassistant/components/pi_hole/__init__.py @@ -118,6 +118,7 @@ async def async_update_data() -> None: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=name, update_method=async_update_data, update_interval=MIN_TIME_BETWEEN_UPDATES, diff --git a/homeassistant/components/pi_hole/config_flow.py b/homeassistant/components/pi_hole/config_flow.py index d6f42d57debf92..e50b018caa474b 100644 --- a/homeassistant/components/pi_hole/config_flow.py +++ b/homeassistant/components/pi_hole/config_flow.py @@ -136,15 +136,9 @@ async def async_step_reauth_confirm( if user_input is not None: self._config = {**self._config, CONF_API_KEY: user_input[CONF_API_KEY]} if not (errors := await self._async_try_connect()): - entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._config ) - assert entry - self.hass.config_entries.async_update_entry(entry, data=self._config) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.context["entry_id"]) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/ping/config_flow.py b/homeassistant/components/ping/config_flow.py index 9470b2134d4426..4f2adb0d2c0fa9 100644 --- a/homeassistant/components/ping/config_flow.py +++ b/homeassistant/components/ping/config_flow.py @@ -66,16 +66,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an options flow for Ping.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/plaato/config_flow.py b/homeassistant/components/plaato/config_flow.py index 74967c417a4672..f398a733cd6773 100644 --- a/homeassistant/components/plaato/config_flow.py +++ 
b/homeassistant/components/plaato/config_flow.py @@ -176,23 +176,19 @@ def _get_error(device_type: PlaatoDeviceType): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> PlaatoOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> PlaatoOptionsFlowHandler: """Get the options flow for this handler.""" - return PlaatoOptionsFlowHandler(config_entry) + return PlaatoOptionsFlowHandler() class PlaatoOptionsFlowHandler(OptionsFlow): """Handle Plaato options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize domain options flow.""" - super().__init__() - - self._config_entry = config_entry - async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the options.""" - use_webhook = self._config_entry.data.get(CONF_USE_WEBHOOK, False) + use_webhook = self.config_entry.data.get(CONF_USE_WEBHOOK, False) if use_webhook: return await self.async_step_webhook() @@ -211,7 +207,7 @@ async def async_step_user( { vol.Optional( CONF_SCAN_INTERVAL, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ), ): cv.positive_int @@ -226,7 +222,7 @@ async def async_step_webhook( if user_input is not None: return self.async_create_entry(title="", data=user_input) - webhook_id = self._config_entry.data.get(CONF_WEBHOOK_ID, None) + webhook_id = self.config_entry.data.get(CONF_WEBHOOK_ID, None) webhook_url = ( "" if webhook_id is None diff --git a/homeassistant/components/plant/__init__.py b/homeassistant/components/plant/__init__.py index c6e527290df8ee..48c606865df4a8 100644 --- a/homeassistant/components/plant/__init__.py +++ b/homeassistant/components/plant/__init__.py @@ -155,7 +155,7 @@ class Plant(Entity): "max": CONF_MAX_MOISTURE, }, READING_CONDUCTIVITY: { - ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS, + ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM, "min": CONF_MIN_CONDUCTIVITY, "max": CONF_MAX_CONDUCTIVITY, }, diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index fcd5751effb352..ae7cbb1257468b 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -import copy +from copy import deepcopy import logging from typing import TYPE_CHECKING, Any @@ -385,7 +385,7 @@ class PlexOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Plex options flow.""" - self.options = copy.deepcopy(dict(config_entry.options)) + self.options = deepcopy(dict(config_entry.options)) self.server_id = config_entry.data[CONF_SERVER_IDENTIFIER] async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: diff --git a/homeassistant/components/plex/services.py b/homeassistant/components/plex/services.py index cbf72966413006..c70ddb6ed53cfc 100644 --- a/homeassistant/components/plex/services.py +++ b/homeassistant/components/plex/services.py @@ -133,6 +133,8 @@ def process_plex_payload( elif content_id.startswith(PLEX_URI_SCHEME): # Handle standard media_browser payloads plex_url = URL(content_id) + # https://github.com/pylint-dev/pylint/issues/3484 + # pylint: disable-next=using-constant-test if plex_url.name: if len(plex_url.parts) == 2: if plex_url.name == "search": diff --git a/homeassistant/components/plugwise/__init__.py 
b/homeassistant/components/plugwise/__init__.py index f7677e39f7a9d2..7d1b9ceac8a310 100644 --- a/homeassistant/components/plugwise/__init__.py +++ b/homeassistant/components/plugwise/__init__.py @@ -33,7 +33,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PlugwiseConfigEntry) -> model=coordinator.api.smile_model, model_id=coordinator.api.smile_model_id, name=coordinator.api.smile_name, - sw_version=coordinator.api.smile_version[0], + sw_version=str(coordinator.api.smile_version), ) # required for adding the entity-less P1 Gateway await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/plugwise/config_flow.py b/homeassistant/components/plugwise/config_flow.py index b0d68aaa33be9a..57abb1ccb863f3 100644 --- a/homeassistant/components/plugwise/config_flow.py +++ b/homeassistant/components/plugwise/config_flow.py @@ -71,7 +71,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: password=data[CONF_PASSWORD], port=data[CONF_PORT], username=data[CONF_USERNAME], - timeout=30, websession=websession, ) await api.connect() diff --git a/homeassistant/components/plugwise/coordinator.py b/homeassistant/components/plugwise/coordinator.py index c3fe33c64d2475..b897a8bf8336eb 100644 --- a/homeassistant/components/plugwise/coordinator.py +++ b/homeassistant/components/plugwise/coordinator.py @@ -2,6 +2,7 @@ from datetime import timedelta +from packaging.version import Version from plugwise import PlugwiseData, Smile from plugwise.exceptions import ( ConnectionFailedError, @@ -53,7 +54,6 @@ def __init__(self, hass: HomeAssistant) -> None: username=self.config_entry.data.get(CONF_USERNAME, DEFAULT_USERNAME), password=self.config_entry.data[CONF_PASSWORD], port=self.config_entry.data.get(CONF_PORT, DEFAULT_PORT), - timeout=30, websession=async_get_clientsession(hass, verify_ssl=False), ) self._current_devices: set[str] = set() @@ -61,8 +61,10 @@ def __init__(self, hass: HomeAssistant) -> None: async def _connect(self) -> None: """Connect to the Plugwise Smile.""" - self._connected = await self.api.connect() - self.api.get_all_devices() + version = await self.api.connect() + self._connected = isinstance(version, Version) + if self._connected: + self.api.get_all_devices() async def _async_update_data(self) -> PlugwiseData: """Fetch data from Plugwise.""" diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index b1ce8961110a50..dbbad15c0dca70 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.4.0"], + "requirements": ["plugwise==1.5.0"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/homeassistant/components/point/alarm_control_panel.py b/homeassistant/components/point/alarm_control_panel.py index 3657bad28ae0a1..4e4e4238176521 100644 --- a/homeassistant/components/point/alarm_control_panel.py +++ b/homeassistant/components/point/alarm_control_panel.py @@ -9,13 +9,9 @@ DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import 
DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -28,9 +24,9 @@ EVENT_MAP = { - "off": STATE_ALARM_DISARMED, - "alarm_silenced": STATE_ALARM_DISARMED, - "alarm_grace_period_expired": STATE_ALARM_TRIGGERED, + "off": AlarmControlPanelState.DISARMED, + "alarm_silenced": AlarmControlPanelState.DISARMED, + "alarm_grace_period_expired": AlarmControlPanelState.TRIGGERED, } @@ -103,9 +99,11 @@ def _webhook_event(self, data, webhook): self.async_write_ha_state() @property - def state(self) -> str: + def alarm_state(self) -> AlarmControlPanelState: """Return state of the device.""" - return EVENT_MAP.get(self._home["alarm_status"], STATE_ALARM_ARMED_AWAY) + return EVENT_MAP.get( + self._home["alarm_status"], AlarmControlPanelState.ARMED_AWAY + ) async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" diff --git a/homeassistant/components/point/api.py b/homeassistant/components/point/api.py index b55a7704cbf3ba..cd854c2b7ec146 100644 --- a/homeassistant/components/point/api.py +++ b/homeassistant/components/point/api.py @@ -20,7 +20,6 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] diff --git a/homeassistant/components/point/config_flow.py b/homeassistant/components/point/config_flow.py index 0e4f88ab5786f0..a0a51c7b9e6949 100644 --- a/homeassistant/components/point/config_flow.py +++ b/homeassistant/components/point/config_flow.py @@ -5,7 +5,7 @@ from typing import Any from homeassistant.components.webhook import async_generate_id -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler @@ -17,8 +17,6 @@ class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -32,9 +30,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -48,8 +43,8 @@ async def async_step_reauth_confirm( async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an oauth config entry or update existing entry for reauth.""" user_id = str(data[CONF_TOKEN]["user_id"]) - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( @@ -57,15 +52,11 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu data={**data, CONF_WEBHOOK_ID: async_generate_id()}, ) - if ( - self.reauth_entry.unique_id is None - or self.reauth_entry.unique_id == user_id - ): - logging.debug("user_id: %s", user_id) - return self.async_update_reload_and_abort( - self.reauth_entry, - data={**self.reauth_entry.data, **data}, - unique_id=user_id, - ) + reauth_entry = self._get_reauth_entry() + 
if reauth_entry.unique_id is not None: + self._abort_if_unique_id_mismatch(reason="wrong_account") - return self.async_abort(reason="wrong_account") + logging.debug("user_id: %s", user_id) + return self.async_update_reload_and_abort( + reauth_entry, data_updates=data, unique_id=user_id + ) diff --git a/homeassistant/components/powerwall/__init__.py b/homeassistant/components/powerwall/__init__.py index 0b6f889b90a40a..6a2522ac43b07b 100644 --- a/homeassistant/components/powerwall/__init__.py +++ b/homeassistant/components/powerwall/__init__.py @@ -168,6 +168,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PowerwallConfigEntry) -> coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="Powerwall site", update_method=manager.async_update_data, update_interval=timedelta(seconds=UPDATE_INTERVAL), diff --git a/homeassistant/components/powerwall/config_flow.py b/homeassistant/components/powerwall/config_flow.py index 5d832cb6ae4099..0c39392ca19b74 100644 --- a/homeassistant/components/powerwall/config_flow.py +++ b/homeassistant/components/powerwall/config_flow.py @@ -99,7 +99,6 @@ def __init__(self) -> None: """Initialize the powerwall flow.""" self.ip_address: str | None = None self.title: str | None = None - self.reauth_entry: ConfigEntry | None = None async def _async_powerwall_is_offline(self, entry: ConfigEntry) -> bool: """Check if the power wall is offline. @@ -250,19 +249,22 @@ async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirmation.""" - assert self.reauth_entry is not None errors: dict[str, str] | None = {} description_placeholders: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: - entry_data = self.reauth_entry.data errors, _, description_placeholders = await self._async_try_connect( - {CONF_IP_ADDRESS: entry_data[CONF_IP_ADDRESS], **user_input} + {CONF_IP_ADDRESS: reauth_entry.data[CONF_IP_ADDRESS], **user_input} ) if not errors: return self.async_update_reload_and_abort( - self.reauth_entry, data={**entry_data, **user_input} + reauth_entry, data_updates=user_input ) + self.context["title_placeholders"] = { + "name": reauth_entry.title, + "ip_address": reauth_entry.data[CONF_IP_ADDRESS], + } return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema({vol.Optional(CONF_PASSWORD): str}), @@ -274,9 +276,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() diff --git a/homeassistant/components/profiler/__init__.py b/homeassistant/components/profiler/__init__.py index 9b2b97365746e9..389e3384ad9b90 100644 --- a/homeassistant/components/profiler/__init__.py +++ b/homeassistant/components/profiler/__init__.py @@ -436,6 +436,10 @@ async def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall) # Imports deferred to avoid loading modules # in memory since usually only one part of this # integration is used at a time + if sys.version_info >= (3, 13): + raise HomeAssistantError( + "Memory profiling is not supported on Python 3.13. Please use Python 3.12." 
+ ) from guppy import hpy # pylint: disable=import-outside-toplevel start_time = int(time.time() * 1000000) diff --git a/homeassistant/components/profiler/config_flow.py b/homeassistant/components/profiler/config_flow.py index 19995cf79aaa65..766d847e4a4249 100644 --- a/homeassistant/components/profiler/config_flow.py +++ b/homeassistant/components/profiler/config_flow.py @@ -16,9 +16,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry(title=DEFAULT_NAME, data={}) diff --git a/homeassistant/components/profiler/manifest.json b/homeassistant/components/profiler/manifest.json index ceaab458e69b48..8d2814c8c7f58b 100644 --- a/homeassistant/components/profiler/manifest.json +++ b/homeassistant/components/profiler/manifest.json @@ -7,7 +7,8 @@ "quality_scale": "internal", "requirements": [ "pyprof2calltree==1.4.5", - "guppy3==3.1.4.post1", + "guppy3==3.1.4.post1;python_version<'3.13'", "objgraph==3.5.0" - ] + ], + "single_config_entry": true } diff --git a/homeassistant/components/profiler/strings.json b/homeassistant/components/profiler/strings.json index 7a31c567040a31..f363b5a22cbfa5 100644 --- a/homeassistant/components/profiler/strings.json +++ b/homeassistant/components/profiler/strings.json @@ -4,9 +4,6 @@ "user": { "description": "[%key:common::config_flow::description::confirm_setup%]" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "services": { diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index 8cc0a8f4b6a6db..c243bf90dc0fff 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -14,6 +14,7 @@ import voluptuous as vol from homeassistant import core as hacore +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, @@ -51,16 +52,6 @@ CONTENT_TYPE_TEXT_PLAIN, EVENT_STATE_CHANGED, PERCENTAGE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_CLOSING, STATE_ON, @@ -85,6 +76,8 @@ _LOGGER = logging.getLogger(__name__) API_ENDPOINT = "/api/prometheus" +IGNORED_STATES = frozenset({STATE_UNAVAILABLE, STATE_UNKNOWN}) + DOMAIN = "prometheus" CONF_FILTER = "filter" @@ -98,6 +91,7 @@ COMPONENT_CONFIG_SCHEMA_ENTRY = vol.Schema( {vol.Optional(CONF_OVERRIDE_METRIC): cv.string} ) +ALLOWED_METRIC_CHARS = set(string.ascii_letters + string.digits + "_:") DEFAULT_NAMESPACE = "homeassistant" @@ -219,14 +213,6 @@ def handle_state(self, state: State) -> None: """Add/update a state in Prometheus.""" entity_id = state.entity_id _LOGGER.debug("Handling state update for %s", entity_id) - domain, _ = hacore.split_entity_id(entity_id) - - ignored_states = (STATE_UNAVAILABLE, STATE_UNKNOWN) - - handler = f"_handle_{domain}" - - if hasattr(self, handler) and state.state not in ignored_states: - getattr(self, handler)(state) labels = self._labels(state) state_change = self._metric( @@ -239,7 +225,7 @@ def handle_state(self, state: State) -> None: 
prometheus_client.Gauge, "Entity is available (not in the unavailable or unknown state)", ) - entity_available.labels(**labels).set(float(state.state not in ignored_states)) + entity_available.labels(**labels).set(float(state.state not in IGNORED_STATES)) last_updated_time_seconds = self._metric( "last_updated_time_seconds", @@ -248,6 +234,18 @@ def handle_state(self, state: State) -> None: ) last_updated_time_seconds.labels(**labels).set(state.last_updated.timestamp()) + if state.state in IGNORED_STATES: + self._remove_labelsets( + entity_id, + None, + {state_change, entity_available, last_updated_time_seconds}, + ) + else: + domain, _ = hacore.split_entity_id(entity_id) + handler = f"_handle_{domain}" + if hasattr(self, handler) and state.state: + getattr(self, handler)(state) + def handle_entity_registry_updated( self, event: Event[EventEntityRegistryUpdatedData] ) -> None: @@ -274,10 +272,17 @@ def handle_entity_registry_updated( self._remove_labelsets(metrics_entity_id) def _remove_labelsets( - self, entity_id: str, friendly_name: str | None = None + self, + entity_id: str, + friendly_name: str | None = None, + ignored_metrics: set[MetricWrapperBase] | None = None, ) -> None: - """Remove labelsets matching the given entity id from all metrics.""" + """Remove labelsets matching the given entity id from all non-ignored metrics.""" + if ignored_metrics is None: + ignored_metrics = set() for metric in list(self._metrics.values()): + if metric in ignored_metrics: + continue for sample in cast(list[prometheus_client.Metric], metric.collect())[ 0 ].samples: @@ -334,17 +339,12 @@ def _metric[_MetricBaseT: MetricWrapperBase]( @staticmethod def _sanitize_metric_name(metric: str) -> str: return "".join( - [ - c - if c in string.ascii_letters + string.digits + "_:" - else f"u{hex(ord(c))}" - for c in metric - ] + [c if c in ALLOWED_METRIC_CHARS else f"u{hex(ord(c))}" for c in metric] ) @staticmethod - def state_as_number(state: State) -> float: - """Return a state casted to a float.""" + def state_as_number(state: State) -> float | None: + """Return state as a float, or None if state cannot be converted.""" try: if state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP: value = as_timestamp(state.state) @@ -352,7 +352,7 @@ def state_as_number(state: State) -> float: value = state_helper.state_as_number(state) except ValueError: _LOGGER.debug("Could not convert %s to float", state) - value = 0 + value = None return value @staticmethod @@ -382,8 +382,8 @@ def _handle_binary_sensor(self, state: State) -> None: prometheus_client.Gauge, "State of the binary sensor (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_input_boolean(self, state: State) -> None: metric = self._metric( @@ -391,8 +391,8 @@ def _handle_input_boolean(self, state: State) -> None: prometheus_client.Gauge, "State of the input boolean (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _numeric_handler(self, state: State, domain: str, title: str) -> None: if unit := self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)): @@ -408,8 +408,7 @@ def _numeric_handler(self, state: State, domain: str, title: str) -> None: f"State of the {title}", ) - with suppress(ValueError): - value = 
self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: if ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.FAHRENHEIT @@ -431,15 +430,15 @@ def _handle_device_tracker(self, state: State) -> None: prometheus_client.Gauge, "State of the device tracker (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_person(self, state: State) -> None: metric = self._metric( "person_state", prometheus_client.Gauge, "State of the person (0/1)" ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_cover(self, state: State) -> None: metric = self._metric( @@ -480,23 +479,19 @@ def _handle_light(self, state: State) -> None: "Light brightness percentage (0..100)", ) - try: + if (value := self.state_as_number(state)) is not None: brightness = state.attributes.get(ATTR_BRIGHTNESS) if state.state == STATE_ON and brightness is not None: - value = brightness / 255.0 - else: - value = self.state_as_number(state) + value = float(brightness) / 255.0 value = value * 100 metric.labels(**self._labels(state)).set(value) - except ValueError: - pass def _handle_lock(self, state: State) -> None: metric = self._metric( "lock_state", prometheus_client.Gauge, "State of the lock (0/1)" ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_climate_temp( self, state: State, attr: str, metric_name: str, metric_description: str @@ -608,11 +603,8 @@ def _handle_humidifier(self, state: State) -> None: prometheus_client.Gauge, "State of the humidifier (0/1)", ) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: metric.labels(**self._labels(state)).set(value) - except ValueError: - pass current_mode = state.attributes.get(ATTR_MODE) available_modes = state.attributes.get(ATTR_AVAILABLE_MODES) @@ -643,8 +635,7 @@ def _handle_sensor(self, state: State) -> None: _metric = self._metric(metric, prometheus_client.Gauge, documentation) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: if ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.FAHRENHEIT @@ -653,8 +644,6 @@ def _handle_sensor(self, state: State) -> None: value, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS ) _metric.labels(**self._labels(state)).set(value) - except ValueError: - pass self._battery(state) @@ -687,20 +676,15 @@ def _sensor_override_metric(self, state: State, unit: str | None) -> str | None: def _sensor_override_component_metric( self, state: State, unit: str | None ) -> str | None: - """Get metric from override in component confioguration.""" + """Get metric from override in component configuration.""" return self._component_config.get(state.entity_id).get(CONF_OVERRIDE_METRIC) @staticmethod def _sensor_fallback_metric(state: State, unit: str | None) -> str | None: """Get metric from fallback logic for compatibility.""" - if unit in (None, ""): - try: - state_helper.state_as_number(state) - except ValueError: - _LOGGER.debug("Unsupported sensor: %s", state.entity_id) - return None - return "sensor_state" - return 
f"sensor_unit_{unit}" + if unit not in (None, ""): + return f"sensor_unit_{unit}" + return "sensor_state" @staticmethod def _unit_string(unit: str | None) -> str | None: @@ -722,11 +706,8 @@ def _handle_switch(self, state: State) -> None: "switch_state", prometheus_client.Gauge, "State of the switch (0/1)" ) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: metric.labels(**self._labels(state)).set(value) - except ValueError: - pass self._handle_attributes(state) @@ -735,11 +716,8 @@ def _handle_fan(self, state: State) -> None: "fan_state", prometheus_client.Gauge, "State of the fan (0/1)" ) - try: - value = self.state_as_number(state) + if (value := self.state_as_number(state)) is not None: metric.labels(**self._labels(state)).set(value) - except ValueError: - pass fan_speed_percent = state.attributes.get(ATTR_PERCENTAGE) if fan_speed_percent is not None: @@ -805,8 +783,8 @@ def _handle_counter(self, state: State) -> None: prometheus_client.Gauge, "Value of counter entities", ) - - metric.labels(**self._labels(state)).set(self.state_as_number(state)) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_update(self, state: State) -> None: metric = self._metric( @@ -814,8 +792,8 @@ def _handle_update(self, state: State) -> None: prometheus_client.Gauge, "Update state, indicating if an update is available (0/1)", ) - value = self.state_as_number(state) - metric.labels(**self._labels(state)).set(value) + if (value := self.state_as_number(state)) is not None: + metric.labels(**self._labels(state)).set(value) def _handle_alarm_control_panel(self, state: State) -> None: current_state = state.state @@ -828,22 +806,9 @@ def _handle_alarm_control_panel(self, state: State) -> None: ["state"], ) - alarm_states = [ - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - STATE_ALARM_PENDING, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMING, - ] - - for alarm_state in alarm_states: - metric.labels(**dict(self._labels(state), state=alarm_state)).set( - float(alarm_state == current_state) + for alarm_state in AlarmControlPanelState: + metric.labels(**dict(self._labels(state), state=alarm_state.value)).set( + float(alarm_state.value == current_state) ) diff --git a/homeassistant/components/prosegur/alarm_control_panel.py b/homeassistant/components/prosegur/alarm_control_panel.py index ffedcf30770541..1c58b64cf5529a 100644 --- a/homeassistant/components/prosegur/alarm_control_panel.py +++ b/homeassistant/components/prosegur/alarm_control_panel.py @@ -10,13 +10,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -26,10 +22,10 @@ _LOGGER = logging.getLogger(__name__) STATE_MAPPING = { - Status.DISARMED: STATE_ALARM_DISARMED, - Status.ARMED: STATE_ALARM_ARMED_AWAY, - Status.PARTIALLY: STATE_ALARM_ARMED_HOME, - Status.ERROR_PARTIALLY: STATE_ALARM_ARMED_HOME, + Status.DISARMED: AlarmControlPanelState.DISARMED, + Status.ARMED: 
AlarmControlPanelState.ARMED_AWAY, + Status.PARTIALLY: AlarmControlPanelState.ARMED_HOME, + Status.ERROR_PARTIALLY: AlarmControlPanelState.ARMED_HOME, } @@ -82,7 +78,7 @@ async def async_update(self) -> None: self._attr_available = False return - self._attr_state = STATE_MAPPING.get(self._installation.status) + self._attr_alarm_state = STATE_MAPPING.get(self._installation.status) self._attr_available = True async def async_alarm_disarm(self, code: str | None = None) -> None: diff --git a/homeassistant/components/prosegur/config_flow.py b/homeassistant/components/prosegur/config_flow.py index 7bd87e405ef392..74e4d2681446ae 100644 --- a/homeassistant/components/prosegur/config_flow.py +++ b/homeassistant/components/prosegur/config_flow.py @@ -2,13 +2,13 @@ from collections.abc import Mapping import logging -from typing import Any, cast +from typing import Any from pyprosegur.auth import COUNTRY, Auth from pyprosegur.installation import Installation import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -46,7 +46,6 @@ class ProsegurConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Prosegur Alarm.""" VERSION = 1 - entry: ConfigEntry auth: Auth user_input: dict contracts: list[dict[str, str]] @@ -110,10 +109,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Prosegur.""" - self.entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -122,9 +117,10 @@ async def async_step_reauth_confirm( """Handle re-authentication with Prosegur.""" errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input: try: - user_input[CONF_COUNTRY] = self.entry.data[CONF_COUNTRY] + user_input[CONF_COUNTRY] = reauth_entry.data[CONF_COUNTRY] self.auth, self.contracts = await validate_input(self.hass, user_input) except CannotConnect: @@ -135,25 +131,20 @@ async def async_step_reauth_confirm( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={ CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( { vol.Required( - CONF_USERNAME, default=self.entry.data[CONF_USERNAME] + CONF_USERNAME, default=reauth_entry.data[CONF_USERNAME] ): str, vol.Required(CONF_PASSWORD): str, } diff --git a/homeassistant/components/proximity/config_flow.py b/homeassistant/components/proximity/config_flow.py index 1758b182ad75a0..5818ec2979b7b0 100644 --- a/homeassistant/components/proximity/config_flow.py +++ b/homeassistant/components/proximity/config_flow.py @@ -89,7 +89,7 @@ def _user_form_schema(self, user_input: dict[str, Any] | None = None) -> vol.Sch @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the 
options flow for this handler.""" - return ProximityOptionsFlow(config_entry) + return ProximityOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -121,10 +121,6 @@ async def async_step_user( class ProximityOptionsFlow(OptionsFlow): """Handle a option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - def _user_form_schema(self, user_input: dict[str, Any]) -> vol.Schema: return vol.Schema(_base_schema(user_input)) diff --git a/homeassistant/components/proxy/manifest.json b/homeassistant/components/proxy/manifest.json index 1e70c4d3e103f6..f13799422dfb49 100644 --- a/homeassistant/components/proxy/manifest.json +++ b/homeassistant/components/proxy/manifest.json @@ -3,5 +3,5 @@ "name": "Camera Proxy", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/proxy", - "requirements": ["Pillow==10.4.0"] + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/purpleair/config_flow.py b/homeassistant/components/purpleair/config_flow.py index 050200f50d4f49..3ca7870b3cb37f 100644 --- a/homeassistant/components/purpleair/config_flow.py +++ b/homeassistant/components/purpleair/config_flow.py @@ -202,7 +202,6 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize.""" self._flow_data: dict[str, Any] = {} - self._reauth_entry: ConfigEntry | None = None @staticmethod @callback @@ -210,7 +209,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> PurpleAirOptionsFlowHandler: """Define the config flow to handle options.""" - return PurpleAirOptionsFlowHandler(config_entry) + return PurpleAirOptionsFlowHandler() async def async_step_by_coordinates( self, user_input: dict[str, Any] | None = None @@ -265,9 +264,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -289,15 +285,9 @@ async def async_step_reauth_confirm( errors=validation.errors, ) - assert self._reauth_entry - - self.hass.config_entries.async_update_entry( - self._reauth_entry, data={CONF_API_KEY: api_key} + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data={CONF_API_KEY: api_key} ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -325,10 +315,9 @@ async def async_step_user( class PurpleAirOptionsFlowHandler(OptionsFlow): """Handle a PurpleAir options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize.""" self._flow_data: dict[str, Any] = {} - self.config_entry = config_entry @property def settings_schema(self) -> vol.Schema: diff --git a/homeassistant/components/pvoutput/config_flow.py b/homeassistant/components/pvoutput/config_flow.py index 9d18952e7b4326..ad2d759056f496 100644 --- a/homeassistant/components/pvoutput/config_flow.py +++ b/homeassistant/components/pvoutput/config_flow.py @@ -8,7 +8,7 @@ from pvo import PVOutput, PVOutputAuthenticationError, PVOutputError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from 
homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -33,7 +33,6 @@ class PVOutputFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 imported_name: str | None = None - reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -88,9 +87,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with PVOutput.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -99,29 +95,22 @@ async def async_step_reauth_confirm( """Handle re-authentication with PVOutput.""" errors = {} - if user_input is not None and self.reauth_entry: + if user_input is not None: + reauth_entry = self._get_reauth_entry() try: await validate_input( self.hass, api_key=user_input[CONF_API_KEY], - system_id=self.reauth_entry.data[CONF_SYSTEM_ID], + system_id=reauth_entry.data[CONF_SYSTEM_ID], ) except PVOutputAuthenticationError: errors["base"] = "invalid_auth" except PVOutputError: errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, - CONF_API_KEY: user_input[CONF_API_KEY], - }, - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/pvpc_hourly_pricing/config_flow.py b/homeassistant/components/pvpc_hourly_pricing/config_flow.py index 239e1bcb0e922a..3c6b510004a66c 100644 --- a/homeassistant/components/pvpc_hourly_pricing/config_flow.py +++ b/homeassistant/components/pvpc_hourly_pricing/config_flow.py @@ -9,10 +9,11 @@ import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import callback @@ -48,7 +49,6 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): _use_api_token: bool = False _api_token: str | None = None _api: PVPCData | None = None - _reauth_entry: ConfigEntry | None = None @staticmethod @callback @@ -56,7 +56,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> PVPCOptionsFlowHandler: """Get the options flow for this handler.""" - return PVPCOptionsFlowHandler(config_entry) + return PVPCOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -141,12 +141,10 @@ async def _async_verify( ATTR_POWER_P3: self._power_p3, CONF_API_TOKEN: self._api_token if self._use_api_token else None, } - if self._reauth_entry: - self.hass.config_entries.async_update_entry(self._reauth_entry, data=data) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data ) - return self.async_abort(reason="reauth_successful") assert self._name is not None return self.async_create_entry(title=self._name, data=data) @@ -155,9 
+153,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with ESIOS Token.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) self._api_token = entry_data.get(CONF_API_TOKEN) self._use_api_token = self._api_token is not None self._name = entry_data[CONF_NAME] @@ -183,7 +178,7 @@ async def async_step_reauth_confirm( return self.async_show_form(step_id="reauth_confirm", data_schema=data_schema) -class PVPCOptionsFlowHandler(OptionsFlowWithConfigEntry): +class PVPCOptionsFlowHandler(OptionsFlow): """Handle PVPC options.""" _power: float | None = None @@ -204,7 +199,7 @@ async def async_step_api_token( ) # Fill options with entry data - api_token = self.options.get( + api_token = self.config_entry.options.get( CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) ) return self.async_show_form( @@ -234,13 +229,11 @@ async def async_step_init( ) # Fill options with entry data - power = self.options.get(ATTR_POWER, self.config_entry.data[ATTR_POWER]) - power_valley = self.options.get( - ATTR_POWER_P3, self.config_entry.data[ATTR_POWER_P3] - ) - api_token = self.options.get( - CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) - ) + options = self.config_entry.options + data = self.config_entry.data + power = options.get(ATTR_POWER, data[ATTR_POWER]) + power_valley = options.get(ATTR_POWER_P3, data[ATTR_POWER_P3]) + api_token = options.get(CONF_API_TOKEN, data.get(CONF_API_TOKEN)) use_api_token = api_token is not None schema = vol.Schema( { diff --git a/homeassistant/components/pyload/config_flow.py b/homeassistant/components/pyload/config_flow.py index 936cc5f3ea9bc9..3e6cbd33bb3802 100644 --- a/homeassistant/components/pyload/config_flow.py +++ b/homeassistant/components/pyload/config_flow.py @@ -30,7 +30,6 @@ TextSelectorType, ) -from . 
import PyLoadConfigEntry from .const import DEFAULT_HOST, DEFAULT_NAME, DEFAULT_PORT, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -101,7 +100,6 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for pyLoad.""" VERSION = 1 - config_entry: PyLoadConfigEntry async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -156,7 +154,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.config_entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -164,9 +161,10 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: - new_input = self.config_entry.data | user_input + new_input = reauth_entry.data | user_input try: await validate_input(self.hass, new_input) except (CannotConnect, ParserError): @@ -177,9 +175,7 @@ async def async_step_reauth_confirm( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - return self.async_update_reload_and_abort( - self.config_entry, data=new_input - ) + return self.async_update_reload_and_abort(reauth_entry, data=new_input) return self.async_show_form( step_id="reauth_confirm", @@ -188,25 +184,19 @@ async def async_step_reauth_confirm( { CONF_USERNAME: user_input[CONF_USERNAME] if user_input is not None - else self.config_entry.data[CONF_USERNAME] + else reauth_entry.data[CONF_USERNAME] }, ), - description_placeholders={CONF_NAME: self.config_entry.data[CONF_USERNAME]}, + description_placeholders={CONF_NAME: reauth_entry.data[CONF_USERNAME]}, errors=errors, ) async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Perform a reconfiguration.""" - self.config_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the reconfiguration flow.""" errors = {} + reconfig_entry = self._get_reconfigure_entry() if user_input is not None: try: @@ -220,18 +210,17 @@ async def async_step_reconfigure_confirm( errors["base"] = "unknown" else: return self.async_update_reload_and_abort( - self.config_entry, + reconfig_entry, data=user_input, reload_even_if_entry_is_unchanged=False, - reason="reconfigure_successful", ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( STEP_USER_DATA_SCHEMA, - user_input or self.config_entry.data, + user_input or reconfig_entry.data, ), - description_placeholders={CONF_NAME: self.config_entry.data[CONF_USERNAME]}, + description_placeholders={CONF_NAME: reconfig_entry.data[CONF_USERNAME]}, errors=errors, ) diff --git a/homeassistant/components/pyload/strings.json b/homeassistant/components/pyload/strings.json index bbe6989f5e7883..4ae4c4fee6737b 100644 --- a/homeassistant/components/pyload/strings.json +++ b/homeassistant/components/pyload/strings.json @@ -15,7 +15,7 @@ "port": "pyLoad uses port 8000 by default." 
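# The reauth hunks above (point, powerwall, prosegur, pvoutput, pyload) all converge on the
# same helper-based pattern instead of stashing the entry on the flow. A minimal sketch of
# that pattern; the "example" domain and the CONF_API_KEY-only form are illustrative
# assumptions, not part of this diff:
from collections.abc import Mapping
from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Hypothetical flow showing the reauth helpers used throughout these hunks."""

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        # No need to look up and store the entry; the base class tracks it.
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            # data_updates merges into the existing entry data, then the entry is
            # reloaded and the flow aborts with "reauth_successful".
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(),
                data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
            )
        return self.async_show_form(step_id="reauth_confirm")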
} }, - "reconfigure_confirm": { + "reconfigure": { "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", diff --git a/homeassistant/components/python_script/manifest.json b/homeassistant/components/python_script/manifest.json index 594012dabb183e..4348fdd99118d6 100644 --- a/homeassistant/components/python_script/manifest.json +++ b/homeassistant/components/python_script/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/python_script", "loggers": ["RestrictedPython"], "quality_scale": "internal", - "requirements": ["RestrictedPython==7.3"] + "requirements": ["RestrictedPython==7.4"] } diff --git a/homeassistant/components/qbittorrent/sensor.py b/homeassistant/components/qbittorrent/sensor.py index 68de7e1d5e5558..abc23f399752d8 100644 --- a/homeassistant/components/qbittorrent/sensor.py +++ b/homeassistant/components/qbittorrent/sensor.py @@ -11,6 +11,7 @@ SensorDeviceClass, SensorEntity, SensorEntityDescription, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_IDLE, UnitOfDataRate @@ -79,6 +80,7 @@ class QBittorrentSensorEntityDescription(SensorEntityDescription): QBittorrentSensorEntityDescription( key=SENSOR_TYPE_DOWNLOAD_SPEED, translation_key="download_speed", + state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.DATA_RATE, native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, suggested_display_precision=2, @@ -88,6 +90,7 @@ class QBittorrentSensorEntityDescription(SensorEntityDescription): QBittorrentSensorEntityDescription( key=SENSOR_TYPE_UPLOAD_SPEED, translation_key="upload_speed", + state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.DATA_RATE, native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, suggested_display_precision=2, diff --git a/homeassistant/components/qnap/sensor.py b/homeassistant/components/qnap/sensor.py index 526516bfcdda3a..383a4e5f572602 100644 --- a/homeassistant/components/qnap/sensor.py +++ b/homeassistant/components/qnap/sensor.py @@ -13,7 +13,6 @@ SensorStateClass, ) from homeassistant.const import ( - ATTR_NAME, PERCENTAGE, EntityCategory, UnitOfDataRate, @@ -375,17 +374,6 @@ def native_value(self): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"]["memory"] - size = round(float(data["total"]) / 1024, 2) - return {ATTR_MEMORY_SIZE: f"{size} {UnitOfInformation.GIBIBYTES}"} - return None - class QNAPNetworkSensor(QNAPSensor): """A QNAP sensor that monitors network stats.""" @@ -414,22 +402,6 @@ def native_value(self): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"]["nics"][self.monitor_device] - return { - ATTR_IP: data["ip"], - ATTR_MASK: data["mask"], - ATTR_MAC: data["mac"], - ATTR_MAX_SPEED: data["max_speed"], - ATTR_PACKETS_ERR: data["err_packets"], - } - return None - class QNAPSystemSensor(QNAPSensor): """A QNAP sensor that monitors overall system health.""" @@ -455,25 +427,6 @@ def native_value(self): return None - # Deprecated since Home Assistant 2024.6.0 - # 
Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"] - days = int(data["uptime"]["days"]) - hours = int(data["uptime"]["hours"]) - minutes = int(data["uptime"]["minutes"]) - - return { - ATTR_NAME: data["system"]["name"], - ATTR_MODEL: data["system"]["model"], - ATTR_SERIAL: data["system"]["serial_number"], - ATTR_UPTIME: f"{days:0>2d}d {hours:0>2d}h {minutes:0>2d}m", - } - return None - class QNAPDriveSensor(QNAPSensor): """A QNAP sensor that monitors HDD/SSD drive stats.""" @@ -533,17 +486,3 @@ def native_value(self): return used_gb / total_gb * 100 return None - - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["volumes"][self.monitor_device] - total_gb = int(data["total_size"]) / 1024 / 1024 / 1024 - - return { - ATTR_VOLUME_SIZE: f"{round(total_gb, 1)} {UnitOfInformation.GIBIBYTES}" - } - return None diff --git a/homeassistant/components/qnap_qsw/sensor.py b/homeassistant/components/qnap_qsw/sensor.py index 009bc63b2c68d9..45ec1828b9d73e 100644 --- a/homeassistant/components/qnap_qsw/sensor.py +++ b/homeassistant/components/qnap_qsw/sensor.py @@ -2,7 +2,9 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass, replace +from datetime import datetime from typing import Final from aioqsw.const import ( @@ -26,8 +28,11 @@ QSD_TX_OCTETS, QSD_TX_SPEED, QSD_UPTIME_SECONDS, + QSD_UPTIME_TIMESTAMP, ) +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -43,8 +48,10 @@ UnitOfTime, ) from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import UNDEFINED +from homeassistant.helpers.typing import UNDEFINED, StateType +from homeassistant.util import dt as dt_util from .const import ATTR_MAX, DOMAIN, QSW_COORD_DATA, RPM from .coordinator import QswDataCoordinator @@ -58,6 +65,17 @@ class QswSensorEntityDescription(SensorEntityDescription, QswEntityDescription): attributes: dict[str, list[str]] | None = None qsw_type: QswEntityType | None = None sep_key: str = "_" + value_fn: Callable[[str], datetime | StateType] = lambda value: value + + +DEPRECATED_UPTIME_SECONDS = QswSensorEntityDescription( + translation_key="uptime", + key=QSD_SYSTEM_TIME, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfTime.SECONDS, + state_class=SensorStateClass.TOTAL_INCREASING, + subkey=QSD_UPTIME_SECONDS, +) SENSOR_TYPES: Final[tuple[QswSensorEntityDescription, ...]] = ( @@ -140,12 +158,12 @@ class QswSensorEntityDescription(SensorEntityDescription, QswEntityDescription): subkey=QSD_TX_SPEED, ), QswSensorEntityDescription( - translation_key="uptime", + translation_key="uptime_timestamp", key=QSD_SYSTEM_TIME, + device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfTime.SECONDS, - state_class=SensorStateClass.TOTAL_INCREASING, - subkey=QSD_UPTIME_SECONDS, + 
subkey=QSD_UPTIME_TIMESTAMP, + value_fn=dt_util.parse_datetime, ), ) @@ -337,6 +355,46 @@ async def async_setup_entry( ) entities.append(QswSensor(coordinator, _desc, entry, port_id)) + # Can be removed in HA 2025.5.0 + entity_reg = er.async_get(hass) + reg_entities = er.async_entries_for_config_entry(entity_reg, entry.entry_id) + for entity in reg_entities: + if entity.domain == "sensor" and entity.unique_id.endswith( + ("_uptime", "_uptime_seconds") + ): + entity_id = entity.entity_id + + if entity.disabled: + entity_reg.async_remove(entity_id) + continue + + if ( + DEPRECATED_UPTIME_SECONDS.key in coordinator.data + and DEPRECATED_UPTIME_SECONDS.subkey + in coordinator.data[DEPRECATED_UPTIME_SECONDS.key] + ): + entities.append( + QswSensor(coordinator, DEPRECATED_UPTIME_SECONDS, entry) + ) + + entity_automations = automations_with_entity(hass, entity_id) + entity_scripts = scripts_with_entity(hass, entity_id) + + for item in entity_automations + entity_scripts: + ir.async_create_issue( + hass, + DOMAIN, + f"uptime_seconds_deprecated_{entity_id}_{item}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key="uptime_seconds_deprecated", + translation_placeholders={ + "entity": entity_id, + "info": item, + }, + ) + async_add_entities(entities) @@ -374,5 +432,5 @@ def _async_update_attrs(self) -> None: self.entity_description.subkey, self.entity_description.qsw_type, ) - self._attr_native_value = value + self._attr_native_value = self.entity_description.value_fn(value) super()._async_update_attrs() diff --git a/homeassistant/components/qnap_qsw/strings.json b/homeassistant/components/qnap_qsw/strings.json index c8cd5ffb861204..462e66a25c31b2 100644 --- a/homeassistant/components/qnap_qsw/strings.json +++ b/homeassistant/components/qnap_qsw/strings.json @@ -52,7 +52,16 @@ }, "uptime": { "name": "Uptime" + }, + "uptime_timestamp": { + "name": "Uptime timestamp" } } + }, + "issues": { + "uptime_seconds_deprecated": { + "title": "QNAP QSW uptime seconds sensor deprecated", + "description": "The QNAP QSW uptime seconds sensor entity is deprecated and will be removed in HA 2025.2.0.\nHome Assistant detected that entity `{entity}` is being used in `{info}`\n\nYou should remove the uptime seconds entity from `{info}` then click submit to fix this issue." 
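# The "issues" entry above is what the ir.async_create_issue() call in the qnap_qsw sensor
# hunk resolves through translation_key and translation_placeholders. A stripped-down sketch
# of that call side; the entity id and the automation/script id passed as "info" are
# illustrative placeholders:
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir


def report_deprecated_uptime(hass: HomeAssistant, entity_id: str, used_in: str) -> None:
    """Open a repairs issue for a deprecated uptime-seconds entity still in use."""
    ir.async_create_issue(
        hass,
        "qnap_qsw",
        f"uptime_seconds_deprecated_{entity_id}_{used_in}",
        breaks_in_ha_version="2025.5.0",
        is_fixable=False,
        severity=ir.IssueSeverity.WARNING,
        translation_key="uptime_seconds_deprecated",
        translation_placeholders={"entity": entity_id, "info": used_in},
    )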
+ } } } diff --git a/homeassistant/components/qrcode/manifest.json b/homeassistant/components/qrcode/manifest.json index 14f2d093f37947..3fcc895c2b9cc8 100644 --- a/homeassistant/components/qrcode/manifest.json +++ b/homeassistant/components/qrcode/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/qrcode", "iot_class": "calculated", "loggers": ["pyzbar"], - "requirements": ["Pillow==10.4.0", "pyzbar==0.1.7"] + "requirements": ["Pillow==11.0.0", "pyzbar==0.1.7"] } diff --git a/homeassistant/components/rachio/config_flow.py b/homeassistant/components/rachio/config_flow.py index 66811091820886..fac93952b35af0 100644 --- a/homeassistant/components/rachio/config_flow.py +++ b/homeassistant/components/rachio/config_flow.py @@ -108,16 +108,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Rachio.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/radarr/config_flow.py b/homeassistant/components/radarr/config_flow.py index ab32a5d7352a07..d02038d7131a81 100644 --- a/homeassistant/components/radarr/config_flow.py +++ b/homeassistant/components/radarr/config_flow.py @@ -12,12 +12,11 @@ import voluptuous as vol from yarl import URL -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . import RadarrConfigEntry from .const import DEFAULT_NAME, DEFAULT_URL, DOMAIN @@ -25,14 +24,11 @@ class RadarrConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Radarr.""" VERSION = 1 - entry: RadarrConfigEntry | None = None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -51,10 +47,7 @@ async def async_step_user( """Handle a flow initiated by the user.""" errors = {} - if user_input is None: - user_input = dict(self.entry.data) if self.entry else None - - else: + if user_input is not None: # aiopyarr defaults to the service port if one isn't given # this is counter to standard practice where http = 80 # and https = 443. 
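# The radarr hunk just below folds reauth into the regular user step by branching on
# self.source. A condensed sketch of that shape; the "sketch" domain, the CONF_URL-only
# schema, and the entry title are illustrative placeholders:
from collections.abc import Mapping
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_URL

STEP_USER_SCHEMA = vol.Schema({vol.Required(CONF_URL): str})


class SketchConfigFlow(ConfigFlow, domain="sketch"):
    """Hypothetical flow sharing one step between initial setup and reauth."""

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        # Reauth re-enters the shared user step.
        return await self.async_step_user()

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            if self.source == SOURCE_REAUTH:
                # Reauth: update and reload the existing entry, never create a second one.
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(), data=user_input
                )
            return self.async_create_entry(title="Sketch", data=user_input)

        suggested: dict[str, Any] = {}
        if self.source == SOURCE_REAUTH:
            # Pre-fill the form with the stored entry data on reauth.
            suggested = dict(self._get_reauth_entry().data)
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(STEP_USER_SCHEMA, suggested),
        )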
@@ -75,20 +68,21 @@ async def async_step_user( except exceptions.ArrException: errors = {"base": "unknown"} if not errors: - if self.entry: - self.hass.config_entries.async_update_entry( - self.entry, data=user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=DEFAULT_NAME, data=user_input, ) - user_input = user_input or {} + if user_input is None: + user_input = {} + if self.source == SOURCE_REAUTH: + user_input = dict(self._get_reauth_entry().data) + return self.async_show_form( step_id="user", data_schema=vol.Schema( diff --git a/homeassistant/components/radio_browser/config_flow.py b/homeassistant/components/radio_browser/config_flow.py index 137ee7c8e87f17..411259f31d3eda 100644 --- a/homeassistant/components/radio_browser/config_flow.py +++ b/homeassistant/components/radio_browser/config_flow.py @@ -18,9 +18,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is not None: return self.async_create_entry(title="Radio Browser", data={}) diff --git a/homeassistant/components/radio_browser/manifest.json b/homeassistant/components/radio_browser/manifest.json index 5a52d29d27aec3..943187596d7580 100644 --- a/homeassistant/components/radio_browser/manifest.json +++ b/homeassistant/components/radio_browser/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/radio_browser", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["radios==0.3.1", "pycountry==23.12.11"] + "requirements": ["radios==0.3.2", "pycountry==24.6.1"], + "single_config_entry": true } diff --git a/homeassistant/components/radio_browser/strings.json b/homeassistant/components/radio_browser/strings.json index fd0470d26dc8ad..5dd0ad3dcf70d0 100644 --- a/homeassistant/components/radio_browser/strings.json +++ b/homeassistant/components/radio_browser/strings.json @@ -4,9 +4,6 @@ "user": { "description": "Do you want to add Radio Browser to Home Assistant?" 
} - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } } } diff --git a/homeassistant/components/rainbird/config_flow.py b/homeassistant/components/rainbird/config_flow.py index c1c814b05c4a65..abeb1b5da157c8 100644 --- a/homeassistant/components/rainbird/config_flow.py +++ b/homeassistant/components/rainbird/config_flow.py @@ -65,7 +65,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> RainBirdOptionsFlowHandler: """Define the config flow to handle options.""" - return RainBirdOptionsFlowHandler(config_entry) + return RainBirdOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -165,10 +165,6 @@ async def async_finish( class RainBirdOptionsFlowHandler(OptionsFlow): """Handle a RainBird options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize RainBirdOptionsFlowHandler.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rainforest_raven/__init__.py b/homeassistant/components/rainforest_raven/__init__.py index 76f82624160a1e..b68d995262ad68 100644 --- a/homeassistant/components/rainforest_raven/__init__.py +++ b/homeassistant/components/rainforest_raven/__init__.py @@ -2,29 +2,23 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import RAVEnDataCoordinator +from .coordinator import RAVEnConfigEntry, RAVEnDataCoordinator PLATFORMS = (Platform.SENSOR,) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: RAVEnConfigEntry) -> bool: """Set up Rainforest RAVEn device from a config entry.""" coordinator = RAVEnDataCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RAVEnConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/rainforest_raven/coordinator.py b/homeassistant/components/rainforest_raven/coordinator.py index d08a10c26704dc..31df922a168953 100644 --- a/homeassistant/components/rainforest_raven/coordinator.py +++ b/homeassistant/components/rainforest_raven/coordinator.py @@ -20,6 +20,8 @@ from .const import DOMAIN +type RAVEnConfigEntry = ConfigEntry[RAVEnDataCoordinator] + _LOGGER = logging.getLogger(__name__) @@ -67,32 +69,18 @@ class RAVEnDataCoordinator(DataUpdateCoordinator): _raven_device: RAVEnSerialDevice | None = None _device_info: RAVEnDeviceInfo | None = None + config_entry: RAVEnConfigEntry - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: RAVEnConfigEntry) -> None: """Initialize the data object.""" - self.entry = entry - super().__init__( hass, _LOGGER, + config_entry=entry, 
name=DOMAIN, update_interval=timedelta(seconds=30), ) - @property - def device_fw_version(self) -> str | None: - """Return the firmware version of the device.""" - if self._device_info: - return self._device_info.fw_version - return None - - @property - def device_hw_version(self) -> str | None: - """Return the hardware version of the device.""" - if self._device_info: - return self._device_info.hw_version - return None - @property def device_mac_address(self) -> str | None: """Return the MAC address of the device.""" @@ -100,36 +88,20 @@ def device_mac_address(self) -> str | None: return self._device_info.device_mac_id.hex() return None - @property - def device_manufacturer(self) -> str | None: - """Return the manufacturer of the device.""" - if self._device_info: - return self._device_info.manufacturer - return None - - @property - def device_model(self) -> str | None: - """Return the model of the device.""" - if self._device_info: - return self._device_info.model_id - return None - - @property - def device_name(self) -> str: - """Return the product name of the device.""" - return "RAVEn Device" - @property def device_info(self) -> DeviceInfo | None: """Return device info.""" - if self._device_info and self.device_mac_address: + if (device_info := self._device_info) and ( + mac_address := self.device_mac_address + ): return DeviceInfo( - identifiers={(DOMAIN, self.device_mac_address)}, - manufacturer=self.device_manufacturer, - model=self.device_model, - name=self.device_name, - sw_version=self.device_fw_version, - hw_version=self.device_hw_version, + identifiers={(DOMAIN, mac_address)}, + manufacturer=device_info.manufacturer, + model=device_info.model_id, + model_id=device_info.model_id, + name="RAVEn Device", + sw_version=device_info.fw_version, + hw_version=device_info.hw_version, ) return None @@ -142,7 +114,7 @@ async def _async_update_data(self) -> dict[str, Any]: try: device = await self._get_device() async with asyncio.timeout(5): - return await _get_all_data(device, self.entry.data[CONF_MAC]) + return await _get_all_data(device, self.config_entry.data[CONF_MAC]) except RAVEnConnectionError as err: await self._cleanup_device() raise UpdateFailed(f"RAVEnConnectionError: {err}") from err @@ -159,7 +131,7 @@ async def _get_device(self) -> RAVEnSerialDevice: if self._raven_device is not None: return self._raven_device - device = RAVEnSerialDevice(self.entry.data[CONF_DEVICE]) + device = RAVEnSerialDevice(self.config_entry.data[CONF_DEVICE]) try: async with asyncio.timeout(5): diff --git a/homeassistant/components/rainforest_raven/diagnostics.py b/homeassistant/components/rainforest_raven/diagnostics.py index 820c4826f006ec..6c06b0d65ccfc1 100644 --- a/homeassistant/components/rainforest_raven/diagnostics.py +++ b/homeassistant/components/rainforest_raven/diagnostics.py @@ -6,12 +6,10 @@ from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC from homeassistant.core import HomeAssistant, callback -from .const import DOMAIN -from .coordinator import RAVEnDataCoordinator +from .coordinator import RAVEnConfigEntry TO_REDACT_CONFIG = {CONF_MAC} TO_REDACT_DATA = {"device_mac_id", "meter_mac_id"} @@ -31,14 +29,13 @@ def async_redact_meter_macs(data: dict) -> dict: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: RAVEnConfigEntry ) -> Mapping[str, Any]: """Return diagnostics for a 
config entry.""" - coordinator: RAVEnDataCoordinator = hass.data[DOMAIN][config_entry.entry_id] return { "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT_CONFIG), "data": async_redact_meter_macs( - async_redact_data(coordinator.data, TO_REDACT_DATA) + async_redact_data(config_entry.runtime_data.data, TO_REDACT_DATA) ), } diff --git a/homeassistant/components/rainforest_raven/sensor.py b/homeassistant/components/rainforest_raven/sensor.py index 23ca3220694b0a..1025e92ef8607a 100644 --- a/homeassistant/components/rainforest_raven/sensor.py +++ b/homeassistant/components/rainforest_raven/sensor.py @@ -10,9 +10,7 @@ SensorEntity, SensorEntityDescription, SensorStateClass, - StateType, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_MAC, PERCENTAGE, @@ -22,10 +20,10 @@ ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN -from .coordinator import RAVEnDataCoordinator +from .coordinator import RAVEnConfigEntry, RAVEnDataCoordinator @dataclass(frozen=True, kw_only=True) @@ -80,10 +78,12 @@ class RAVEnSensorEntityDescription(SensorEntityDescription): async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: RAVEnConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data entities: list[RAVEnSensor] = [ RAVEnSensor(coordinator, description) for description in DIAGNOSTICS ] diff --git a/homeassistant/components/rainmachine/config_flow.py b/homeassistant/components/rainmachine/config_flow.py index 5c07f04c1639f3..0b40d506566235 100644 --- a/homeassistant/components/rainmachine/config_flow.py +++ b/homeassistant/components/rainmachine/config_flow.py @@ -63,7 +63,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> RainMachineOptionsFlowHandler: """Define the config flow to handle options.""" - return RainMachineOptionsFlowHandler(config_entry) + return RainMachineOptionsFlowHandler() async def async_step_homekit( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -168,10 +168,6 @@ async def async_step_user( class RainMachineOptionsFlowHandler(OptionsFlow): """Handle a RainMachine options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rainmachine/update.py b/homeassistant/components/rainmachine/update.py index dbb91b70c85c41..39156b05cd4d38 100644 --- a/homeassistant/components/rainmachine/update.py +++ b/homeassistant/components/rainmachine/update.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import dataclass from enum import Enum from typing import Any @@ -10,6 +11,7 @@ from homeassistant.components.update import ( UpdateDeviceClass, UpdateEntity, + UpdateEntityDescription, UpdateEntityFeature, ) from homeassistant.core import HomeAssistant, callback @@ -42,7 +44,14 @@ class UpdateStates(Enum): } -UPDATE_DESCRIPTION = RainMachineEntityDescription( +@dataclass(frozen=True, kw_only=True) +class RainMachineUpdateEntityDescription( + UpdateEntityDescription, 
RainMachineEntityDescription +): + """Describe a RainMachine update.""" + + +UPDATE_DESCRIPTION = RainMachineUpdateEntityDescription( key="update", api_category=DATA_MACHINE_FIRMWARE_UPDATE_STATUS, ) diff --git a/homeassistant/components/random/binary_sensor.py b/homeassistant/components/random/binary_sensor.py index 9d33ad52692139..ae9a5886d59724 100644 --- a/homeassistant/components/random/binary_sensor.py +++ b/homeassistant/components/random/binary_sensor.py @@ -59,10 +59,9 @@ class RandomBinarySensor(BinarySensorEntity): def __init__(self, config: Mapping[str, Any], entry_id: str | None = None) -> None: """Initialize the Random binary sensor.""" - self._attr_name = config.get(CONF_NAME) + self._attr_name = config[CONF_NAME] self._attr_device_class = config.get(CONF_DEVICE_CLASS) - if entry_id: - self._attr_unique_id = entry_id + self._attr_unique_id = entry_id async def async_update(self) -> None: """Get new state and update the sensor's state.""" diff --git a/homeassistant/components/random/config_flow.py b/homeassistant/components/random/config_flow.py index fcbd77916a9340..00314169260383 100644 --- a/homeassistant/components/random/config_flow.py +++ b/homeassistant/components/random/config_flow.py @@ -95,7 +95,7 @@ def _generate_schema(domain: str, flow_type: _FlowType) -> vol.Schema: async def choose_options_step(options: dict[str, Any]) -> str: - """Return next step_id for options flow according to template_type.""" + """Return next step_id for options flow according to entity_type.""" return cast(str, options["entity_type"]) @@ -122,7 +122,7 @@ def _validate_unit(options: dict[str, Any]) -> None: def validate_user_input( - template_type: str, + entity_type: str, ) -> Callable[ [SchemaCommonFlowHandler, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]], @@ -136,10 +136,10 @@ async def _validate_user_input( _: SchemaCommonFlowHandler, user_input: dict[str, Any], ) -> dict[str, Any]: - """Add template type to user input.""" - if template_type == Platform.SENSOR: + """Add entity type to user input.""" + if entity_type == Platform.SENSOR: _validate_unit(user_input) - return {"entity_type": template_type} | user_input + return {"entity_type": entity_type} | user_input return _validate_user_input diff --git a/homeassistant/components/random/sensor.py b/homeassistant/components/random/sensor.py index 3c6e67c99184d7..aad4fcb851cd7a 100644 --- a/homeassistant/components/random/sensor.py +++ b/homeassistant/components/random/sensor.py @@ -70,22 +70,22 @@ class RandomSensor(SensorEntity): """Representation of a Random number sensor.""" _attr_translation_key = "random" + _unrecorded_attributes = frozenset({ATTR_MAXIMUM, ATTR_MINIMUM}) def __init__(self, config: Mapping[str, Any], entry_id: str | None = None) -> None: """Initialize the Random sensor.""" - self._attr_name = config.get(CONF_NAME) - self._minimum = config.get(CONF_MINIMUM, DEFAULT_MIN) - self._maximum = config.get(CONF_MAXIMUM, DEFAULT_MAX) + self._attr_name = config[CONF_NAME] + self._minimum = config[CONF_MINIMUM] + self._maximum = config[CONF_MAXIMUM] self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._attr_extra_state_attributes = { ATTR_MAXIMUM: self._maximum, ATTR_MINIMUM: self._minimum, } - if entry_id: - self._attr_unique_id = entry_id + self._attr_unique_id = entry_id async def async_update(self) -> None: - """Get a new number and updates the states.""" + """Get a new number and update the state.""" self._attr_native_value = 
randrange(self._minimum, self._maximum + 1) diff --git a/homeassistant/components/random/strings.json b/homeassistant/components/random/strings.json index 98072a21fe120a..ef19dd6dd670e0 100644 --- a/homeassistant/components/random/strings.json +++ b/homeassistant/components/random/strings.json @@ -1,4 +1,5 @@ { + "title": "Random", "config": { "step": { "binary_sensor": { diff --git a/homeassistant/components/raspberry_pi/__init__.py b/homeassistant/components/raspberry_pi/__init__.py index d1dcd04922ffd3..8095eb9dfe0da6 100644 --- a/homeassistant/components/raspberry_pi/__init__.py +++ b/homeassistant/components/raspberry_pi/__init__.py @@ -2,10 +2,11 @@ from __future__ import annotations -from homeassistant.components.hassio import get_os_info, is_hassio +from homeassistant.components.hassio import get_os_info from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.hassio import is_hassio async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/rdw/__init__.py b/homeassistant/components/rdw/__init__.py index f123db7c697f40..6051576026b1e1 100644 --- a/homeassistant/components/rdw/__init__.py +++ b/homeassistant/components/rdw/__init__.py @@ -23,6 +23,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: DataUpdateCoordinator[Vehicle] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_APK", update_interval=SCAN_INTERVAL, update_method=rdw.vehicle, diff --git a/homeassistant/components/recollect_waste/__init__.py b/homeassistant/components/recollect_waste/__init__.py index 6606f31a42d681..1710fb8c816be6 100644 --- a/homeassistant/components/recollect_waste/__init__.py +++ b/homeassistant/components/recollect_waste/__init__.py @@ -52,6 +52,7 @@ async def async_get_pickup_events() -> list[PickupEvent]: coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=( f"Place {entry.data[CONF_PLACE_ID]}, Service {entry.data[CONF_SERVICE_ID]}" ), diff --git a/homeassistant/components/recollect_waste/config_flow.py b/homeassistant/components/recollect_waste/config_flow.py index 882eb6a00d26c1..299af2609e34e6 100644 --- a/homeassistant/components/recollect_waste/config_flow.py +++ b/homeassistant/components/recollect_waste/config_flow.py @@ -34,9 +34,9 @@ class RecollectWasteConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> RecollectWasteOptionsFlowHandler: """Define the config flow to handle options.""" - return RecollectWasteOptionsFlowHandler(config_entry) + return RecollectWasteOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -79,10 +79,6 @@ async def async_step_user( class RecollectWasteOptionsFlowHandler(OptionsFlow): """Handle a Recollect Waste options flow.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize.""" - self._entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -96,7 +92,7 @@ async def async_step_init( { vol.Optional( CONF_FRIENDLY_NAME, - default=self._entry.options.get(CONF_FRIENDLY_NAME), + default=self.config_entry.options.get(CONF_FRIENDLY_NAME), ): bool } ), diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 77d01088d67db6..6ba64d4a5717e1 100644 --- 
a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -78,16 +78,8 @@ StatisticsShortTerm, ) from .executor import DBInterruptibleThreadPoolExecutor -from .migration import ( - EntityIDMigration, - EventIDPostMigration, - EventsContextIDMigration, - EventTypeIDMigration, - StatesContextIDMigration, -) from .models import DatabaseEngine, StatisticData, StatisticMetaData, UnsupportedDialect from .pool import POOL_SIZE, MutexPool, RecorderPool -from .queries import get_migration_changes from .table_managers.event_data import EventDataManager from .table_managers.event_types import EventTypeManager from .table_managers.recorder_runs import RecorderRunsManager @@ -120,7 +112,6 @@ build_mysqldb_conv, dburl_to_path, end_incomplete_runs, - execute_stmt_lambda_element, is_second_sunday, move_away_broken_database, session_scope, @@ -740,12 +731,17 @@ def _run(self) -> None: # First do non-live migration steps, if needed if schema_status.migration_needed: + # Do non-live schema migration result, schema_status = self._migrate_schema_offline(schema_status) if not result: self._notify_migration_failed() self.migration_in_progress = False return self.schema_version = schema_status.current_version + + # Do non-live data migration + migration.migrate_data_non_live(self, self.get_session, schema_status) + # Non-live migration is now completed, remaining steps are live self.migration_is_live = True @@ -801,20 +797,7 @@ def _activate_and_set_db_ready( # there are a lot of statistics graphs on the frontend. self.statistics_meta_manager.load(session) - migration_changes: dict[str, int] = { - row[0]: row[1] - for row in execute_stmt_lambda_element(session, get_migration_changes()) - } - - for migrator_cls in ( - StatesContextIDMigration, - EventsContextIDMigration, - EventTypeIDMigration, - EntityIDMigration, - EventIDPostMigration, - ): - migrator = migrator_cls(schema_status.start_version, migration_changes) - migrator.do_migrate(self, session) + migration.migrate_data_live(self, self.get_session, schema_status) # We must only set the db ready after we have set the table managers # to active if there is no data to migrate. 
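The core.py hunks above split recorder data migration into two phases: migrate_data_non_live() is run to completion before the recorder goes live, while migrate_data_live() only queues background tasks once the instance is active (the migrator groups themselves, NON_LIVE_DATA_MIGRATORS and LIVE_DATA_MIGRATORS, appear in the migration.py diff further below). A minimal, self-contained sketch of that control flow; the classes here are hypothetical stand-ins, not the real Recorder, Session or migrator classes:

from abc import ABC, abstractmethod


class BaseMigration(ABC):
    """Stand-in for the shared migrator base in migration.py."""

    @abstractmethod
    def migrate_data(self) -> bool:
        """Migrate one batch; return True when nothing is left to do."""


class OfflineMigration(BaseMigration):
    """Non-live migrator: block until fully migrated before startup continues."""

    def migrate_all(self) -> None:
        while not self.migrate_data():
            pass


class LiveMigration(BaseMigration):
    """Live migrator: defer the work to a task queue processed at runtime."""

    def queue_migration(self, task_queue: list) -> None:
        task_queue.append(self.migrate_data)


class ContextIDExample(OfflineMigration):
    """Hypothetical non-live migrator that needs three batches."""

    def __init__(self) -> None:
        self._remaining = 3

    def migrate_data(self) -> bool:
        self._remaining -= 1
        return self._remaining <= 0


class EntityIDExample(LiveMigration):
    """Hypothetical live migrator that finishes in one batch."""

    def migrate_data(self) -> bool:
        return True


if __name__ == "__main__":
    tasks: list = []
    ContextIDExample().migrate_all()          # finishes before "going live"
    EntityIDExample().queue_migration(tasks)  # picked up later by the recorder loop
    print(f"{len(tasks)} live migration task(s) queued")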
@@ -981,6 +964,7 @@ def _migrate_schema( new_schema_status = migration.SchemaValidationStatus( current_version=SCHEMA_VERSION, migration_needed=False, + non_live_data_migration_needed=False, schema_errors=set(), start_version=SCHEMA_VERSION, ) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 5180a0c440c960..02ab05288c58d6 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -91,6 +91,7 @@ find_states_context_ids_to_migrate, find_unmigrated_short_term_statistics_rows, find_unmigrated_statistics_rows, + get_migration_changes, has_entity_ids_to_migrate, has_event_type_to_migrate, has_events_context_ids_to_migrate, @@ -104,6 +105,7 @@ from .tasks import RecorderTask from .util import ( database_job_retry_wrapper, + database_job_retry_wrapper_method, execute_stmt_lambda_element, get_index_by_name, retryable_database_job_method, @@ -198,12 +200,13 @@ def get_schema_version(session_maker: Callable[[], Session]) -> int | None: return None -@dataclass(frozen=True) +@dataclass(frozen=True, kw_only=True) class SchemaValidationStatus: """Store schema validation status.""" current_version: int migration_needed: bool + non_live_data_migration_needed: bool schema_errors: set[str] start_version: int @@ -233,8 +236,17 @@ def validate_db_schema( # columns may otherwise not exist etc. schema_errors = _find_schema_errors(hass, instance, session_maker) + schema_migration_needed = not is_current + _non_live_data_migration_needed = non_live_data_migration_needed( + instance, session_maker, current_version + ) + return SchemaValidationStatus( - current_version, not is_current, schema_errors, current_version + current_version=current_version, + non_live_data_migration_needed=_non_live_data_migration_needed, + migration_needed=schema_migration_needed or _non_live_data_migration_needed, + schema_errors=schema_errors, + start_version=current_version, ) @@ -251,7 +263,10 @@ def _find_schema_errors( def live_migration(schema_status: SchemaValidationStatus) -> bool: """Check if live migration is possible.""" - return schema_status.current_version >= LIVE_MIGRATION_MIN_SCHEMA_VERSION + return ( + schema_status.current_version >= LIVE_MIGRATION_MIN_SCHEMA_VERSION + and not schema_status.non_live_data_migration_needed + ) def pre_migrate_schema(engine: Engine) -> None: @@ -350,6 +365,68 @@ def migrate_schema_live( return schema_status +def _get_migration_changes(session: Session) -> dict[str, int]: + """Return migration changes as a dict.""" + migration_changes: dict[str, int] = { + row[0]: row[1] + for row in execute_stmt_lambda_element(session, get_migration_changes()) + } + return migration_changes + + +def non_live_data_migration_needed( + instance: Recorder, + session_maker: Callable[[], Session], + schema_version: int, +) -> bool: + """Return True if non-live data migration is needed. + + This must only be called if database schema is current. + """ + migration_needed = False + with session_scope(session=session_maker()) as session: + migration_changes = _get_migration_changes(session) + for migrator_cls in NON_LIVE_DATA_MIGRATORS: + migrator = migrator_cls(schema_version, migration_changes) + migration_needed |= migrator.needs_migrate(instance, session) + + return migration_needed + + +def migrate_data_non_live( + instance: Recorder, + session_maker: Callable[[], Session], + schema_status: SchemaValidationStatus, +) -> None: + """Do non-live data migration. 
+ + This must be called after non-live schema migration is completed. + """ + with session_scope(session=session_maker()) as session: + migration_changes = _get_migration_changes(session) + + for migrator_cls in NON_LIVE_DATA_MIGRATORS: + migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator.migrate_all(instance, session_maker) + + +def migrate_data_live( + instance: Recorder, + session_maker: Callable[[], Session], + schema_status: SchemaValidationStatus, +) -> None: + """Queue live schema migration tasks. + + This must be called after live schema migration is completed. + """ + with session_scope(session=session_maker()) as session: + migration_changes = _get_migration_changes(session) + + for migrator_cls in LIVE_DATA_MIGRATORS: + migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator.queue_migration(instance, session) + + def _create_index( session_maker: Callable[[], Session], table_name: str, index_name: str ) -> None: @@ -2196,29 +2273,24 @@ class DataMigrationStatus: migration_done: bool -class BaseRunTimeMigration(ABC): - """Base class for run time migrations.""" +class BaseMigration(ABC): + """Base class for migrations.""" index_to_drop: tuple[str, str] | None = None required_schema_version = 0 migration_version = 1 migration_id: str - task = MigrationTask def __init__(self, schema_version: int, migration_changes: dict[str, int]) -> None: """Initialize a new BaseRunTimeMigration.""" self.schema_version = schema_version self.migration_changes = migration_changes - def do_migrate(self, instance: Recorder, session: Session) -> None: - """Start migration if needed.""" - if self.needs_migrate(instance, session): - instance.queue_task(self.task(self)) - else: - self.migration_done(instance, session) - - @retryable_database_job_method("migrate data") + @abstractmethod def migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, return True if migration is completed.""" + + def _migrate_data(self, instance: Recorder) -> bool: """Migrate some data, returns True if migration is completed.""" status = self.migrate_data_impl(instance) if status.migration_done: @@ -2273,7 +2345,45 @@ def needs_migrate(self, instance: Recorder, session: Session) -> bool: return needs_migrate.needs_migrate -class BaseRunTimeMigrationWithQuery(BaseRunTimeMigration): +class BaseOffLineMigration(BaseMigration): + """Base class for off line migrations.""" + + def migrate_all( + self, instance: Recorder, session_maker: Callable[[], Session] + ) -> None: + """Migrate all data.""" + with session_scope(session=session_maker()) as session: + if not self.needs_migrate(instance, session): + self.migration_done(instance, session) + return + while not self.migrate_data(instance): + pass + + @database_job_retry_wrapper_method("migrate data", 10) + def migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, returns True if migration is completed.""" + return self._migrate_data(instance) + + +class BaseRunTimeMigration(BaseMigration): + """Base class for run time migrations.""" + + task = MigrationTask + + def queue_migration(self, instance: Recorder, session: Session) -> None: + """Start migration if needed.""" + if self.needs_migrate(instance, session): + instance.queue_task(self.task(self)) + else: + self.migration_done(instance, session) + + @retryable_database_job_method("migrate data") + def migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, returns True if migration is completed.""" + return 
self._migrate_data(instance) + + +class BaseMigrationWithQuery(BaseMigration): """Base class for run time migrations.""" @abstractmethod @@ -2290,7 +2400,7 @@ def needs_migrate_impl( ) -class StatesContextIDMigration(BaseRunTimeMigrationWithQuery): +class StatesContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate states context_ids to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION @@ -2333,7 +2443,7 @@ def needs_migrate_query(self) -> StatementLambdaElement: return has_states_context_ids_to_migrate() -class EventsContextIDMigration(BaseRunTimeMigrationWithQuery): +class EventsContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate events context_ids to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION @@ -2376,7 +2486,7 @@ def needs_migrate_query(self) -> StatementLambdaElement: return has_events_context_ids_to_migrate() -class EventTypeIDMigration(BaseRunTimeMigrationWithQuery): +class EventTypeIDMigration(BaseMigrationWithQuery, BaseRunTimeMigration): """Migration to migrate event_type to event_type_ids.""" required_schema_version = EVENT_TYPE_IDS_SCHEMA_VERSION @@ -2454,7 +2564,7 @@ def needs_migrate_query(self) -> StatementLambdaElement: return has_event_type_to_migrate() -class EntityIDMigration(BaseRunTimeMigrationWithQuery): +class EntityIDMigration(BaseMigrationWithQuery, BaseRunTimeMigration): """Migration to migrate entity_ids to states_meta.""" required_schema_version = STATES_META_SCHEMA_VERSION @@ -2542,7 +2652,7 @@ def migration_done(self, instance: Recorder, session: Session) -> None: instance.states_meta_manager.active = True with contextlib.suppress(SQLAlchemyError): migrate = EntityIDPostMigration(self.schema_version, self.migration_changes) - migrate.do_migrate(instance, session) + migrate.queue_migration(instance, session) def needs_migrate_query(self) -> StatementLambdaElement: """Check if the data is migrated.""" @@ -2631,7 +2741,7 @@ def needs_migrate_impl( return DataMigrationStatus(needs_migrate=False, migration_done=True) -class EntityIDPostMigration(BaseRunTimeMigrationWithQuery): +class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): """Migration to remove old entity_id strings from states.""" migration_id = "entity_id_post_migration" @@ -2648,9 +2758,19 @@ def needs_migrate_query(self) -> StatementLambdaElement: return has_used_states_entity_ids() -def _mark_migration_done( - session: Session, migration: type[BaseRunTimeMigration] -) -> None: +NON_LIVE_DATA_MIGRATORS = ( + StatesContextIDMigration, # Introduced in HA Core 2023.4 + EventsContextIDMigration, # Introduced in HA Core 2023.4 +) + +LIVE_DATA_MIGRATORS = ( + EventTypeIDMigration, + EntityIDMigration, + EventIDPostMigration, +) + + +def _mark_migration_done(session: Session, migration: type[BaseMigration]) -> None: """Mark a migration as done in the database.""" session.merge( MigrationChanges( diff --git a/homeassistant/components/recorder/pool.py b/homeassistant/components/recorder/pool.py index 30f8fa8d07a47a..fc2a8ccb1ccf7b 100644 --- a/homeassistant/components/recorder/pool.py +++ b/homeassistant/components/recorder/pool.py @@ -16,7 +16,7 @@ StaticPool, ) -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.util.loop import raise_for_blocking_call _LOGGER = logging.getLogger(__name__) @@ -108,14 +108,14 @@ def _do_get(self) -> ConnectionPoolEntry: # type: 
ignore[return] # raise_for_blocking_call will raise an exception def _do_get_db_connection_protected(self) -> ConnectionPoolEntry: - report( + report_usage( ( "accesses the database without the database executor; " f"{ADVISE_MSG} " "for faster database operations" ), exclude_integrations={"recorder"}, - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) return NullPool._create_connection(self) # noqa: SLF001 diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 4ffe7c72971d8e..e5fbfe0e8c58b2 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -28,6 +28,7 @@ from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( BaseUnitConverter, + BloodGlugoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -128,6 +129,10 @@ STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { + **{ + unit: BloodGlugoseConcentrationConverter + for unit in BloodGlugoseConcentrationConverter.VALID_UNITS + }, **{unit: ConductivityConverter for unit in ConductivityConverter.VALID_UNITS}, **{unit: DataRateConverter for unit in DataRateConverter.VALID_UNITS}, **{unit: DistanceConverter for unit in DistanceConverter.VALID_UNITS}, diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index d078c32cb88cab..a59519ef38dfa7 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -652,13 +652,13 @@ def _is_retryable_error(instance: Recorder, err: OperationalError) -> bool: def retryable_database_job[**_P]( description: str, ) -> Callable[[_FuncType[_P, bool]], _FuncType[_P, bool]]: - """Try to execute a database job. + """Execute a database job repeatedly until it succeeds. The job should return True if it finished, and False if it needs to be rescheduled. """ def decorator(job: _FuncType[_P, bool]) -> _FuncType[_P, bool]: - return _wrap_func_or_meth(job, description, False) + return _wrap_retryable_database_job_func_or_meth(job, description, False) return decorator @@ -666,18 +666,18 @@ def decorator(job: _FuncType[_P, bool]) -> _FuncType[_P, bool]: def retryable_database_job_method[_Self, **_P]( description: str, ) -> Callable[[_MethType[_Self, _P, bool]], _MethType[_Self, _P, bool]]: - """Try to execute a database job. + """Execute a database job repeatedly until it succeeds. The job should return True if it finished, and False if it needs to be rescheduled. """ def decorator(job: _MethType[_Self, _P, bool]) -> _MethType[_Self, _P, bool]: - return _wrap_func_or_meth(job, description, True) + return _wrap_retryable_database_job_func_or_meth(job, description, True) return decorator -def _wrap_func_or_meth[**_P]( +def _wrap_retryable_database_job_func_or_meth[**_P]( job: _FuncOrMethType[_P, bool], description: str, method: bool ) -> _FuncOrMethType[_P, bool]: recorder_pos = 1 if method else 0 @@ -705,10 +705,10 @@ def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> bool: return wrapper -def database_job_retry_wrapper[**_P]( - description: str, attempts: int = 5 -) -> Callable[[_FuncType[_P, None]], _FuncType[_P, None]]: - """Try to execute a database job multiple times. +def database_job_retry_wrapper[**_P, _R]( + description: str, attempts: int +) -> Callable[[_FuncType[_P, _R]], _FuncType[_P, _R]]: + """Execute a database job repeatedly until it succeeds, at most attempts times. 
This wrapper handles InnoDB deadlocks and lock timeouts. @@ -717,32 +717,63 @@ def database_job_retry_wrapper[**_P]( """ def decorator( - job: _FuncType[_P, None], - ) -> _FuncType[_P, None]: - @functools.wraps(job) - def wrapper(instance: Recorder, *args: _P.args, **kwargs: _P.kwargs) -> None: - for attempt in range(attempts): - try: - job(instance, *args, **kwargs) - except OperationalError as err: - if attempt == attempts - 1 or not _is_retryable_error( - instance, err - ): - raise - assert isinstance(err.orig, BaseException) # noqa: PT017 - _LOGGER.info( - "%s; %s failed, retrying", err.orig.args[1], description - ) - time.sleep(instance.db_retry_wait) - # Failed with retryable error - else: - return + job: _FuncType[_P, _R], + ) -> _FuncType[_P, _R]: + return _database_job_retry_wrapper_func_or_meth( + job, description, attempts, False + ) + + return decorator + + +def database_job_retry_wrapper_method[_Self, **_P, _R]( + description: str, attempts: int +) -> Callable[[_MethType[_Self, _P, _R]], _MethType[_Self, _P, _R]]: + """Execute a database job repeatedly until it succeeds, at most attempts times. - return wrapper + This wrapper handles InnoDB deadlocks and lock timeouts. + + This is different from retryable_database_job in that it will retry the job + attempts number of times instead of returning False if the job fails. + """ + + def decorator( + job: _MethType[_Self, _P, _R], + ) -> _MethType[_Self, _P, _R]: + return _database_job_retry_wrapper_func_or_meth( + job, description, attempts, True + ) return decorator +def _database_job_retry_wrapper_func_or_meth[**_P, _R]( + job: _FuncOrMethType[_P, _R], + description: str, + attempts: int, + method: bool, +) -> _FuncOrMethType[_P, _R]: + recorder_pos = 1 if method else 0 + + @functools.wraps(job) + def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R: + instance: Recorder = args[recorder_pos] # type: ignore[assignment] + for attempt in range(attempts): + try: + return job(*args, **kwargs) + except OperationalError as err: + # Failed with retryable error + if attempt == attempts - 1 or not _is_retryable_error(instance, err): + raise + assert isinstance(err.orig, BaseException) # noqa: PT017 + _LOGGER.info("%s; %s failed, retrying", err.orig.args[1], description) + time.sleep(instance.db_retry_wait) + + raise ValueError("attempts must be a positive integer") + + return wrapper + + def periodic_db_cleanups(instance: Recorder) -> None: """Run any database cleanups that need to happen periodically. 
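The util.py hunk above folds the function and method variants of the retry wrapper into one helper that reads the recorder instance from positional argument 0 (functions) or 1 (methods) and retries the job up to `attempts` times on a retryable error. A simplified, self-contained sketch of that decorator shape, with a generic RetryableError and a fake recorder standing in for OperationalError and the real Recorder:

import functools
import time
from collections.abc import Callable


class RetryableError(Exception):
    """Stand-in for a transient database error (deadlock, lock timeout)."""


class FakeRecorder:
    """Stand-in for Recorder; only the retry wait is needed here."""

    db_retry_wait = 0.01


def database_job_retry_wrapper(description: str, attempts: int, method: bool = False):
    """Retry a job up to `attempts` times, re-raising on the final failure."""
    recorder_pos = 1 if method else 0

    def decorator(job: Callable):
        @functools.wraps(job)
        def wrapper(*args, **kwargs):
            instance: FakeRecorder = args[recorder_pos]
            for attempt in range(attempts):
                try:
                    return job(*args, **kwargs)
                except RetryableError:
                    if attempt == attempts - 1:
                        raise
                    print(f"{description} failed, retrying")
                    time.sleep(instance.db_retry_wait)
            # Only reachable if attempts was not a positive integer.
            raise ValueError("attempts must be a positive integer")

        return wrapper

    return decorator


@database_job_retry_wrapper("demo job", attempts=3)
def flaky_job(instance: FakeRecorder, state: dict) -> str:
    """Hypothetical job that succeeds on the third attempt."""
    state["calls"] = state.get("calls", 0) + 1
    if state["calls"] < 3:
        raise RetryableError("deadlock")
    return "done"


if __name__ == "__main__":
    print(flaky_job(FakeRecorder(), {}))  # prints "done" after two retries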
diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index ac917e903df25e..8b8d1cfb0c6579 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -16,6 +16,7 @@ from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( + BloodGlugoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -54,6 +55,9 @@ UNIT_SCHEMA = vol.Schema( { + vol.Optional("blood_glucose_concentration"): vol.In( + BloodGlugoseConcentrationConverter.VALID_UNITS + ), vol.Optional("conductivity"): vol.In(ConductivityConverter.VALID_UNITS), vol.Optional("data_rate"): vol.In(DataRateConverter.VALID_UNITS), vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS), diff --git a/homeassistant/components/refoss/const.py b/homeassistant/components/refoss/const.py index 0542afe8afb104..851f8ba8f77c66 100644 --- a/homeassistant/components/refoss/const.py +++ b/homeassistant/components/refoss/const.py @@ -20,6 +20,9 @@ MAX_ERRORS = 2 +# Energy monitoring +SENSOR_EM = "em" + CHANNEL_DISPLAY_NAME: dict[str, dict[int, str]] = { "em06": { 1: "A1", @@ -28,5 +31,25 @@ 4: "A2", 5: "B2", 6: "C2", - } + }, + "em16": { + 1: "A1", + 2: "A2", + 3: "A3", + 4: "A4", + 5: "A5", + 6: "A6", + 7: "B1", + 8: "B2", + 9: "B3", + 10: "B4", + 11: "B5", + 12: "B6", + 13: "C1", + 14: "C2", + 15: "C3", + 16: "C4", + 17: "C5", + 18: "C6", + }, } diff --git a/homeassistant/components/refoss/sensor.py b/homeassistant/components/refoss/sensor.py index f65724ddd77282..26454cae48d239 100644 --- a/homeassistant/components/refoss/sensor.py +++ b/homeassistant/components/refoss/sensor.py @@ -31,6 +31,7 @@ COORDINATORS, DISPATCH_DEVICE_DISCOVERED, DOMAIN, + SENSOR_EM, ) from .entity import RefossEntity @@ -43,8 +44,13 @@ class RefossSensorEntityDescription(SensorEntityDescription): fn: Callable[[float], float] = lambda x: x +DEVICETYPE_SENSOR: dict[str, str] = { + "em06": SENSOR_EM, + "em16": SENSOR_EM, +} + SENSORS: dict[str, tuple[RefossSensorEntityDescription, ...]] = { - "em06": ( + SENSOR_EM: ( RefossSensorEntityDescription( key="power", translation_key="power", @@ -121,8 +127,11 @@ def init_device(coordinator: RefossDataUpdateCoordinator) -> None: if not isinstance(device, ElectricityXMix): return + + sensor_type = DEVICETYPE_SENSOR.get(device.device_type, "") + descriptions: tuple[RefossSensorEntityDescription, ...] 
= SENSORS.get( - device.device_type, () + sensor_type, () ) async_add_entities( diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index 4f0b8ae2664192..7a36991201a87b 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -10,7 +10,7 @@ from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.const import CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import ( @@ -22,7 +22,7 @@ from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DOMAIN +from .const import CONF_USE_HTTPS, DOMAIN from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin from .host import ReolinkHost from .services import async_setup_services @@ -83,6 +83,24 @@ async def async_setup_entry( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, host.stop) ) + # update the port info if needed for the next time + if ( + host.api.port != config_entry.data[CONF_PORT] + or host.api.use_https != config_entry.data[CONF_USE_HTTPS] + ): + _LOGGER.warning( + "HTTP(s) port of Reolink %s, changed from %s to %s", + host.api.nvr_name, + config_entry.data[CONF_PORT], + host.api.port, + ) + data = { + **config_entry.data, + CONF_PORT: host.api.port, + CONF_USE_HTTPS: host.api.use_https, + } + hass.config_entries.async_update_entry(config_entry, data=data) + async def async_device_config_update() -> None: """Update the host state cache and renew the ONVIF-subscription.""" async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)): @@ -134,6 +152,7 @@ async def async_check_firmware_update() -> None: device_coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=config_entry, name=f"reolink.{host.api.nvr_name}", update_method=async_device_config_update, update_interval=DEVICE_UPDATE_INTERVAL, @@ -141,6 +160,7 @@ async def async_check_firmware_update() -> None: firmware_coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=config_entry, name=f"reolink.{host.api.nvr_name}.firmware", update_method=async_check_firmware_update, update_interval=FIRMWARE_UPDATE_INTERVAL, diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py index c11161b11c7ffd..f6c64d0b0606fa 100644 --- a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -42,29 +42,34 @@ class ReolinkBinarySensorEntityDescription( BINARY_PUSH_SENSORS = ( ReolinkBinarySensorEntityDescription( key="motion", + cmd_id=33, device_class=BinarySensorDeviceClass.MOTION, value=lambda api, ch: api.motion_detected(ch), ), ReolinkBinarySensorEntityDescription( key=FACE_DETECTION_TYPE, + cmd_id=33, translation_key="face", value=lambda api, ch: api.ai_detected(ch, FACE_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, FACE_DETECTION_TYPE), ), ReolinkBinarySensorEntityDescription( key=PERSON_DETECTION_TYPE, + cmd_id=33, translation_key="person", value=lambda api, ch: api.ai_detected(ch, PERSON_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, PERSON_DETECTION_TYPE), ), 
ReolinkBinarySensorEntityDescription( key=VEHICLE_DETECTION_TYPE, + cmd_id=33, translation_key="vehicle", value=lambda api, ch: api.ai_detected(ch, VEHICLE_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, VEHICLE_DETECTION_TYPE), ), ReolinkBinarySensorEntityDescription( key=PET_DETECTION_TYPE, + cmd_id=33, translation_key="pet", value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE), supported=lambda api, ch: ( @@ -74,18 +79,21 @@ class ReolinkBinarySensorEntityDescription( ), ReolinkBinarySensorEntityDescription( key=PET_DETECTION_TYPE, + cmd_id=33, translation_key="animal", value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE), supported=lambda api, ch: api.supported(ch, "ai_animal"), ), ReolinkBinarySensorEntityDescription( key=PACKAGE_DETECTION_TYPE, + cmd_id=33, translation_key="package", value=lambda api, ch: api.ai_detected(ch, PACKAGE_DETECTION_TYPE), supported=lambda api, ch: api.ai_supported(ch, PACKAGE_DETECTION_TYPE), ), ReolinkBinarySensorEntityDescription( key="visitor", + cmd_id=33, translation_key="visitor", value=lambda api, ch: api.visitor_detected(ch), supported=lambda api, ch: api.is_doorbell(ch), diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index be88baf84e4e9a..0b1ed7b4b15bc2 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -7,7 +7,12 @@ from typing import Any from reolink_aio.api import ALLOWED_SPECIAL_CHARS -from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + LoginFirmwareError, + ReolinkError, +) import voluptuous as vol from homeassistant.components import dhcp @@ -49,10 +54,6 @@ class ReolinkOptionsFlowHandler(OptionsFlow): """Handle Reolink options.""" - def __init__(self, config_entry: ReolinkConfigEntry) -> None: - """Initialize ReolinkOptionsFlowHandler.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -107,7 +108,7 @@ def async_get_options_flow( config_entry: ReolinkConfigEntry, ) -> ReolinkOptionsFlowHandler: """Options callback for Reolink.""" - return ReolinkOptionsFlowHandler(config_entry) + return ReolinkOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -139,13 +140,10 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Perform a reconfiguration.""" - config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert config_entry is not None - self._host = config_entry.data[CONF_HOST] - self._username = config_entry.data[CONF_USERNAME] - self._password = config_entry.data[CONF_PASSWORD] + entry_data = self._get_reconfigure_entry().data + self._host = entry_data[CONF_HOST] + self._username = entry_data[CONF_USERNAME] + self._password = entry_data[CONF_PASSWORD] return await self.async_step_user() async def async_step_dhcp( @@ -236,6 +234,15 @@ async def async_step_user( placeholders["special_chars"] = ALLOWED_SPECIAL_CHARS except CredentialsInvalidError: errors[CONF_PASSWORD] = "invalid_auth" + except LoginFirmwareError: + errors["base"] = "update_needed" + placeholders["current_firmware"] = host.api.sw_version + placeholders["needed_firmware"] = ( + host.api.sw_version_required.version_string + ) + placeholders["download_center_url"] = ( + 
"https://reolink.com/download-center" + ) except ApiError as err: placeholders["error"] = str(err) errors[CONF_HOST] = "api_error" @@ -260,17 +267,16 @@ async def async_step_user( user_input[CONF_USE_HTTPS] = host.api.use_https mac_address = format_mac(host.api.mac_address) - existing_entry = await self.async_set_unique_id( - mac_address, raise_on_progress=False - ) - if existing_entry and self.init_step in ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - ): + await self.async_set_unique_id(mac_address, raise_on_progress=False) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + entry=self._get_reauth_entry(), data=user_input + ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() return self.async_update_reload_and_abort( - entry=existing_entry, - data=user_input, - reason=f"{self.init_step}_successful", + entry=self._get_reconfigure_entry(), data=user_input ) self._abort_if_unique_id_configured(updates=user_input) @@ -286,7 +292,7 @@ async def async_step_user( vol.Required(CONF_PASSWORD, default=self._password): str, } ) - if self._host is None or self.init_step == SOURCE_RECONFIGURE or errors: + if self._host is None or self.source == SOURCE_RECONFIGURE or errors: data_schema = data_schema.extend( { vol.Required(CONF_HOST, default=self._host): str, diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index d0a8f6dfc8d1b3..6101eee8a4c6f6 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -7,6 +7,7 @@ from reolink_aio.api import DUAL_LENS_MODELS, Chime, Host +from homeassistant.core import callback from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import ( @@ -23,6 +24,7 @@ class ReolinkEntityDescription(EntityDescription): """A class that describes entities for Reolink.""" cmd_key: str | None = None + cmd_id: int | None = None @dataclass(frozen=True, kw_only=True) @@ -90,18 +92,35 @@ def available(self) -> bool: """Return True if entity is available.""" return self._host.api.session_active and super().available + @callback + def _push_callback(self) -> None: + """Handle incoming TCP push event.""" + self.async_write_ha_state() + + def register_callback(self, unique_id: str, cmd_id: int) -> None: + """Register callback for TCP push events.""" + self._host.api.baichuan.register_callback( # pragma: no cover + unique_id, self._push_callback, cmd_id + ) + async def async_added_to_hass(self) -> None: """Entity created.""" await super().async_added_to_hass() cmd_key = self.entity_description.cmd_key + cmd_id = self.entity_description.cmd_id if cmd_key is not None: self._host.async_register_update_cmd(cmd_key) + if cmd_id is not None and self._attr_unique_id is not None: + self.register_callback(self._attr_unique_id, cmd_id) async def async_will_remove_from_hass(self) -> None: """Entity removed.""" cmd_key = self.entity_description.cmd_key + cmd_id = self.entity_description.cmd_id if cmd_key is not None: self._host.async_unregister_update_cmd(cmd_key) + if cmd_id is not None and self._attr_unique_id is not None: + self._host.api.baichuan.unregister_callback(self._attr_unique_id) await super().async_will_remove_from_hass() @@ -160,6 +179,12 @@ def available(self) -> bool: """Return True if entity is available.""" return super().available and 
self._host.api.camera_online(self._channel) + def register_callback(self, unique_id: str, cmd_id) -> None: + """Register callback for TCP push events.""" + self._host.api.baichuan.register_callback( + unique_id, self._push_callback, cmd_id, self._channel + ) + async def async_added_to_hass(self) -> None: """Entity created.""" await super().async_added_to_hass() diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index a90b9314440c3a..336876d4c4fc80 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -41,6 +41,7 @@ ) DEFAULT_TIMEOUT = 30 +FIRST_TCP_PUSH_TIMEOUT = 10 FIRST_ONVIF_TIMEOUT = 10 FIRST_ONVIF_LONG_POLL_TIMEOUT = 90 SUBSCRIPTION_RENEW_THRESHOLD = 300 @@ -105,6 +106,7 @@ def get_aiohttp_session() -> aiohttp.ClientSession: self._long_poll_received: bool = False self._long_poll_error: bool = False self._cancel_poll: CALLBACK_TYPE | None = None + self._cancel_tcp_push_check: CALLBACK_TYPE | None = None self._cancel_onvif_check: CALLBACK_TYPE | None = None self._cancel_long_poll_check: CALLBACK_TYPE | None = None self._poll_job = HassJob(self._async_poll_all_motion, cancel_on_shutdown=True) @@ -220,6 +222,58 @@ async def async_init(self) -> None: else: self._unique_id = format_mac(self._api.mac_address) + try: + await self._api.baichuan.subscribe_events() + except ReolinkError: + await self._async_check_tcp_push() + else: + self._cancel_tcp_push_check = async_call_later( + self._hass, FIRST_TCP_PUSH_TIMEOUT, self._async_check_tcp_push + ) + + ch_list: list[int | None] = [None] + if self._api.is_nvr: + ch_list.extend(self._api.channels) + for ch in ch_list: + if not self._api.supported(ch, "firmware"): + continue + + key = ch if ch is not None else "host" + if self._api.camera_sw_version_update_required(ch): + ir.async_create_issue( + self._hass, + DOMAIN, + f"firmware_update_{key}", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key="firmware_update", + translation_placeholders={ + "required_firmware": self._api.camera_sw_version_required( + ch + ).version_string, + "current_firmware": self._api.camera_sw_version(ch), + "model": self._api.camera_model(ch), + "hw_version": self._api.camera_hardware_version(ch), + "name": self._api.camera_name(ch), + "download_link": "https://reolink.com/download-center/", + }, + ) + else: + ir.async_delete_issue(self._hass, DOMAIN, f"firmware_update_{key}") + + async def _async_check_tcp_push(self, *_) -> None: + """Check the TCP push subscription.""" + if self._api.baichuan.events_active: + ir.async_delete_issue(self._hass, DOMAIN, "webhook_url") + self._cancel_tcp_push_check = None + return + + _LOGGER.debug( + "Reolink %s, did not receive initial TCP push event after %i seconds", + self._api.nvr_name, + FIRST_TCP_PUSH_TIMEOUT, + ) + if self._onvif_push_supported: try: await self.subscribe() @@ -242,6 +296,8 @@ async def async_init(self) -> None: self._cancel_onvif_check = async_call_later( self._hass, FIRST_ONVIF_TIMEOUT, self._async_check_onvif ) + + # start long polling if ONVIF push failed immediately if not self._onvif_push_supported: _LOGGER.debug( "Camera model %s does not support ONVIF push, using ONVIF long polling instead", @@ -264,35 +320,7 @@ async def async_init(self) -> None: self._async_check_onvif_long_poll, ) - ch_list: list[int | None] = [None] - if self._api.is_nvr: - ch_list.extend(self._api.channels) - for ch in ch_list: - if not self._api.supported(ch, "firmware"): - continue - - key = ch if ch is not None else 
"host" - if self._api.camera_sw_version_update_required(ch): - ir.async_create_issue( - self._hass, - DOMAIN, - f"firmware_update_{key}", - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key="firmware_update", - translation_placeholders={ - "required_firmware": self._api.camera_sw_version_required( - ch - ).version_string, - "current_firmware": self._api.camera_sw_version(ch), - "model": self._api.camera_model(ch), - "hw_version": self._api.camera_hardware_version(ch), - "name": self._api.camera_name(ch), - "download_link": "https://reolink.com/download-center/", - }, - ) - else: - ir.async_delete_issue(self._hass, DOMAIN, f"firmware_update_{key}") + self._cancel_tcp_push_check = None async def _async_check_onvif(self, *_) -> None: """Check the ONVIF subscription.""" @@ -391,6 +419,16 @@ async def update_states(self) -> None: async def disconnect(self) -> None: """Disconnect from the API, so the connection will be released.""" + try: + await self._api.baichuan.unsubscribe_events() + except ReolinkError as err: + _LOGGER.error( + "Reolink error while unsubscribing Baichuan from host %s:%s: %s", + self._api.host, + self._api.port, + err, + ) + try: await self._api.unsubscribe() except ReolinkError as err: @@ -461,6 +499,9 @@ async def stop(self, event=None) -> None: if self._cancel_poll is not None: self._cancel_poll() self._cancel_poll = None + if self._cancel_tcp_push_check is not None: + self._cancel_tcp_push_check() + self._cancel_tcp_push_check = None if self._cancel_onvif_check is not None: self._cancel_onvif_check() self._cancel_onvif_check = None @@ -494,8 +535,13 @@ async def subscribe(self) -> None: async def renew(self) -> None: """Renew the subscription of motion events (lease time is 15 minutes).""" + if self._api.baichuan.events_active and self._api.subscribed(SubType.push): + # TCP push active, unsubscribe from ONVIF push because not needed + self.unregister_webhook() + await self._api.unsubscribe() + try: - if self._onvif_push_supported: + if self._onvif_push_supported and not self._api.baichuan.events_active: await self._renew(SubType.push) if self._onvif_long_poll_supported and self._long_poll_task is not None: @@ -608,7 +654,8 @@ async def _async_long_polling(self, *_) -> None: """Use ONVIF long polling to immediately receive events.""" # This task will be cancelled once _async_stop_long_polling is called while True: - if self._webhook_reachable: + if self._api.baichuan.events_active or self._webhook_reachable: + # TCP push or ONVIF push working, stop long polling self._long_poll_task = None await self._async_stop_long_polling() return @@ -642,8 +689,12 @@ async def _async_long_polling(self, *_) -> None: async def _async_poll_all_motion(self, *_) -> None: """Poll motion and AI states until the first ONVIF push is received.""" - if self._webhook_reachable or self._long_poll_received: - # ONVIF push or long polling is working, stop fast polling + if ( + self._api.baichuan.events_active + or self._webhook_reachable + or self._long_poll_received + ): + # TCP push, ONVIF push or long polling is working, stop fast polling self._cancel_poll = None return @@ -747,6 +798,8 @@ def _signal_write_ha_state(self, channels: list[int] | None) -> None: @property def event_connection(self) -> str: """Type of connection to receive events.""" + if self._api.baichuan.events_active: + return "TCP push" if self._webhook_reachable: return "ONVIF push" if self._long_poll_received: diff --git a/homeassistant/components/reolink/icons.json 
b/homeassistant/components/reolink/icons.json index 5815e165607a14..d333a8a0201081 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -246,6 +246,12 @@ "off": "mdi:music-note-off" } }, + "vehicle_tone": { + "default": "mdi:music-note", + "state": { + "off": "mdi:music-note-off" + } + }, "visitor_tone": { "default": "mdi:music-note", "state": { @@ -261,7 +267,10 @@ }, "sensor": { "ptz_pan_position": { - "default": "mdi:pan" + "default": "mdi:pan-horizontal" + }, + "ptz_tilt_position": { + "default": "mdi:pan-vertical" }, "battery_temperature": { "default": "mdi:thermometer" diff --git a/homeassistant/components/reolink/light.py b/homeassistant/components/reolink/light.py index d545a878068070..0f239a30813857 100644 --- a/homeassistant/components/reolink/light.py +++ b/homeassistant/components/reolink/light.py @@ -57,6 +57,7 @@ class ReolinkHostLightEntityDescription( ReolinkLightEntityDescription( key="floodlight", cmd_key="GetWhiteLed", + cmd_id=291, translation_key="floodlight", supported=lambda api, ch: api.supported(ch, "floodLight"), is_on_fn=lambda api, ch: api.whiteled_state(ch), diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 9e05cf7431ef7f..23a46c5e1c992b 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.9.11"] + "requirements": ["reolink-aio==0.10.4"] } diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index b4175d4106966e..a444997a907def 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -197,6 +197,16 @@ def _get_quick_reply_id(api: Host, ch: int, mess: str) -> int: value=lambda chime: ChimeToneEnum(chime.tone("people")).name, method=lambda chime, name: chime.set_tone("people", ChimeToneEnum[name].value), ), + ReolinkChimeSelectEntityDescription( + key="vehicle_tone", + cmd_key="GetDingDongCfg", + translation_key="vehicle_tone", + entity_category=EntityCategory.CONFIG, + get_options=[method.name for method in ChimeToneEnum], + supported=lambda chime: "vehicle" in chime.chime_event_types, + value=lambda chime: ChimeToneEnum(chime.tone("vehicle")).name, + method=lambda chime, name: chime.set_tone("vehicle", ChimeToneEnum[name].value), + ), ReolinkChimeSelectEntityDescription( key="visitor_tone", cmd_key="GetDingDongCfg", @@ -272,7 +282,7 @@ def current_option(self) -> str | None: try: option = self.entity_description.value(self._host.api, self._channel) - except ValueError: + except (ValueError, KeyError): if self._log_error: _LOGGER.exception("Reolink '%s' has an unknown value", self.name) self._log_error = False @@ -314,7 +324,7 @@ def current_option(self) -> str | None: """Return the current option.""" try: option = self.entity_description.value(self._chime) - except ValueError: + except (ValueError, KeyError): if self._log_error: _LOGGER.exception("Reolink '%s' has an unknown value", self.name) self._log_error = False diff --git a/homeassistant/components/reolink/sensor.py b/homeassistant/components/reolink/sensor.py index c2fc815235ee53..80e58c3d5c28ba 100644 --- a/homeassistant/components/reolink/sensor.py +++ b/homeassistant/components/reolink/sensor.py @@ -58,7 +58,16 @@ class 
ReolinkHostSensorEntityDescription( state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda api, ch: api.ptz_pan_position(ch), - supported=lambda api, ch: api.supported(ch, "ptz_position"), + supported=lambda api, ch: api.supported(ch, "ptz_pan_position"), + ), + ReolinkSensorEntityDescription( + key="ptz_tilt_position", + cmd_key="GetPtzCurPos", + translation_key="ptz_tilt_position", + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + value=lambda api, ch: api.ptz_tilt_position(ch), + supported=lambda api, ch: api.supported(ch, "ptz_tilt_position"), ), ReolinkSensorEntityDescription( key="battery_percent", diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 4ec4dcffdfdcfb..1d699b7b658445 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -31,6 +31,7 @@ "not_admin": "User needs to be admin, user \"{username}\" has authorisation level \"{userlevel}\"", "password_incompatible": "Password contains incompatible special character, only these characters are allowed: a-z, A-Z, 0-9 or {special_chars}", "unknown": "[%key:common::config_flow::error::unknown%]", + "update_needed": "Failed to login because of outdated firmware, please update the firmware to version {needed_firmware} using the Reolink Download Center: {download_center_url}, currently version {current_firmware} is installed", "webhook_exception": "Home Assistant URL is not available, go to Settings > System > Network > Home Assistant URL and correct the URLs, see {more_info}" }, "abort": { @@ -605,6 +606,22 @@ "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" } }, + "vehicle_tone": { + "name": "Vehicle ringtone", + "state": { + "off": "[%key:common::state::off%]", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } + }, "visitor_tone": { "name": "Visitor ringtone", "state": { @@ -648,6 +665,9 @@ "ptz_pan_position": { "name": "PTZ pan position" }, + "ptz_tilt_position": { + "name": "PTZ tilt position" + }, "battery_temperature": { "name": "Battery temperature" }, diff --git a/homeassistant/components/repairs/issue_handler.py b/homeassistant/components/repairs/issue_handler.py index b0b3f82a5d60ee..cc7e017699dbed 100644 --- a/homeassistant/components/repairs/issue_handler.py +++ b/homeassistant/components/repairs/issue_handler.py @@ -53,7 +53,7 @@ async def async_create_flow( self, handler_key: str, *, - context: dict[str, Any] | None = None, + context: data_entry_flow.FlowContext | None = None, data: dict[str, Any] | None = None, ) -> RepairsFlow: 
"""Create a flow. platform is a repairs module.""" diff --git a/homeassistant/components/rest/__init__.py b/homeassistant/components/rest/__init__.py index 59239ad6744835..5695e51933e89b 100644 --- a/homeassistant/components/rest/__init__.py +++ b/homeassistant/components/rest/__init__.py @@ -180,6 +180,7 @@ async def _async_refresh_with_templates() -> None: return DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name="rest data", update_method=update_method, update_interval=update_interval, diff --git a/homeassistant/components/rflink/cover.py b/homeassistant/components/rflink/cover.py index a6148ed7760d08..695825cf31b981 100644 --- a/homeassistant/components/rflink/cover.py +++ b/homeassistant/components/rflink/cover.py @@ -10,8 +10,9 @@ from homeassistant.components.cover import ( PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, CoverEntity, + CoverState, ) -from homeassistant.const import CONF_DEVICES, CONF_NAME, CONF_TYPE, STATE_OPEN +from homeassistant.const import CONF_DEVICES, CONF_NAME, CONF_TYPE from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -133,7 +134,7 @@ async def async_added_to_hass(self) -> None: """Restore RFLink cover state (OPEN/CLOSE).""" await super().async_added_to_hass() if (old_state := await self.async_get_last_state()) is not None: - self._state = old_state.state == STATE_OPEN + self._state = old_state.state == CoverState.OPEN def _handle_event(self, event): """Adjust state if Rflink picks up a remote command for this device.""" diff --git a/homeassistant/components/rflink/sensor.py b/homeassistant/components/rflink/sensor.py index 68b7847423ca3a..89632ac50b301b 100644 --- a/homeassistant/components/rflink/sensor.py +++ b/homeassistant/components/rflink/sensor.py @@ -71,6 +71,8 @@ native_unit_of_measurement=UnitOfPressure.HPA, ), SensorEntityDescription( + # Rflink devices reports ok/low so device class can’t be used + # It should be migrated to a binary sensor key="battery", name="Battery", icon="mdi:battery", diff --git a/homeassistant/components/rfxtrx/config_flow.py b/homeassistant/components/rfxtrx/config_flow.py index ceb9bea46615c6..866d9ecb1bb217 100644 --- a/homeassistant/components/rfxtrx/config_flow.py +++ b/homeassistant/components/rfxtrx/config_flow.py @@ -87,9 +87,8 @@ class RfxtrxOptionsFlow(OptionsFlow): _device_registry: dr.DeviceRegistry _device_entries: list[dr.DeviceEntry] - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize rfxtrx options flow.""" - self._config_entry = config_entry self._global_options: dict[str, Any] = {} self._selected_device: dict[str, Any] = {} self._selected_device_entry_id: str | None = None @@ -120,9 +119,7 @@ async def async_step_prompt_options( event_code = device_data["event_code"] assert event_code self._selected_device_event_code = event_code - self._selected_device = self._config_entry.data[CONF_DEVICES][ - event_code - ] + self._selected_device = self.config_entry.data[CONF_DEVICES][event_code] self._selected_device_object = get_rfx_object(event_code) return await self.async_step_set_device_options() if CONF_EVENT_CODE in user_input: @@ -148,7 +145,7 @@ async def async_step_prompt_options( device_registry = dr.async_get(self.hass) device_entries = dr.async_entries_for_config_entry( - device_registry, self._config_entry.entry_id + device_registry, self.config_entry.entry_id ) self._device_registry = device_registry self._device_entries = 
device_entries @@ -162,11 +159,11 @@ async def async_step_prompt_options( options = { vol.Optional( CONF_AUTOMATIC_ADD, - default=self._config_entry.data[CONF_AUTOMATIC_ADD], + default=self.config_entry.data[CONF_AUTOMATIC_ADD], ): bool, vol.Optional( CONF_PROTOCOLS, - default=self._config_entry.data.get(CONF_PROTOCOLS) or [], + default=self.config_entry.data.get(CONF_PROTOCOLS) or [], ): cv.multi_select(RECV_MODES), vol.Optional(CONF_EVENT_CODE): str, vol.Optional(CONF_DEVICE): vol.In(configure_devices), @@ -425,7 +422,7 @@ def _handle_state_added(event: Event[EventStateChangedData]) -> None: def _can_add_device(self, new_rfx_obj: rfxtrxmod.RFXtrxEvent) -> bool: """Check if device does not already exist.""" new_device_id = get_device_id(new_rfx_obj.device) - for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items(): + for packet_id, entity_info in self.config_entry.data[CONF_DEVICES].items(): rfx_obj = get_rfx_object(packet_id) assert rfx_obj @@ -468,7 +465,7 @@ def _get_device_data(self, entry_id: str) -> DeviceData: assert entry device_id = get_device_tuple_from_identifiers(entry.identifiers) assert device_id - for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items(): + for packet_id, entity_info in self.config_entry.data[CONF_DEVICES].items(): if tuple(entity_info.get(CONF_DEVICE_ID)) == device_id: event_code = cast(str, packet_id) break @@ -481,8 +478,8 @@ def update_config_data( devices: dict[str, Any] | None = None, ) -> None: """Update data in ConfigEntry.""" - entry_data = self._config_entry.data.copy() - entry_data[CONF_DEVICES] = copy.deepcopy(self._config_entry.data[CONF_DEVICES]) + entry_data = self.config_entry.data.copy() + entry_data[CONF_DEVICES] = copy.deepcopy(self.config_entry.data[CONF_DEVICES]) if global_options: entry_data.update(global_options) if devices: @@ -494,9 +491,9 @@ def update_config_data( entry_data[CONF_DEVICES].pop(event_code, None) else: entry_data[CONF_DEVICES][event_code] = options - self.hass.config_entries.async_update_entry(self._config_entry, data=entry_data) + self.hass.config_entries.async_update_entry(self.config_entry, data=entry_data) self.hass.async_create_task( - self.hass.config_entries.async_reload(self._config_entry.entry_id) + self.hass.config_entries.async_reload(self.config_entry.entry_id) ) @@ -637,9 +634,11 @@ async def async_validate_rfx( @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> RfxtrxOptionsFlow: """Get the options flow for this handler.""" - return RfxtrxOptionsFlow(config_entry) + return RfxtrxOptionsFlow() def _test_transport(host: str | None, port: int | None, device: str | None) -> bool: diff --git a/homeassistant/components/rfxtrx/cover.py b/homeassistant/components/rfxtrx/cover.py index 1d3bdf269103e4..473a0d94056d44 100644 --- a/homeassistant/components/rfxtrx/cover.py +++ b/homeassistant/components/rfxtrx/cover.py @@ -7,9 +7,8 @@ import RFXtrx as rfxtrxmod -from homeassistant.components.cover import CoverEntity, CoverEntityFeature +from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OPEN from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -97,7 +96,7 @@ async def async_added_to_hass(self) -> None: if self._event is 
None: old_state = await self.async_get_last_state() if old_state is not None: - self._attr_is_closed = old_state.state != STATE_OPEN + self._attr_is_closed = old_state.state != CoverState.OPEN async def async_open_cover(self, **kwargs: Any) -> None: """Move the cover up.""" diff --git a/homeassistant/components/rhasspy/config_flow.py b/homeassistant/components/rhasspy/config_flow.py index 114d74d4d05134..ea79f6b8845cd4 100644 --- a/homeassistant/components/rhasspy/config_flow.py +++ b/homeassistant/components/rhasspy/config_flow.py @@ -20,9 +20,6 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form(step_id="user", data_schema=vol.Schema({})) diff --git a/homeassistant/components/rhasspy/manifest.json b/homeassistant/components/rhasspy/manifest.json index 2675935618c615..f3496f7eeab0ce 100644 --- a/homeassistant/components/rhasspy/manifest.json +++ b/homeassistant/components/rhasspy/manifest.json @@ -5,5 +5,6 @@ "config_flow": true, "dependencies": ["intent"], "documentation": "https://www.home-assistant.io/integrations/rhasspy", - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true } diff --git a/homeassistant/components/rhasspy/strings.json b/homeassistant/components/rhasspy/strings.json index 4d2111ebd8ac6e..3d574d30117eee 100644 --- a/homeassistant/components/rhasspy/strings.json +++ b/homeassistant/components/rhasspy/strings.json @@ -4,9 +4,6 @@ "user": { "description": "Do you want to enable Rhasspy support?" } - }, - "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } } } diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index c1042a9546d16f..b2340b3455613b 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -10,13 +10,9 @@ from ring_doorbell import Auth, Ring, RingDevices from homeassistant.config_entries import ConfigEntry -from homeassistant.const import APPLICATION_NAME, CONF_TOKEN +from homeassistant.const import APPLICATION_NAME, CONF_DEVICE_ID, CONF_TOKEN from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import ( - device_registry as dr, - entity_registry as er, - instance_id, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_LISTEN_CREDENTIALS, DOMAIN, PLATFORMS @@ -38,18 +34,12 @@ class RingData: type RingConfigEntry = ConfigEntry[RingData] -async def get_auth_agent_id(hass: HomeAssistant) -> tuple[str, str]: - """Return user-agent and hardware id for Auth instantiation. +def get_auth_user_agent() -> str: + """Return user-agent for Auth instantiation. user_agent will be the display name in the ring.com authorised devices. - hardware_id will uniquely describe the authorised HA device. 
""" - user_agent = f"{APPLICATION_NAME}/{DOMAIN}-integration" - - # Generate a new uuid from the instance_uuid to keep the HA one private - instance_uuid = uuid.UUID(hex=await instance_id.async_get(hass)) - hardware_id = str(uuid.uuid5(instance_uuid, user_agent)) - return user_agent, hardware_id + return f"{APPLICATION_NAME}/{DOMAIN}-integration" async def async_setup_entry(hass: HomeAssistant, entry: RingConfigEntry) -> bool: @@ -69,13 +59,13 @@ def listen_credentials_updater(token: dict[str, Any]) -> None: data={**entry.data, CONF_LISTEN_CREDENTIALS: token}, ) - user_agent, hardware_id = await get_auth_agent_id(hass) + user_agent = get_auth_user_agent() client_session = async_get_clientsession(hass) auth = Auth( user_agent, entry.data[CONF_TOKEN], token_updater, - hardware_id=hardware_id, + hardware_id=entry.data[CONF_DEVICE_ID], http_client_session=client_session, ) ring = Ring(auth) @@ -138,3 +128,25 @@ def _async_migrator(entity_entry: er.RegistryEntry) -> dict[str, str] | None: return None await er.async_migrate_entries(hass, entry_id, _async_migrator) + + +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate old config entry.""" + entry_version = entry.version + entry_minor_version = entry.minor_version + + new_minor_version = 2 + if entry_version == 1 and entry_minor_version == 1: + _LOGGER.debug( + "Migrating from version %s.%s", entry_version, entry_minor_version + ) + hardware_id = str(uuid.uuid4()) + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_DEVICE_ID: hardware_id}, + minor_version=new_minor_version, + ) + _LOGGER.debug( + "Migration to version %s.%s complete", entry_version, new_minor_version + ) + return True diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index aa78164eb6d714..a1024186349b07 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -3,20 +3,32 @@ from collections.abc import Mapping import logging from typing import Any +import uuid from ring_doorbell import Auth, AuthenticationError, Requires2FAError import voluptuous as vol from homeassistant.components import dhcp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_NAME, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.device_registry as dr -from . import get_auth_agent_id -from .const import CONF_2FA, DOMAIN +from . 
import get_auth_user_agent +from .const import CONF_2FA, CONF_CONFIG_ENTRY_MINOR_VERSION, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -25,13 +37,17 @@ ) STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) +STEP_RECONFIGURE_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) + UNKNOWN_RING_ACCOUNT = "unknown_ring_account" -async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, Any]: +async def validate_input( + hass: HomeAssistant, hardware_id: str, data: dict[str, str] +) -> dict[str, Any]: """Validate the user input allows us to connect.""" - user_agent, hardware_id = await get_auth_agent_id(hass) + user_agent = get_auth_user_agent() auth = Auth( user_agent, http_client_session=async_get_clientsession(hass), @@ -56,9 +72,10 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Ring.""" VERSION = 1 + MINOR_VERSION = CONF_CONFIG_ENTRY_MINOR_VERSION user_pass: dict[str, Any] = {} - reauth_entry: ConfigEntry | None = None + hardware_id: str | None = None async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -87,8 +104,10 @@ async def async_step_user( if user_input is not None: await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() + if not self.hardware_id: + self.hardware_id = str(uuid.uuid4()) try: - token = await validate_input(self.hass, user_input) + token = await validate_input(self.hass, self.hardware_id, user_input) except Require2FA: self.user_pass = user_input @@ -101,7 +120,11 @@ async def async_step_user( else: return self.async_create_entry( title=user_input[CONF_USERNAME], - data={CONF_USERNAME: user_input[CONF_USERNAME], CONF_TOKEN: token}, + data={ + CONF_DEVICE_ID: self.hardware_id, + CONF_USERNAME: user_input[CONF_USERNAME], + CONF_TOKEN: token, + }, ) return self.async_show_form( @@ -113,11 +136,16 @@ async def async_step_2fa( ) -> ConfigFlowResult: """Handle 2fa step.""" if user_input: - if self.reauth_entry: + if self.source == SOURCE_REAUTH: return await self.async_step_reauth_confirm( {**self.user_pass, **user_input} ) + if self.source == SOURCE_RECONFIGURE: + return await self.async_step_reconfigure( + {**self.user_pass, **user_input} + ) + return await self.async_step_user({**self.user_pass, **user_input}) return self.async_show_form( @@ -129,9 +157,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -139,12 +164,17 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - assert self.reauth_entry is not None + reauth_entry = self._get_reauth_entry() if user_input: - user_input[CONF_USERNAME] = self.reauth_entry.data[CONF_USERNAME] + user_input[CONF_USERNAME] = reauth_entry.data[CONF_USERNAME] + # Reauth will use the same hardware id and re-authorise an existing + # authorised device. 
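The reauth handling here is the same shape this diff applies to roborock, ruckus_unleashed, rympro, samsungtv and schlage further down: the flow no longer stores a reauth_entry attribute looked up via self.context["entry_id"]; it asks the base class for the entry and finishes with async_update_reload_and_abort. A minimal sketch of that pattern, using only helpers that already appear in these hunks (ExampleConfigFlow, DOMAIN, STEP_SCHEMA and _async_validate are placeholders):

    from collections.abc import Mapping
    from typing import Any

    from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

    DOMAIN = "example"  # placeholder domain
    STEP_SCHEMA = None  # placeholder voluptuous schema


    class ExampleConfigFlow(ConfigFlow, domain=DOMAIN):
        """Config flow using the built-in reauth helpers."""

        async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> ConfigFlowResult:
            # No manual self.hass.config_entries.async_get_entry(...) bookkeeping.
            return await self.async_step_reauth_confirm()

        async def async_step_reauth_confirm(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            reauth_entry = self._get_reauth_entry()
            if user_input is not None:
                token = await self._async_validate(reauth_entry.data, user_input)
                # Merges data_updates into the entry data, reloads the entry and
                # aborts with the "reauth_successful" reason.
                return self.async_update_reload_and_abort(
                    reauth_entry, data_updates={"token": token}
                )
            return self.async_show_form(step_id="reauth_confirm", data_schema=STEP_SCHEMA)

        async def _async_validate(
            self, data: Mapping[str, Any], user_input: dict[str, Any]
        ) -> str:
            """Placeholder for the integration-specific credential check."""
            raise NotImplementedError

The same base-class accessor exists for reconfigure flows (_get_reconfigure_entry()), which is what the new ring reconfigure step below relies on to keep the username fixed while issuing a new hardware id.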
+ if not self.hardware_id: + self.hardware_id = reauth_entry.data[CONF_DEVICE_ID] + assert self.hardware_id try: - token = await validate_input(self.hass, user_input) + token = await validate_input(self.hass, self.hardware_id, user_input) except Require2FA: self.user_pass = user_input return await self.async_step_2fa() @@ -157,19 +187,59 @@ async def async_step_reauth_confirm( data = { CONF_USERNAME: user_input[CONF_USERNAME], CONF_TOKEN: token, + CONF_DEVICE_ID: self.hardware_id, } - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=data - ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) return self.async_show_form( step_id="reauth_confirm", data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, description_placeholders={ - CONF_USERNAME: self.reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.data[CONF_USERNAME], + }, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Trigger a reconfiguration flow.""" + errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() + username = reconfigure_entry.data[CONF_USERNAME] + await self.async_set_unique_id(username) + if user_input: + user_input[CONF_USERNAME] = username + # Reconfigure will generate a new hardware id and create a new + # authorised device at ring.com. + if not self.hardware_id: + self.hardware_id = str(uuid.uuid4()) + try: + assert self.hardware_id + token = await validate_input(self.hass, self.hardware_id, user_input) + except Require2FA: + self.user_pass = user_input + return await self.async_step_2fa() + except InvalidAuth: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + data = { + CONF_USERNAME: username, + CONF_TOKEN: token, + CONF_DEVICE_ID: self.hardware_id, + } + return self.async_update_reload_and_abort(reconfigure_entry, data=data) + + return self.async_show_form( + step_id="reconfigure", + data_schema=STEP_RECONFIGURE_DATA_SCHEMA, + errors=errors, + description_placeholders={ + CONF_USERNAME: username, }, ) diff --git a/homeassistant/components/ring/const.py b/homeassistant/components/ring/const.py index 24801045b17151..9595241ebb1425 100644 --- a/homeassistant/components/ring/const.py +++ b/homeassistant/components/ring/const.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Final from homeassistant.const import Platform @@ -31,3 +32,5 @@ CONF_2FA = "2fa" CONF_LISTEN_CREDENTIALS = "listen_token" + +CONF_CONFIG_ENTRY_MINOR_VERSION: Final = 2 diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index 7eff30c18cb2ca..63c47cb2979b6f 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -30,5 +30,5 @@ "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], "quality_scale": "silver", - "requirements": ["ring-doorbell==0.9.7"] + "requirements": ["ring-doorbell==0.9.9"] } diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json index 5d282fae1b2436..0887e4112c628c 100644 --- a/homeassistant/components/ring/strings.json +++ b/homeassistant/components/ring/strings.json @@ -20,6 +20,13 @@ "data": { "password": 
"[%key:common::config_flow::data::password%]" } + }, + "reconfigure": { + "title": "Reconfigure Ring Integration", + "description": "Will create a new Authorized Device for {username} at ring.com", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { @@ -28,7 +35,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { diff --git a/homeassistant/components/risco/alarm_control_panel.py b/homeassistant/components/risco/alarm_control_panel.py index 08dee936d37797..b1eae8fd91760f 100644 --- a/homeassistant/components/risco/alarm_control_panel.py +++ b/homeassistant/components/risco/alarm_control_panel.py @@ -12,19 +12,11 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_PIN, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) +from homeassistant.const import CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -48,10 +40,10 @@ _LOGGER = logging.getLogger(__name__) STATES_TO_SUPPORTED_FEATURES = { - STATE_ALARM_ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, - STATE_ALARM_ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, + AlarmControlPanelState.ARMED_AWAY: AlarmControlPanelEntityFeature.ARM_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME: AlarmControlPanelEntityFeature.ARM_HOME, + AlarmControlPanelState.ARMED_NIGHT: AlarmControlPanelEntityFeature.ARM_NIGHT, } @@ -116,14 +108,14 @@ def __init__( self._attr_supported_features |= STATES_TO_SUPPORTED_FEATURES[state] @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if self._partition.triggered: - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED if self._partition.arming: - return STATE_ALARM_ARMING + return AlarmControlPanelState.ARMING if self._partition.disarmed: - return STATE_ALARM_DISARMED + return AlarmControlPanelState.DISARMED if self._partition.armed: return self._risco_to_ha[RISCO_ARM] if self._partition.partially_armed: @@ -148,21 +140,21 @@ async def async_alarm_disarm(self, code: str | None = None) -> None: async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - await self._arm(STATE_ALARM_ARMED_HOME, code) + await self._arm(AlarmControlPanelState.ARMED_HOME, code) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - await self._arm(STATE_ALARM_ARMED_AWAY, code) + await self._arm(AlarmControlPanelState.ARMED_AWAY, code) async def async_alarm_arm_night(self, code: str | 
None = None) -> None: """Send arm night command.""" - await self._arm(STATE_ALARM_ARMED_NIGHT, code) + await self._arm(AlarmControlPanelState.ARMED_NIGHT, code) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Send arm custom bypass command.""" - await self._arm(STATE_ALARM_ARMED_CUSTOM_BYPASS, code) + await self._arm(AlarmControlPanelState.ARMED_CUSTOM_BYPASS, code) - async def _arm(self, mode: str, code: str | None) -> None: + async def _arm(self, mode: AlarmControlPanelState, code: str | None) -> None: if self.code_arm_required and not self._validate_code(code): _LOGGER.warning("Wrong code entered for %s", mode) return diff --git a/homeassistant/components/risco/config_flow.py b/homeassistant/components/risco/config_flow.py index 735880df09b109..f7365d354147b7 100644 --- a/homeassistant/components/risco/config_flow.py +++ b/homeassistant/components/risco/config_flow.py @@ -9,6 +9,7 @@ from pyrisco import CannotConnectError, RiscoCloud, RiscoLocal, UnauthorizedError import voluptuous as vol +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -23,10 +24,6 @@ CONF_SCAN_INTERVAL, CONF_TYPE, CONF_USERNAME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -64,10 +61,10 @@ } ) HA_STATES = [ - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_AWAY.value, + AlarmControlPanelState.ARMED_HOME.value, + AlarmControlPanelState.ARMED_NIGHT.value, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS.value, ] @@ -223,7 +220,6 @@ class RiscoOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize.""" - self.config_entry = config_entry self._data = {**DEFAULT_OPTIONS, **config_entry.options} def _options_schema(self) -> vol.Schema: diff --git a/homeassistant/components/risco/const.py b/homeassistant/components/risco/const.py index f1240a704de078..078e26c43b5405 100644 --- a/homeassistant/components/risco/const.py +++ b/homeassistant/components/risco/const.py @@ -1,10 +1,7 @@ """Constants for the Risco integration.""" -from homeassistant.const import ( - CONF_SCAN_INTERVAL, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState +from homeassistant.const import CONF_SCAN_INTERVAL DOMAIN = "risco" @@ -33,16 +30,18 @@ RISCO_PARTIAL_ARM = "partial_arm" RISCO_STATES = [RISCO_ARM, RISCO_PARTIAL_ARM, *RISCO_GROUPS] -DEFAULT_RISCO_GROUPS_TO_HA = {group: STATE_ALARM_ARMED_HOME for group in RISCO_GROUPS} +DEFAULT_RISCO_GROUPS_TO_HA = { + group: AlarmControlPanelState.ARMED_HOME for group in RISCO_GROUPS +} DEFAULT_RISCO_STATES_TO_HA = { - RISCO_ARM: STATE_ALARM_ARMED_AWAY, - RISCO_PARTIAL_ARM: STATE_ALARM_ARMED_HOME, + RISCO_ARM: AlarmControlPanelState.ARMED_AWAY, + RISCO_PARTIAL_ARM: AlarmControlPanelState.ARMED_HOME, **DEFAULT_RISCO_GROUPS_TO_HA, } DEFAULT_HA_STATES_TO_RISCO = { - STATE_ALARM_ARMED_AWAY: RISCO_ARM, - STATE_ALARM_ARMED_HOME: RISCO_PARTIAL_ARM, + AlarmControlPanelState.ARMED_AWAY: RISCO_ARM, + AlarmControlPanelState.ARMED_HOME: RISCO_PARTIAL_ARM, } DEFAULT_OPTIONS = { diff --git a/homeassistant/components/risco/strings.json 
b/homeassistant/components/risco/strings.json index e35b13394cbbcb..86d131b4f80c4d 100644 --- a/homeassistant/components/risco/strings.json +++ b/homeassistant/components/risco/strings.json @@ -28,7 +28,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "options": { diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index bb42c0bd080ddb..d1cbccc6b057f7 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -169,7 +169,7 @@ async def setup_device_v1( ) -> RoborockDataUpdateCoordinator | None: """Set up a device Coordinator.""" mqtt_client = await hass.async_add_executor_job( - RoborockMqttClientV1, user_data, DeviceData(device, product_info.name) + RoborockMqttClientV1, user_data, DeviceData(device, product_info.model) ) try: networking = await mqtt_client.get_networking() diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index c6dee7ce4edc7a..200614b024e2d0 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +from copy import deepcopy import logging from typing import Any @@ -19,11 +20,11 @@ import voluptuous as vol from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_USERNAME from homeassistant.core import callback @@ -44,7 +45,6 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Roborock.""" VERSION = 1 - reauth_entry: ConfigEntry | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -116,11 +116,12 @@ async def async_step_code( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self.reauth_entry is not None: + if self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() self.hass.config_entries.async_update_entry( - self.reauth_entry, + reauth_entry, data={ - **self.reauth_entry.data, + **reauth_entry.data, CONF_USER_DATA: login_data.as_dict(), }, ) @@ -140,9 +141,6 @@ async def async_step_reauth( self._username = entry_data[CONF_USERNAME] assert self._username self._client = RoborockApiClient(self._username) - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -173,14 +171,18 @@ def _create_entry( @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> RoborockOptionsFlowHandler: """Create the options flow.""" return RoborockOptionsFlowHandler(config_entry) -class RoborockOptionsFlowHandler(OptionsFlowWithConfigEntry): +class RoborockOptionsFlowHandler(OptionsFlow): """Handle an option flow for Roborock.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.options = deepcopy(dict(config_entry.options)) + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git 
a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index 3bb3b9b2046f64..c305e4710fcead 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.6.0", + "python-roborock==2.7.2", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/homeassistant/components/roku/__init__.py b/homeassistant/components/roku/__init__.py index 7515f375054227..b318a91e4c7f65 100644 --- a/homeassistant/components/roku/__init__.py +++ b/homeassistant/components/roku/__init__.py @@ -6,7 +6,7 @@ from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN from .coordinator import RokuDataUpdateCoordinator PLATFORMS = [ @@ -24,7 +24,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: device_id = entry.entry_id coordinator = RokuDataUpdateCoordinator( - hass, host=entry.data[CONF_HOST], device_id=device_id + hass, + host=entry.data[CONF_HOST], + device_id=device_id, + play_media_app_id=entry.options.get( + CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID + ), ) await coordinator.async_config_entry_first_refresh() @@ -32,6 +37,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(async_reload_entry)) + return True @@ -40,3 +47,8 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) return unload_ok + + +async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Reload the config entry when it changed.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/roku/config_flow.py b/homeassistant/components/roku/config_flow.py index 7757cc53e1cf41..18e3b3ed68a37e 100644 --- a/homeassistant/components/roku/config_flow.py +++ b/homeassistant/components/roku/config_flow.py @@ -10,12 +10,17 @@ import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN +from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) @@ -155,3 +160,36 @@ async def async_step_discovery_confirm( title=self.discovery_info[CONF_NAME], data=self.discovery_info, ) + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> RokuOptionsFlowHandler: + """Create the options flow.""" + return RokuOptionsFlowHandler() + + +class RokuOptionsFlowHandler(OptionsFlow): + """Handle Roku options.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage Roku options.""" + if user_input is not None: + return self.async_create_entry(title="", 
data=user_input) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema( + { + vol.Optional( + CONF_PLAY_MEDIA_APP_ID, + default=self.config_entry.options.get( + CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID + ), + ): str, + } + ), + ) diff --git a/homeassistant/components/roku/const.py b/homeassistant/components/roku/const.py index ab633a4044cf8d..f0c7d4e253778e 100644 --- a/homeassistant/components/roku/const.py +++ b/homeassistant/components/roku/const.py @@ -15,3 +15,9 @@ # Services SERVICE_SEARCH = "search" + +# Config +CONF_PLAY_MEDIA_APP_ID = "play_media_app_id" + +# Defaults +DEFAULT_PLAY_MEDIA_APP_ID = "15985" diff --git a/homeassistant/components/roku/coordinator.py b/homeassistant/components/roku/coordinator.py index 303d0e91a36056..7900669d02f8ac 100644 --- a/homeassistant/components/roku/coordinator.py +++ b/homeassistant/components/roku/coordinator.py @@ -29,15 +29,12 @@ class RokuDataUpdateCoordinator(DataUpdateCoordinator[Device]): roku: Roku def __init__( - self, - hass: HomeAssistant, - *, - host: str, - device_id: str, + self, hass: HomeAssistant, *, host: str, device_id: str, play_media_app_id: str ) -> None: """Initialize global Roku data updater.""" self.device_id = device_id self.roku = Roku(host=host, session=async_get_clientsession(hass)) + self.play_media_app_id = play_media_app_id self.full_update_interval = timedelta(minutes=15) self.last_full_update = None diff --git a/homeassistant/components/roku/media_player.py b/homeassistant/components/roku/media_player.py index 5b15253068e051..35f01553cdd168 100644 --- a/homeassistant/components/roku/media_player.py +++ b/homeassistant/components/roku/media_player.py @@ -445,17 +445,25 @@ async def async_play_media( if attr in extra } - params = {"t": "a", **params} + params = {"u": media_id, "t": "a", **params} - await self.coordinator.roku.play_on_roku(media_id, params) + await self.coordinator.roku.launch( + self.coordinator.play_media_app_id, + params, + ) elif media_type in {MediaType.URL, MediaType.VIDEO}: params = { param: extra[attr] for (attr, param) in ATTRS_TO_PLAY_ON_ROKU_PARAMS.items() if attr in extra } + params["u"] = media_id + params["t"] = "v" - await self.coordinator.roku.play_on_roku(media_id, params) + await self.coordinator.roku.launch( + self.coordinator.play_media_app_id, + params, + ) else: _LOGGER.error("Media type %s is not supported", original_media_type) return diff --git a/homeassistant/components/roku/strings.json b/homeassistant/components/roku/strings.json index 9eef366163eef1..9d657be6d61f56 100644 --- a/homeassistant/components/roku/strings.json +++ b/homeassistant/components/roku/strings.json @@ -24,6 +24,18 @@ "unknown": "[%key:common::config_flow::error::unknown%]" } }, + "options": { + "step": { + "init": { + "data": { + "play_media_app_id": "Play Media Roku Application ID" + }, + "data_description": { + "play_media_app_id": "The application ID to use when launching media playback. Must support the PlayOnRoku API." 
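The Roku options handler above follows the other options-flow cleanups in this diff (rfxtrx, risco, roborock, roomba, rtsp_to_webrtc, screenlogic): the handler is constructed without arguments and reads the entry through the self.config_entry property supplied by the OptionsFlow base class, while __init__.py registers an update listener so changed options trigger a reload. A condensed sketch of that combination; ExampleConfigFlow, ExampleOptionsFlowHandler, DOMAIN and "example_option" are placeholders:

    from typing import Any

    import voluptuous as vol

    from homeassistant.config_entries import (
        ConfigEntry,
        ConfigFlow,
        ConfigFlowResult,
        OptionsFlow,
    )
    from homeassistant.core import HomeAssistant, callback

    DOMAIN = "example"  # placeholder domain


    class ExampleConfigFlow(ConfigFlow, domain=DOMAIN):
        """Config flow that hands out a new-style options flow."""

        @staticmethod
        @callback
        def async_get_options_flow(config_entry: ConfigEntry) -> "ExampleOptionsFlowHandler":
            # The entry is no longer passed in; the framework attaches it to the handler.
            return ExampleOptionsFlowHandler()


    class ExampleOptionsFlowHandler(OptionsFlow):
        """Options handler reading defaults from self.config_entry.options."""

        async def async_step_init(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            if user_input is not None:
                return self.async_create_entry(title="", data=user_input)
            return self.async_show_form(
                step_id="init",
                data_schema=vol.Schema(
                    {
                        vol.Optional(
                            "example_option",
                            default=self.config_entry.options.get("example_option", ""),
                        ): str,
                    }
                ),
            )


    async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Reload the entry when its options change (wired up via
        entry.async_on_unload(entry.add_update_listener(async_reload_entry)))."""
        await hass.config_entries.async_reload(entry.entry_id)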
+ } + } + } + }, "entity": { "binary_sensor": { "headphones_connected": { diff --git a/homeassistant/components/roomba/config_flow.py b/homeassistant/components/roomba/config_flow.py index d690bcce978186..e48d2d9113956a 100644 --- a/homeassistant/components/roomba/config_flow.py +++ b/homeassistant/components/roomba/config_flow.py @@ -16,7 +16,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_DELAY, CONF_HOST, CONF_NAME, CONF_PASSWORD from homeassistant.core import HomeAssistant, callback @@ -57,7 +57,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, address=data[CONF_HOST], blid=data[CONF_BLID], password=data[CONF_PASSWORD], - continuous=False, + continuous=True, delay=data[CONF_DELAY], ) ) @@ -92,7 +92,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> RoombaOptionsFlowHandler: """Get the options flow for this handler.""" - return RoombaOptionsFlowHandler(config_entry) + return RoombaOptionsFlowHandler() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -300,7 +300,7 @@ async def async_step_link_manual( ) -class RoombaOptionsFlowHandler(OptionsFlowWithConfigEntry): +class RoombaOptionsFlowHandler(OptionsFlow): """Handle options.""" async def async_step_init( @@ -310,17 +310,18 @@ async def async_step_init( if user_input is not None: return self.async_create_entry(title="", data=user_input) + options = self.config_entry.options return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Optional( CONF_CONTINUOUS, - default=self.options.get(CONF_CONTINUOUS, DEFAULT_CONTINUOUS), + default=options.get(CONF_CONTINUOUS, DEFAULT_CONTINUOUS), ): bool, vol.Optional( CONF_DELAY, - default=self.options.get(CONF_DELAY, DEFAULT_DELAY), + default=options.get(CONF_DELAY, DEFAULT_DELAY), ): int, } ), diff --git a/homeassistant/components/roomba/const.py b/homeassistant/components/roomba/const.py index 331c09006825f6..7f1e3b8e1eed6a 100644 --- a/homeassistant/components/roomba/const.py +++ b/homeassistant/components/roomba/const.py @@ -9,5 +9,5 @@ CONF_BLID = "blid" DEFAULT_CERT = "/etc/ssl/certs/ca-certificates.crt" DEFAULT_CONTINUOUS = True -DEFAULT_DELAY = 1 +DEFAULT_DELAY = 30 ROOMBA_SESSION = "roomba_session" diff --git a/homeassistant/components/roomba/manifest.json b/homeassistant/components/roomba/manifest.json index a697680b37948c..edb317f9752583 100644 --- a/homeassistant/components/roomba/manifest.json +++ b/homeassistant/components/roomba/manifest.json @@ -1,7 +1,7 @@ { "domain": "roomba", "name": "iRobot Roomba and Braava", - "codeowners": ["@pschmitt", "@cyr-ius", "@shenxn", "@Xitee1", "@Orhideous"], + "codeowners": ["@pschmitt", "@cyr-ius", "@shenxn", "@Orhideous"], "config_flow": true, "dhcp": [ { diff --git a/homeassistant/components/rova/strings.json b/homeassistant/components/rova/strings.json index 864989b90dbbd2..3b89fc789eec93 100644 --- a/homeassistant/components/rova/strings.json +++ b/homeassistant/components/rova/strings.json @@ -12,7 +12,8 @@ }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "invalid_rova_area": "Rova does not collect at this address" + "invalid_rova_area": "Rova does not collect at this address", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", diff --git 
a/homeassistant/components/rpi_power/config_flow.py b/homeassistant/components/rpi_power/config_flow.py index c44bb65d79a486..0151a92856dd4e 100644 --- a/homeassistant/components/rpi_power/config_flow.py +++ b/homeassistant/components/rpi_power/config_flow.py @@ -37,8 +37,6 @@ async def async_step_onboarding( self, data: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by onboarding.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") has_devices = await self._discovery_function(self.hass) if not has_devices: diff --git a/homeassistant/components/rpi_power/manifest.json b/homeassistant/components/rpi_power/manifest.json index 7da5897c00d275..d5704f61564b15 100644 --- a/homeassistant/components/rpi_power/manifest.json +++ b/homeassistant/components/rpi_power/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/rpi_power", "iot_class": "local_polling", "loggers": ["rpi_bad_power"], - "requirements": ["rpi-bad-power==0.1.0"] + "requirements": ["rpi-bad-power==0.1.0"], + "single_config_entry": true } diff --git a/homeassistant/components/rpi_power/strings.json b/homeassistant/components/rpi_power/strings.json index 9a46ca1e10e3f1..796a973335bef4 100644 --- a/homeassistant/components/rpi_power/strings.json +++ b/homeassistant/components/rpi_power/strings.json @@ -7,7 +7,6 @@ } }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "no_devices_found": "Can't find the system class needed for this component, make sure that your kernel is recent and the hardware is supported" } } diff --git a/homeassistant/components/rtsp_to_webrtc/__init__.py b/homeassistant/components/rtsp_to_webrtc/__init__.py index 948ba8929fc22f..59b8077e398b70 100644 --- a/homeassistant/components/rtsp_to_webrtc/__init__.py +++ b/homeassistant/components/rtsp_to_webrtc/__init__.py @@ -24,11 +24,11 @@ from rtsp_to_webrtc.client import get_adaptive_client from rtsp_to_webrtc.exceptions import ClientError, ResponseError from rtsp_to_webrtc.interface import WebRTCClientInterface +from webrtc_models import RTCIceServer from homeassistant.components import camera -from homeassistant.components.camera.webrtc import RTCIceServer, register_ice_server from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -59,10 +59,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN][CONF_STUN_SERVER] = entry.options.get(CONF_STUN_SERVER) if server := entry.options.get(CONF_STUN_SERVER): - async def get_server() -> RTCIceServer: - return RTCIceServer(urls=[server]) + @callback + def get_servers() -> list[RTCIceServer]: + return [RTCIceServer(urls=[server])] - entry.async_on_unload(register_ice_server(hass, get_server)) + entry.async_on_unload(camera.async_register_ice_servers(hass, get_servers)) async def async_offer_for_stream_source( stream_source: str, diff --git a/homeassistant/components/rtsp_to_webrtc/config_flow.py b/homeassistant/components/rtsp_to_webrtc/config_flow.py index adab1a456d041c..22502659757007 100644 --- a/homeassistant/components/rtsp_to_webrtc/config_flow.py +++ b/homeassistant/components/rtsp_to_webrtc/config_flow.py @@ -9,7 +9,6 @@ import rtsp_to_webrtc import 
voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -19,6 +18,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . import CONF_STUN_SERVER, DATA_SERVER_URL, DOMAIN @@ -119,16 +119,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Create an options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """RTSPtoWeb Options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/ruckus_unleashed/config_flow.py b/homeassistant/components/ruckus_unleashed/config_flow.py index fdfacfc73a7737..0743b19bdaf7dc 100644 --- a/homeassistant/components/ruckus_unleashed/config_flow.py +++ b/homeassistant/components/ruckus_unleashed/config_flow.py @@ -8,7 +8,7 @@ from aioruckus.exceptions import AuthenticationError, SchemaError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -64,8 +64,6 @@ class RuckusConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -82,27 +80,24 @@ async def async_step_user( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if self._reauth_entry is None: - await self.async_set_unique_id(info[KEY_SYS_SERIAL]) + await self.async_set_unique_id(info[KEY_SYS_SERIAL]) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title=info[KEY_SYS_TITLE], data=user_input ) - if info[KEY_SYS_SERIAL] == self._reauth_entry.unique_id: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) + reauth_entry = self._get_reauth_entry() + if info[KEY_SYS_SERIAL] == reauth_entry.unique_id: + return self.async_update_reload_and_abort( + reauth_entry, data=user_input ) - return self.async_abort(reason="reauth_successful") errors["base"] = "invalid_host" - data_schema = self.add_suggested_values_to_schema( - DATA_SCHEMA, self._reauth_entry.data if self._reauth_entry else {} - ) + data_schema = DATA_SCHEMA + if self.source == SOURCE_REAUTH: + data_schema = self.add_suggested_values_to_schema( + data_schema, self._get_reauth_entry().data + ) return self.async_show_form( step_id="user", data_schema=data_schema, errors=errors ) @@ -111,9 +106,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git 
a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index ba53f6794e3ca1..784629ea0bc2aa 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import CONNECT_TIMEOUT, RUSSOUND_RIO_EXCEPTIONS +from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS PLATFORMS = [Platform.MEDIA_PLAYER] @@ -43,7 +43,14 @@ async def _connection_update_callback( async with asyncio.timeout(CONNECT_TIMEOUT): await client.connect() except RUSSOUND_RIO_EXCEPTIONS as err: - raise ConfigEntryNotReady(f"Error while connecting to {host}:{port}") from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="entry_cannot_connect", + translation_placeholders={ + "host": host, + "port": port, + }, + ) from err entry.runtime_data = client await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/russound_rio/const.py b/homeassistant/components/russound_rio/const.py index 1b38dc8ce5c0bd..af52e89d399442 100644 --- a/homeassistant/components/russound_rio/const.py +++ b/homeassistant/components/russound_rio/const.py @@ -17,7 +17,7 @@ ) -CONNECT_TIMEOUT = 5 +CONNECT_TIMEOUT = 15 MP_FEATURES_BY_FLAG = { FeatureFlag.COMMANDS_ZONE_MUTE_OFF_ON: MediaPlayerEntityFeature.VOLUME_MUTE diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py index 23b196ecb2f7b2..9790ff43e68d58 100644 --- a/homeassistant/components/russound_rio/entity.py +++ b/homeassistant/components/russound_rio/entity.py @@ -26,7 +26,12 @@ async def decorator(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None await func(self, *args, **kwargs) except RUSSOUND_RIO_EXCEPTIONS as exc: raise HomeAssistantError( - f"Error executing {func.__name__} on entity {self.entity_id}," + translation_domain=DOMAIN, + translation_key="command_error", + translation_placeholders={ + "function_name": func.__name__, + "entity_id": self.entity_id, + }, ) from exc return decorator @@ -91,6 +96,4 @@ async def async_added_to_hass(self) -> None: async def async_will_remove_from_hass(self) -> None: """Remove callbacks.""" - await self._client.unregister_state_update_callbacks( - self._state_update_callback - ) + self._client.unregister_state_update_callbacks(self._state_update_callback) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 96fc0fb53dbb2b..ab77ca3ab6af32 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["aiorussound"], "quality_scale": "silver", - "requirements": ["aiorussound==4.0.5"] + "requirements": ["aiorussound==4.1.0"] } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 316e4d2be7c5ff..45818d3e25bb69 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -5,7 +5,7 @@ import logging from aiorussound import Controller -from aiorussound.models import Source +from aiorussound.models import PlayStatus, Source from aiorussound.rio import ZoneControlSurface from homeassistant.components.media_player import ( @@ -132,11 +132,18 @@ def 
_source(self) -> Source: def state(self) -> MediaPlayerState | None: """Return the state of the device.""" status = self._zone.status - if status == "ON": - return MediaPlayerState.ON - if status == "OFF": + play_status = self._source.play_status + if not status: return MediaPlayerState.OFF - return None + if play_status == PlayStatus.PLAYING: + return MediaPlayerState.PLAYING + if play_status == PlayStatus.PAUSED: + return MediaPlayerState.PAUSED + if play_status == PlayStatus.TRANSITIONING: + return MediaPlayerState.BUFFERING + if play_status == PlayStatus.STOPPED: + return MediaPlayerState.IDLE + return MediaPlayerState.ON @property def source(self): @@ -175,7 +182,7 @@ def volume_level(self): Value is returned based on a range (0..50). Therefore float divide by 50 to get to the required range. """ - return float(self._zone.volume or "0") / 50.0 + return self._zone.volume / 50.0 @command async def async_turn_off(self) -> None: diff --git a/homeassistant/components/russound_rio/strings.json b/homeassistant/components/russound_rio/strings.json index c105dcafae2143..b8c29c08301a7b 100644 --- a/homeassistant/components/russound_rio/strings.json +++ b/homeassistant/components/russound_rio/strings.json @@ -33,5 +33,13 @@ "title": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::title%]", "description": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::description%]" } + }, + "exceptions": { + "entry_cannot_connect": { + "message": "Error while connecting to {host}:{port}" + }, + "command_error": { + "message": "Error executing {function_name} on entity {entity_id}" + } } } diff --git a/homeassistant/components/rympro/config_flow.py b/homeassistant/components/rympro/config_flow.py index be35c48ac5b0b2..1d5d8a9e79d9df 100644 --- a/homeassistant/components/rympro/config_flow.py +++ b/homeassistant/components/rympro/config_flow.py @@ -9,7 +9,7 @@ from pyrympro import CannotConnectError, RymPro, UnauthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -46,10 +46,6 @@ class RymproConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Init the config flow.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -74,19 +70,17 @@ async def async_step_user( title = user_input[CONF_EMAIL] data = {**user_input, **info} - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: await self.async_set_unique_id(info[CONF_UNIQUE_ID]) self._abort_if_unique_id_configured() return self.async_create_entry(title=title, data=data) - self.hass.config_entries.async_update_entry( - self._reauth_entry, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), title=title, data=data, unique_id=info[CONF_UNIQUE_ID], ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors @@ -96,7 +90,4 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle 
configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() diff --git a/homeassistant/components/rympro/strings.json b/homeassistant/components/rympro/strings.json index c58bf5b93babd6..2c1e2ad93c9a46 100644 --- a/homeassistant/components/rympro/strings.json +++ b/homeassistant/components/rympro/strings.json @@ -14,7 +14,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/homeassistant/components/samsungtv/__init__.py b/homeassistant/components/samsungtv/__init__.py index b43b8abea657a7..6d4e491b8394f1 100644 --- a/homeassistant/components/samsungtv/__init__.py +++ b/homeassistant/components/samsungtv/__init__.py @@ -10,7 +10,7 @@ import getmac from homeassistant.components import ssdp -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -36,7 +36,6 @@ CONF_SESSION_ID, CONF_SSDP_MAIN_TV_AGENT_LOCATION, CONF_SSDP_RENDERING_CONTROL_LOCATION, - DOMAIN, ENTRY_RELOAD_COOLDOWN, LEGACY_PORT, LOGGER, @@ -135,16 +134,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SamsungTVConfigEntry) -> def _access_denied() -> None: """Access denied callback.""" LOGGER.debug("Access denied in getting remote object") - hass.create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) - ) + entry.async_start_reauth(hass) bridge.register_reauth_callback(_access_denied) diff --git a/homeassistant/components/samsungtv/config_flow.py b/homeassistant/components/samsungtv/config_flow.py index 9d2ecefd442993..837651f990039a 100644 --- a/homeassistant/components/samsungtv/config_flow.py +++ b/homeassistant/components/samsungtv/config_flow.py @@ -105,7 +105,6 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize flow.""" - self._reauth_entry: ConfigEntry | None = None self._host: str = "" self._mac: str | None = None self._udn: str | None = None @@ -529,9 +528,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) if entry_data.get(CONF_MODEL) and entry_data.get(CONF_NAME): self._title = f"{entry_data[CONF_NAME]} ({entry_data[CONF_MODEL]})" else: @@ -543,22 +539,23 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth.""" errors = {} - assert self._reauth_entry - method = self._reauth_entry.data[CONF_METHOD] + + reauth_entry = self._get_reauth_entry() + method = reauth_entry.data[CONF_METHOD] if user_input is not None: if method == METHOD_ENCRYPTED_WEBSOCKET: return await self.async_step_reauth_confirm_encrypted() bridge = SamsungTVBridge.get_bridge( self.hass, method, - self._reauth_entry.data[CONF_HOST], + reauth_entry.data[CONF_HOST], ) result = await bridge.async_try_connect() if result == RESULT_SUCCESS: - new_data = dict(self._reauth_entry.data) + new_data = dict(reauth_entry.data) new_data[CONF_TOKEN] = bridge.token return 
self.async_update_reload_and_abort( - self._reauth_entry, + reauth_entry, data=new_data, ) if result not in (RESULT_AUTH_MISSING, RESULT_CANNOT_CONNECT): @@ -587,8 +584,9 @@ async def async_step_reauth_confirm_encrypted( ) -> ConfigFlowResult: """Confirm reauth (encrypted method).""" errors = {} - assert self._reauth_entry - await self._async_start_encrypted_pairing(self._reauth_entry.data[CONF_HOST]) + + reauth_entry = self._get_reauth_entry() + await self._async_start_encrypted_pairing(reauth_entry.data[CONF_HOST]) assert self._authenticator is not None if user_input is not None: @@ -598,9 +596,8 @@ async def async_step_reauth_confirm_encrypted( and (session_id := await self._authenticator.get_session_id_and_close()) ): return self.async_update_reload_and_abort( - self._reauth_entry, - data={ - **self._reauth_entry.data, + reauth_entry, + data_updates={ CONF_TOKEN: token, CONF_SESSION_ID: session_id, }, diff --git a/homeassistant/components/satel_integra/alarm_control_panel.py b/homeassistant/components/satel_integra/alarm_control_panel.py index f9e261b25b16e5..39c0d6b876dcab 100644 --- a/homeassistant/components/satel_integra/alarm_control_panel.py +++ b/homeassistant/components/satel_integra/alarm_control_panel.py @@ -11,15 +11,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -67,7 +61,6 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity): _attr_code_format = CodeFormat.NUMBER _attr_should_poll = False - _attr_state: str | None _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME | AlarmControlPanelEntityFeature.ARM_AWAY @@ -95,8 +88,8 @@ def _update_alarm_status(self): """Handle alarm status update.""" state = self._read_alarm_state() _LOGGER.debug("Got status update, current status: %s", state) - if state != self._attr_state: - self._attr_state = state + if state != self._attr_alarm_state: + self._attr_alarm_state = state self.async_write_ha_state() else: _LOGGER.debug("Ignoring alarm status message, same state") @@ -105,22 +98,28 @@ def _read_alarm_state(self): """Read current status of the alarm and translate it into HA status.""" # Default - disarmed: - hass_alarm_status = STATE_ALARM_DISARMED + hass_alarm_status = AlarmControlPanelState.DISARMED if not self._satel.connected: return None state_map = OrderedDict( [ - (AlarmState.TRIGGERED, STATE_ALARM_TRIGGERED), - (AlarmState.TRIGGERED_FIRE, STATE_ALARM_TRIGGERED), - (AlarmState.ENTRY_TIME, STATE_ALARM_PENDING), - (AlarmState.ARMED_MODE3, STATE_ALARM_ARMED_HOME), - (AlarmState.ARMED_MODE2, STATE_ALARM_ARMED_HOME), - (AlarmState.ARMED_MODE1, STATE_ALARM_ARMED_HOME), - (AlarmState.ARMED_MODE0, STATE_ALARM_ARMED_AWAY), - (AlarmState.EXIT_COUNTDOWN_OVER_10, STATE_ALARM_PENDING), - (AlarmState.EXIT_COUNTDOWN_UNDER_10, STATE_ALARM_PENDING), + (AlarmState.TRIGGERED, AlarmControlPanelState.TRIGGERED), + (AlarmState.TRIGGERED_FIRE, AlarmControlPanelState.TRIGGERED), + (AlarmState.ENTRY_TIME, AlarmControlPanelState.PENDING), + (AlarmState.ARMED_MODE3, AlarmControlPanelState.ARMED_HOME), + (AlarmState.ARMED_MODE2, AlarmControlPanelState.ARMED_HOME), + 
(AlarmState.ARMED_MODE1, AlarmControlPanelState.ARMED_HOME), + (AlarmState.ARMED_MODE0, AlarmControlPanelState.ARMED_AWAY), + ( + AlarmState.EXIT_COUNTDOWN_OVER_10, + AlarmControlPanelState.PENDING, + ), + ( + AlarmState.EXIT_COUNTDOWN_UNDER_10, + AlarmControlPanelState.PENDING, + ), ] ) _LOGGER.debug("State map of Satel: %s", self._satel.partition_states) @@ -141,9 +140,11 @@ async def async_alarm_disarm(self, code: str | None = None) -> None: _LOGGER.debug("Code was empty or None") return - clear_alarm_necessary = self._attr_state == STATE_ALARM_TRIGGERED + clear_alarm_necessary = ( + self._attr_alarm_state == AlarmControlPanelState.TRIGGERED + ) - _LOGGER.debug("Disarming, self._attr_state: %s", self._attr_state) + _LOGGER.debug("Disarming, self._attr_alarm_state: %s", self._attr_alarm_state) await self._satel.disarm(code, [self._partition_id]) diff --git a/homeassistant/components/schlage/__init__.py b/homeassistant/components/schlage/__init__.py index 1c3ad547f3d3a2..e9fb24f13092c5 100644 --- a/homeassistant/components/schlage/__init__.py +++ b/homeassistant/components/schlage/__init__.py @@ -16,6 +16,7 @@ PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.LOCK, + Platform.SELECT, Platform.SENSOR, Platform.SWITCH, ] diff --git a/homeassistant/components/schlage/config_flow.py b/homeassistant/components/schlage/config_flow.py index a6104702396a9c..f359f7dda7185c 100644 --- a/homeassistant/components/schlage/config_flow.py +++ b/homeassistant/components/schlage/config_flow.py @@ -9,7 +9,7 @@ from pyschlage.exceptions import NotAuthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN, LOGGER @@ -25,15 +25,13 @@ class SchlageConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is None: return self._show_user_form({}) - username = user_input[CONF_USERNAME] + username = user_input[CONF_USERNAME].lower() password = user_input[CONF_PASSWORD] user_id, errors = await self.hass.async_add_executor_job( _authenticate, username, password @@ -42,7 +40,13 @@ async def async_step_user( return self._show_user_form(errors) await self.async_set_unique_id(user_id) - return self.async_create_entry(title=username, data=user_input) + return self.async_create_entry( + title=username, + data={ + CONF_USERNAME: username, + CONF_PASSWORD: password, + }, + ) def _show_user_form(self, errors: dict[str, str]) -> ConfigFlowResult: """Show the user form.""" @@ -54,20 +58,17 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self.reauth_entry is not None if user_input is None: return self._show_reauth_form({}) - username = self.reauth_entry.data[CONF_USERNAME] + reauth_entry = self._get_reauth_entry() + username = reauth_entry.data[CONF_USERNAME] password = user_input[CONF_PASSWORD] user_id, errors = 
await self.hass.async_add_executor_job( _authenticate, username, password @@ -75,16 +76,14 @@ async def async_step_reauth_confirm( if user_id is None: return self._show_reauth_form(errors) - if self.reauth_entry.unique_id != user_id: - return self.async_abort(reason="wrong_account") + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_mismatch(reason="wrong_account") data = { CONF_USERNAME: username, CONF_PASSWORD: user_input[CONF_PASSWORD], } - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) def _show_reauth_form(self, errors: dict[str, str]) -> ConfigFlowResult: """Show the reauth form.""" diff --git a/homeassistant/components/schlage/select.py b/homeassistant/components/schlage/select.py new file mode 100644 index 00000000000000..6d93eccaa8542d --- /dev/null +++ b/homeassistant/components/schlage/select.py @@ -0,0 +1,78 @@ +"""Platform for Schlage select integration.""" + +from __future__ import annotations + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import LockData, SchlageDataUpdateCoordinator +from .entity import SchlageEntity + +_DESCRIPTIONS = ( + SelectEntityDescription( + key="auto_lock_time", + translation_key="auto_lock_time", + entity_category=EntityCategory.CONFIG, + # valid values are from Schlage UI and validated by pyschlage + options=[ + "0", + "15", + "30", + "60", + "120", + "240", + "300", + ], + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up selects based on a config entry.""" + coordinator: SchlageDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + def _add_new_locks(locks: dict[str, LockData]) -> None: + async_add_entities( + SchlageSelect( + coordinator=coordinator, + description=description, + device_id=device_id, + ) + for device_id in locks + for description in _DESCRIPTIONS + ) + + _add_new_locks(coordinator.data.locks) + coordinator.new_locks_callbacks.append(_add_new_locks) + + +class SchlageSelect(SchlageEntity, SelectEntity): + """Schlage select entity.""" + + def __init__( + self, + coordinator: SchlageDataUpdateCoordinator, + description: SelectEntityDescription, + device_id: str, + ) -> None: + """Initialize a SchlageSelect.""" + super().__init__(coordinator, device_id) + self.entity_description = description + self._attr_unique_id = f"{device_id}_{self.entity_description.key}" + + @property + def current_option(self) -> str: + """Return the current option.""" + return str(self._lock_data.lock.auto_lock_time) + + def select_option(self, option: str) -> None: + """Set the current option.""" + self._lock.set_auto_lock_time(int(option)) diff --git a/homeassistant/components/schlage/strings.json b/homeassistant/components/schlage/strings.json index 721d9e80286a2b..5c8cd0826a9eed 100644 --- a/homeassistant/components/schlage/strings.json +++ b/homeassistant/components/schlage/strings.json @@ -31,6 +31,20 @@ "name": "Keypad disabled" } }, + "select": { + "auto_lock_time": { + "name": "Auto-Lock 
time", + "state": { + "0": "Disabled", + "15": "15 seconds", + "30": "30 seconds", + "60": "1 minute", + "120": "2 minutes", + "240": "4 minutes", + "300": "5 minutes" + } + } + }, "switch": { "beeper": { "name": "Keypress Beep" diff --git a/homeassistant/components/scrape/__init__.py b/homeassistant/components/scrape/__init__.py index 16220d5c567f26..ff991c5f348c8c 100644 --- a/homeassistant/components/scrape/__init__.py +++ b/homeassistant/components/scrape/__init__.py @@ -72,7 +72,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: scan_interval: timedelta = resource_config.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ) - coordinator = ScrapeCoordinator(hass, rest, scan_interval) + coordinator = ScrapeCoordinator(hass, None, rest, scan_interval) sensors: list[ConfigType] = resource_config.get(SENSOR_DOMAIN, []) if sensors: @@ -100,6 +100,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ScrapeConfigEntry) -> bo coordinator = ScrapeCoordinator( hass, + entry, rest, DEFAULT_SCAN_INTERVAL, ) diff --git a/homeassistant/components/scrape/coordinator.py b/homeassistant/components/scrape/coordinator.py index 74fd510ac94212..b5cabc6b94e7be 100644 --- a/homeassistant/components/scrape/coordinator.py +++ b/homeassistant/components/scrape/coordinator.py @@ -8,6 +8,7 @@ from bs4 import BeautifulSoup from homeassistant.components.rest import RestData +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -18,12 +19,17 @@ class ScrapeCoordinator(DataUpdateCoordinator[BeautifulSoup]): """Scrape Coordinator.""" def __init__( - self, hass: HomeAssistant, rest: RestData, update_interval: timedelta + self, + hass: HomeAssistant, + config_entry: ConfigEntry | None, + rest: RestData, + update_interval: timedelta, ) -> None: """Initialize Scrape coordinator.""" super().__init__( hass, _LOGGER, + config_entry=config_entry, name="Scrape Coordinator", update_interval=update_interval, ) diff --git a/homeassistant/components/screenlogic/config_flow.py b/homeassistant/components/screenlogic/config_flow.py index 4a46756cf2f6a1..19db89dc03de8f 100644 --- a/homeassistant/components/screenlogic/config_flow.py +++ b/homeassistant/components/screenlogic/config_flow.py @@ -81,7 +81,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> ScreenLogicOptionsFlowHandler: """Get the options flow for ScreenLogic.""" - return ScreenLogicOptionsFlowHandler(config_entry) + return ScreenLogicOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -192,10 +192,6 @@ async def async_step_gateway_entry(self, user_input=None) -> ConfigFlowResult: class ScreenLogicOptionsFlowHandler(OptionsFlow): """Handles the options for the ScreenLogic integration.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init the screen logic options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/sense/__init__.py b/homeassistant/components/sense/__init__.py index 58e993ad6e0c24..e919d48e96de03 100644 --- a/homeassistant/components/sense/__init__.py +++ b/homeassistant/components/sense/__init__.py @@ -1,10 +1,8 @@ """Support for monitoring a Sense energy sensor.""" from dataclasses import dataclass -from datetime import timedelta from functools import partial 
import logging -from typing import Any from sense_energy import ( ASyncSenseable, @@ -13,26 +11,18 @@ ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_EMAIL, - CONF_TIMEOUT, - EVENT_HOMEASSISTANT_STOP, - Platform, -) -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import CONF_TIMEOUT, Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( ACTIVE_UPDATE_RATE, SENSE_CONNECT_EXCEPTIONS, - SENSE_DEVICE_UPDATE, SENSE_TIMEOUT_EXCEPTIONS, SENSE_WEBSOCKET_EXCEPTIONS, ) +from .coordinator import SenseRealtimeCoordinator, SenseTrendCoordinator _LOGGER = logging.getLogger(__name__) @@ -40,37 +30,19 @@ type SenseConfigEntry = ConfigEntry[SenseData] -class SenseDevicesData: - """Data for each sense device.""" - - def __init__(self): - """Create.""" - self._data_by_device = {} - - def set_devices_data(self, devices): - """Store a device update.""" - self._data_by_device = {device["id"]: device for device in devices} - - def get_device_by_id(self, sense_device_id): - """Get the latest device data.""" - return self._data_by_device.get(sense_device_id) - - @dataclass(kw_only=True, slots=True) class SenseData: """Sense data type.""" data: ASyncSenseable - device_data: SenseDevicesData - trends: DataUpdateCoordinator[None] - discovered: list[dict[str, Any]] + trends: SenseTrendCoordinator + rt: SenseRealtimeCoordinator async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> bool: """Set up Sense from a config entry.""" entry_data = entry.data - email = entry_data[CONF_EMAIL] timeout = entry_data[CONF_TIMEOUT] access_token = entry_data.get("access_token", "") @@ -108,7 +80,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo raise ConfigEntryNotReady(str(err)) from err try: - sense_discovered_devices = await gateway.get_discovered_device_data() + await gateway.fetch_devices() await gateway.update_realtime() except SENSE_TIMEOUT_EXCEPTIONS as err: raise ConfigEntryNotReady( @@ -117,26 +89,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo except SENSE_WEBSOCKET_EXCEPTIONS as err: raise ConfigEntryNotReady(str(err) or "Error during realtime update") from err - async def _async_update_trend(): - """Update the trend data.""" - try: - await gateway.update_trend_data() - except (SenseAuthenticationException, SenseMFARequiredException) as err: - _LOGGER.warning("Sense authentication expired") - raise ConfigEntryAuthFailed(err) from err - except SENSE_CONNECT_EXCEPTIONS as err: - raise UpdateFailed(err) from err - - trends_coordinator: DataUpdateCoordinator[None] = DataUpdateCoordinator( - hass, - _LOGGER, - name=f"Sense Trends {email}", - update_method=_async_update_trend, - update_interval=timedelta(seconds=300), - ) - # Start out as unavailable so we do not report 0 data - # until the update happens - trends_coordinator.last_update_success = False + trends_coordinator = SenseTrendCoordinator(hass, gateway) + realtime_coordinator = SenseRealtimeCoordinator(hass, gateway) # This can take longer than 60s and we already know # sense is online since 
get_discovered_device_data was @@ -146,45 +100,19 @@ async def _async_update_trend(): trends_coordinator.async_request_refresh(), "sense.trends-coordinator-refresh", ) + entry.async_create_background_task( + hass, + realtime_coordinator.async_request_refresh(), + "sense.realtime-coordinator-refresh", + ) entry.runtime_data = SenseData( data=gateway, - device_data=SenseDevicesData(), trends=trends_coordinator, - discovered=sense_discovered_devices, + rt=realtime_coordinator, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - async def async_sense_update(_): - """Retrieve latest state.""" - try: - await gateway.update_realtime() - except SENSE_TIMEOUT_EXCEPTIONS as ex: - _LOGGER.error("Timeout retrieving data: %s", ex) - except SENSE_WEBSOCKET_EXCEPTIONS as ex: - _LOGGER.error("Failed to update data: %s", ex) - - data = gateway.get_realtime() - if "devices" in data: - entry.runtime_data.device_data.set_devices_data(data["devices"]) - async_dispatcher_send(hass, f"{SENSE_DEVICE_UPDATE}-{gateway.sense_monitor_id}") - - remove_update_callback = async_track_time_interval( - hass, async_sense_update, timedelta(seconds=ACTIVE_UPDATE_RATE) - ) - - @callback - def _remove_update_callback_at_stop(event): - remove_update_callback() - - entry.async_on_unload(remove_update_callback) - entry.async_on_unload( - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_STOP, _remove_update_callback_at_stop - ) - ) - return True diff --git a/homeassistant/components/sense/binary_sensor.py b/homeassistant/components/sense/binary_sensor.py index 8317f8458b3449..d06b3a6293798a 100644 --- a/homeassistant/components/sense/binary_sensor.py +++ b/homeassistant/components/sense/binary_sensor.py @@ -2,17 +2,20 @@ import logging +from sense_energy.sense_api import SenseDevice + from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import SenseConfigEntry -from .const import ATTRIBUTION, DOMAIN, MDI_ICONS, SENSE_DEVICE_UPDATE +from .const import DOMAIN +from .coordinator import SenseRealtimeCoordinator +from .entity import SenseDeviceEntity _LOGGER = logging.getLogger(__name__) @@ -24,13 +27,11 @@ async def async_setup_entry( ) -> None: """Set up the Sense binary sensor.""" sense_monitor_id = config_entry.runtime_data.data.sense_monitor_id + realtime_coordinator = config_entry.runtime_data.rt - sense_devices = config_entry.runtime_data.discovered - device_data = config_entry.runtime_data.device_data devices = [ - SenseDevice(device_data, device, sense_monitor_id) - for device in sense_devices - if device["tags"]["DeviceListAllowed"] == "true" + SenseBinarySensor(device, realtime_coordinator, sense_monitor_id) + for device in config_entry.runtime_data.data.devices ] await _migrate_old_unique_ids(hass, devices) @@ -38,65 +39,46 @@ async def async_setup_entry( async_add_entities(devices) -async def _migrate_old_unique_ids(hass, devices): - registry = er.async_get(hass) - for device in devices: - # Migration of old not so unique ids - old_entity_id = registry.async_get_entity_id( - "binary_sensor", DOMAIN, device.old_unique_id - ) - if old_entity_id is not None: - _LOGGER.debug( - "Migrating unique_id from [%s] to [%s]", - device.old_unique_id, - device.unique_id, - ) - registry.async_update_entity(old_entity_id, new_unique_id=device.unique_id) - - -def sense_to_mdi(sense_icon): - """Convert sense icon to mdi icon.""" - return f"mdi:{MDI_ICONS.get(sense_icon, "power-plug")}" - - -class SenseDevice(BinarySensorEntity): +class SenseBinarySensor(SenseDeviceEntity, BinarySensorEntity): """Implementation of a Sense energy device binary sensor.""" - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - _attr_available = False _attr_device_class = BinarySensorDeviceClass.POWER - def __init__(self, sense_devices_data, device, sense_monitor_id): + def __init__( + self, + device: SenseDevice, + coordinator: SenseRealtimeCoordinator, + sense_monitor_id: str, + ) -> None: """Initialize the Sense binary sensor.""" - self._attr_name = device["name"] - self._id = device["id"] - self._sense_monitor_id = sense_monitor_id - self._attr_unique_id = f"{sense_monitor_id}-{self._id}" - self._attr_icon = sense_to_mdi(device["icon"]) - self._sense_devices_data = sense_devices_data + super().__init__(device, coordinator, sense_monitor_id, device.id) + self._id = device.id @property - def old_unique_id(self): + def old_unique_id(self) -> str: """Return the old not so unique id of the binary sensor.""" return self._id - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) - ) + @property + def is_on(self) -> bool: + """Return the state of the sensor.""" + return self._device.is_on + - @callback - def _async_update_from_data(self): - """Get the latest data, update state. 
Must not do I/O.""" - new_state = bool(self._sense_devices_data.get_device_by_id(self._id)) - if self._attr_available and self._attr_is_on == new_state: - return - self._attr_available = True - self._attr_is_on = new_state - self.async_write_ha_state() +async def _migrate_old_unique_ids( + hass: HomeAssistant, devices: list[SenseBinarySensor] +) -> None: + registry = er.async_get(hass) + for device in devices: + # Migration of old not so unique ids + old_entity_id = registry.async_get_entity_id( + "binary_sensor", DOMAIN, device.old_unique_id + ) + updated_id = device.unique_id + if old_entity_id is not None and updated_id is not None: + _LOGGER.debug( + "Migrating unique_id from [%s] to [%s]", + device.old_unique_id, + device.unique_id, + ) + registry.async_update_entity(old_entity_id, new_unique_id=updated_id) diff --git a/homeassistant/components/sense/const.py b/homeassistant/components/sense/const.py index 5e944c18d8df93..b23117c977dbd7 100644 --- a/homeassistant/components/sense/const.py +++ b/homeassistant/components/sense/const.py @@ -11,6 +11,7 @@ DOMAIN = "sense" DEFAULT_TIMEOUT = 30 ACTIVE_UPDATE_RATE = 60 +TREND_UPDATE_RATE = 300 DEFAULT_NAME = "Sense" SENSE_DEVICE_UPDATE = "sense_devices_update" @@ -19,7 +20,7 @@ ATTRIBUTION = "Data provided by Sense.com" -CONSUMPTION_NAME = "Usage" +CONSUMPTION_NAME = "Energy" CONSUMPTION_ID = "usage" PRODUCTION_NAME = "Production" PRODUCTION_ID = "production" diff --git a/homeassistant/components/sense/coordinator.py b/homeassistant/components/sense/coordinator.py new file mode 100644 index 00000000000000..c0029cd79ea479 --- /dev/null +++ b/homeassistant/components/sense/coordinator.py @@ -0,0 +1,76 @@ +"""Sense Coordinators.""" + +from datetime import timedelta +import logging + +from sense_energy import ( + ASyncSenseable, + SenseAuthenticationException, + SenseMFARequiredException, +) + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + ACTIVE_UPDATE_RATE, + SENSE_CONNECT_EXCEPTIONS, + SENSE_TIMEOUT_EXCEPTIONS, + SENSE_WEBSOCKET_EXCEPTIONS, + TREND_UPDATE_RATE, +) + +_LOGGER = logging.getLogger(__name__) + + +class SenseCoordinator(DataUpdateCoordinator[None]): + """Sense Trend Coordinator.""" + + def __init__( + self, hass: HomeAssistant, gateway: ASyncSenseable, name: str, update: int + ) -> None: + """Initialize.""" + super().__init__( + hass, + logger=_LOGGER, + name=f"Sense {name} {gateway.sense_monitor_id}", + update_interval=timedelta(seconds=update), + ) + self._gateway = gateway + self.last_update_success = False + + +class SenseTrendCoordinator(SenseCoordinator): + """Sense Trend Coordinator.""" + + def __init__(self, hass: HomeAssistant, gateway: ASyncSenseable) -> None: + """Initialize.""" + super().__init__(hass, gateway, "Trends", TREND_UPDATE_RATE) + + async def _async_update_data(self) -> None: + """Update the trend data.""" + try: + await self._gateway.update_trend_data() + except (SenseAuthenticationException, SenseMFARequiredException) as err: + _LOGGER.warning("Sense authentication expired") + raise ConfigEntryAuthFailed(err) from err + except SENSE_CONNECT_EXCEPTIONS as err: + raise UpdateFailed(err) from err + + +class SenseRealtimeCoordinator(SenseCoordinator): + """Sense Realtime Coordinator.""" + + def __init__(self, hass: HomeAssistant, gateway: ASyncSenseable) -> None: + """Initialize.""" + super().__init__(hass, gateway, "Realtime", 
ACTIVE_UPDATE_RATE) + + async def _async_update_data(self) -> None: + """Retrieve latest state.""" + try: + await self._gateway.update_realtime() + except SENSE_TIMEOUT_EXCEPTIONS as ex: + _LOGGER.error("Timeout retrieving data: %s", ex) + except SENSE_WEBSOCKET_EXCEPTIONS as ex: + _LOGGER.error("Failed to update data: %s", ex) diff --git a/homeassistant/components/sense/entity.py b/homeassistant/components/sense/entity.py new file mode 100644 index 00000000000000..248be53ceb7528 --- /dev/null +++ b/homeassistant/components/sense/entity.py @@ -0,0 +1,71 @@ +"""Base entities for Sense energy.""" + +from sense_energy import ASyncSenseable +from sense_energy.sense_api import SenseDevice + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import ATTRIBUTION, DOMAIN, MDI_ICONS +from .coordinator import SenseCoordinator + + +def sense_to_mdi(sense_icon: str) -> str: + """Convert sense icon to mdi icon.""" + return f"mdi:{MDI_ICONS.get(sense_icon, "power-plug")}" + + +class SenseEntity(CoordinatorEntity[SenseCoordinator]): + """Base implementation of a Sense sensor.""" + + _attr_attribution = ATTRIBUTION + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__( + self, + gateway: ASyncSenseable, + coordinator: SenseCoordinator, + sense_monitor_id: str, + unique_id: str, + ) -> None: + """Initialize the Sense sensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{sense_monitor_id}-{unique_id}" + self._gateway = gateway + self._attr_device_info = DeviceInfo( + name=f"Sense {sense_monitor_id}", + identifiers={(DOMAIN, sense_monitor_id)}, + model="Sense", + manufacturer="Sense Labs, Inc.", + configuration_url="https://home.sense.com", + ) + + +class SenseDeviceEntity(CoordinatorEntity[SenseCoordinator]): + """Base implementation of a Sense sensor.""" + + _attr_attribution = ATTRIBUTION + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__( + self, + device: SenseDevice, + coordinator: SenseCoordinator, + sense_monitor_id: str, + unique_id: str, + ) -> None: + """Initialize the Sense sensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{sense_monitor_id}-{unique_id}" + self._device = device + self._attr_icon = sense_to_mdi(device.icon) + self._attr_device_info = DeviceInfo( + name=device.name, + identifiers={(DOMAIN, f"{sense_monitor_id}:{device.id}")}, + model="Sense", + manufacturer="Sense Labs, Inc.", + configuration_url="https://home.sense.com", + via_device=(DOMAIN, sense_monitor_id), + ) diff --git a/homeassistant/components/sense/manifest.json b/homeassistant/components/sense/manifest.json index 116b714ba820b3..df2317c3a6c9aa 100644 --- a/homeassistant/components/sense/manifest.json +++ b/homeassistant/components/sense/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/sense", "iot_class": "cloud_polling", "loggers": ["sense_energy"], - "requirements": ["sense-energy==0.12.4"] + "requirements": ["sense-energy==0.13.3"] } diff --git a/homeassistant/components/sense/sensor.py b/homeassistant/components/sense/sensor.py index bc9dd470f5eed7..2f5c82675d5180 100644 --- a/homeassistant/components/sense/sensor.py +++ b/homeassistant/components/sense/sensor.py @@ -1,5 +1,10 @@ """Support for monitoring a Sense energy sensor.""" +from datetime import datetime + +from sense_energy import ASyncSenseable, Scale +from sense_energy.sense_api import SenseDevice + from homeassistant.components.sensor 
import ( SensorDeviceClass, SensorEntity, @@ -11,55 +16,37 @@ UnitOfEnergy, UnitOfPower, ) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import SenseConfigEntry from .const import ( - ACTIVE_NAME, ACTIVE_TYPE, - ATTRIBUTION, CONSUMPTION_ID, CONSUMPTION_NAME, - DOMAIN, FROM_GRID_ID, FROM_GRID_NAME, - MDI_ICONS, NET_PRODUCTION_ID, NET_PRODUCTION_NAME, PRODUCTION_ID, PRODUCTION_NAME, PRODUCTION_PCT_ID, PRODUCTION_PCT_NAME, - SENSE_DEVICE_UPDATE, SOLAR_POWERED_ID, SOLAR_POWERED_NAME, TO_GRID_ID, TO_GRID_NAME, ) - - -class SensorConfig: - """Data structure holding sensor configuration.""" - - def __init__(self, name, sensor_type): - """Sensor name and type to pass to API.""" - self.name = name - self.sensor_type = sensor_type - - -# Sensor types/ranges -ACTIVE_SENSOR_TYPE = SensorConfig(ACTIVE_NAME, ACTIVE_TYPE) +from .coordinator import SenseRealtimeCoordinator, SenseTrendCoordinator +from .entity import SenseDeviceEntity, SenseEntity # Sensor types/ranges TRENDS_SENSOR_TYPES = { - "daily": SensorConfig("Daily", "DAY"), - "weekly": SensorConfig("Weekly", "WEEK"), - "monthly": SensorConfig("Monthly", "MONTH"), - "yearly": SensorConfig("Yearly", "YEAR"), + Scale.DAY: "Daily", + Scale.WEEK: "Weekly", + Scale.MONTH: "Monthly", + Scale.YEAR: "Yearly", + Scale.CYCLE: "Bill", } # Production/consumption variants @@ -76,11 +63,6 @@ def __init__(self, name, sensor_type): ] -def sense_to_mdi(sense_icon): - """Convert sense icon to mdi icon.""" - return f"mdi:{MDI_ICONS.get(sense_icon, 'power-plug')}" - - async def async_setup_entry( hass: HomeAssistant, config_entry: SenseConfigEntry, @@ -89,58 +71,46 @@ async def async_setup_entry( """Set up the Sense sensor.""" data = config_entry.runtime_data.data trends_coordinator = config_entry.runtime_data.trends + realtime_coordinator = config_entry.runtime_data.rt # Request only in case it takes longer # than 60s await trends_coordinator.async_request_refresh() sense_monitor_id = data.sense_monitor_id - sense_devices = config_entry.runtime_data.discovered - device_data = config_entry.runtime_data.device_data - entities: list[SensorEntity] = [ - SenseEnergyDevice(device_data, device, sense_monitor_id) - for device in sense_devices - if device["tags"]["DeviceListAllowed"] == "true" - ] + entities: list[SensorEntity] = [] - for variant_id, variant_name in SENSOR_VARIANTS: - name = ACTIVE_SENSOR_TYPE.name - sensor_type = ACTIVE_SENSOR_TYPE.sensor_type + for device in config_entry.runtime_data.data.devices: + entities.append( + SenseDevicePowerSensor(device, sense_monitor_id, realtime_coordinator) + ) + entities.extend( + SenseDeviceEnergySensor(device, scale, trends_coordinator, sense_monitor_id) + for scale in Scale + ) - unique_id = f"{sense_monitor_id}-active-{variant_id}" + for variant_id, variant_name in SENSOR_VARIANTS: entities.append( - SenseActiveSensor( - data, - name, - sensor_type, - sense_monitor_id, - variant_id, - variant_name, - unique_id, + SensePowerSensor( + data, sense_monitor_id, variant_id, variant_name, realtime_coordinator ) ) entities.extend( - SenseVoltageSensor(data, i, sense_monitor_id) + SenseVoltageSensor(data, i, sense_monitor_id, realtime_coordinator) for i in range(len(data.active_voltage)) ) - for 
type_id, typ in TRENDS_SENSOR_TYPES.items(): + for scale in Scale: for variant_id, variant_name in TREND_SENSOR_VARIANTS: - name = typ.name - sensor_type = typ.sensor_type - - unique_id = f"{sense_monitor_id}-{type_id}-{variant_id}" entities.append( SenseTrendsSensor( data, - name, - sensor_type, + scale, variant_id, variant_name, trends_coordinator, - unique_id, sense_monitor_id, ) ) @@ -148,131 +118,89 @@ async def async_setup_entry( async_add_entities(entities) -class SenseActiveSensor(SensorEntity): +class SensePowerSensor(SenseEntity, SensorEntity): """Implementation of a Sense energy sensor.""" _attr_device_class = SensorDeviceClass.POWER _attr_native_unit_of_measurement = UnitOfPower.WATT - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - _attr_available = False _attr_state_class = SensorStateClass.MEASUREMENT def __init__( self, - data, - name, - sensor_type, - sense_monitor_id, - variant_id, - variant_name, - unique_id, - ): + gateway: ASyncSenseable, + sense_monitor_id: str, + variant_id: str, + variant_name: str, + realtime_coordinator: SenseRealtimeCoordinator, + ) -> None: """Initialize the Sense sensor.""" - self._attr_name = f"{name} {variant_name}" - self._attr_unique_id = unique_id - self._data = data - self._sense_monitor_id = sense_monitor_id - self._sensor_type = sensor_type - self._variant_id = variant_id - self._variant_name = variant_name - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) + super().__init__( + gateway, + realtime_coordinator, + sense_monitor_id, + f"{ACTIVE_TYPE}-{variant_id}", ) + self._attr_name = variant_name + self._variant_id = variant_id - @callback - def _async_update_from_data(self): - """Update the sensor from the data. 
Must not do I/O.""" - new_state = round( - self._data.active_solar_power + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return round( + self._gateway.active_solar_power if self._variant_id == PRODUCTION_ID - else self._data.active_power + else self._gateway.active_power ) - if self._attr_available and self._attr_native_value == new_state: - return - self._attr_native_value = new_state - self._attr_available = True - self.async_write_ha_state() -class SenseVoltageSensor(SensorEntity): +class SenseVoltageSensor(SenseEntity, SensorEntity): """Implementation of a Sense energy voltage sensor.""" _attr_device_class = SensorDeviceClass.VOLTAGE _attr_state_class = SensorStateClass.MEASUREMENT _attr_native_unit_of_measurement = UnitOfElectricPotential.VOLT - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - _attr_available = False def __init__( self, - data, - index, - sense_monitor_id, - ): + gateway: ASyncSenseable, + index: int, + sense_monitor_id: str, + realtime_coordinator: SenseRealtimeCoordinator, + ) -> None: """Initialize the Sense sensor.""" - line_num = index + 1 - self._attr_name = f"L{line_num} Voltage" - self._attr_unique_id = f"{sense_monitor_id}-L{line_num}" - self._data = data - self._sense_monitor_id = sense_monitor_id - self._voltage_index = index - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) + super().__init__( + gateway, realtime_coordinator, sense_monitor_id, f"L{index + 1}" ) + self._attr_name = f"L{index + 1} Voltage" + self._voltage_index = index - @callback - def _async_update_from_data(self): - """Update the sensor from the data. 
Must not do I/O.""" - new_state = round(self._data.active_voltage[self._voltage_index], 1) - if self._attr_available and self._attr_native_value == new_state: - return - self._attr_available = True - self._attr_native_value = new_state - self.async_write_ha_state() + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return round(self._gateway.active_voltage[self._voltage_index], 1) -class SenseTrendsSensor(CoordinatorEntity, SensorEntity): +class SenseTrendsSensor(SenseEntity, SensorEntity): """Implementation of a Sense energy sensor.""" - _attr_device_class = SensorDeviceClass.ENERGY - _attr_state_class = SensorStateClass.TOTAL - _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR - _attr_attribution = ATTRIBUTION - _attr_should_poll = False - def __init__( self, - data, - name, - sensor_type, - variant_id, - variant_name, - trends_coordinator, - unique_id, - sense_monitor_id, - ): + gateway: ASyncSenseable, + scale: Scale, + variant_id: str, + variant_name: str, + trends_coordinator: SenseTrendCoordinator, + sense_monitor_id: str, + ) -> None: """Initialize the Sense sensor.""" - super().__init__(trends_coordinator) - self._attr_name = f"{name} {variant_name}" - self._attr_unique_id = unique_id - self._data = data - self._sensor_type = sensor_type + super().__init__( + gateway, + trends_coordinator, + sense_monitor_id, + f"{TRENDS_SENSOR_TYPES[scale].lower()}-{variant_id}", + ) + self._attr_name = f"{TRENDS_SENSOR_TYPES[scale]} {variant_name}" + self._scale = scale self._variant_id = variant_id self._had_any_update = False if variant_id in [PRODUCTION_PCT_ID, SOLAR_POWERED_ID]: @@ -280,66 +208,75 @@ def __init__( self._attr_entity_registry_enabled_default = False self._attr_state_class = None self._attr_device_class = None - self._attr_device_info = DeviceInfo( - name=f"Sense {sense_monitor_id}", - identifiers={(DOMAIN, sense_monitor_id)}, - model="Sense", - manufacturer="Sense Labs, Inc.", - configuration_url="https://home.sense.com", - ) + else: + self._attr_device_class = SensorDeviceClass.ENERGY + self._attr_state_class = SensorStateClass.TOTAL + self._attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR @property - def native_value(self): + def native_value(self) -> float: """Return the state of the sensor.""" - return round(self._data.get_trend(self._sensor_type, self._variant_id), 1) + return round(self._gateway.get_stat(self._scale, self._variant_id), 1) @property - def last_reset(self): + def last_reset(self) -> datetime | None: """Return the time when the sensor was last reset, if any.""" if self._attr_state_class == SensorStateClass.TOTAL: - return self._data.trend_start(self._sensor_type) + return self._gateway.trend_start(self._scale) return None -class SenseEnergyDevice(SensorEntity): +class SenseDevicePowerSensor(SenseDeviceEntity, SensorEntity): """Implementation of a Sense energy device.""" - _attr_available = False _attr_state_class = SensorStateClass.MEASUREMENT _attr_native_unit_of_measurement = UnitOfPower.WATT - _attr_attribution = ATTRIBUTION _attr_device_class = SensorDeviceClass.POWER - _attr_should_poll = False - - def __init__(self, sense_devices_data, device, sense_monitor_id): - """Initialize the Sense binary sensor.""" - self._attr_name = f"{device['name']} {CONSUMPTION_NAME}" - self._id = device["id"] - self._sense_monitor_id = sense_monitor_id - self._attr_unique_id = f"{sense_monitor_id}-{self._id}-{CONSUMPTION_ID}" - self._attr_icon = sense_to_mdi(device["icon"]) - self._sense_devices_data = 
sense_devices_data - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SENSE_DEVICE_UPDATE}-{self._sense_monitor_id}", - self._async_update_from_data, - ) + + def __init__( + self, + device: SenseDevice, + sense_monitor_id: str, + coordinator: SenseRealtimeCoordinator, + ) -> None: + """Initialize the Sense device sensor.""" + super().__init__( + device, coordinator, sense_monitor_id, f"{device.id}-{CONSUMPTION_ID}" ) - @callback - def _async_update_from_data(self): - """Get the latest data, update state. Must not do I/O.""" - device_data = self._sense_devices_data.get_device_by_id(self._id) - if not device_data or "w" not in device_data: - new_state = 0 - else: - new_state = int(device_data["w"]) - if self._attr_available and self._attr_native_value == new_state: - return - self._attr_native_value = new_state - self._attr_available = True - self.async_write_ha_state() + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return self._device.power_w + + +class SenseDeviceEnergySensor(SenseDeviceEntity, SensorEntity): + """Implementation of a Sense device energy sensor.""" + + _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + _attr_state_class = SensorStateClass.TOTAL_INCREASING + _attr_device_class = SensorDeviceClass.ENERGY + + def __init__( + self, + device: SenseDevice, + scale: Scale, + coordinator: SenseTrendCoordinator, + sense_monitor_id: str, + ) -> None: + """Initialize the Sense device sensor.""" + super().__init__( + device, + coordinator, + sense_monitor_id, + f"{device.id}-{TRENDS_SENSOR_TYPES[scale].lower()}-energy", + ) + self._attr_translation_key = f"{TRENDS_SENSOR_TYPES[scale].lower()}_energy" + self._attr_suggested_display_precision = 2 + self._scale = scale + self._device = device + + @property + def native_value(self) -> float: + """Return the state of the sensor.""" + return self._device.energy_kwh[self._scale] diff --git a/homeassistant/components/sense/strings.json b/homeassistant/components/sense/strings.json index a519155bee1354..4579c84f0506af 100644 --- a/homeassistant/components/sense/strings.json +++ b/homeassistant/components/sense/strings.json @@ -32,5 +32,24 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } + }, + "entity": { + "sensor": { + "daily_energy": { + "name": "Daily energy" + }, + "weekly_energy": { + "name": "Weekly energy" + }, + "monthly_energy": { + "name": "Monthly energy" + }, + "yearly_energy": { + "name": "Yearly energy" + }, + "bill_energy": { + "name": "Bill energy" + } + } } } diff --git a/homeassistant/components/sensibo/config_flow.py b/homeassistant/components/sensibo/config_flow.py index 667f96fe1c28b6..b8b1029f14139f 100644 --- a/homeassistant/components/sensibo/config_flow.py +++ b/homeassistant/components/sensibo/config_flow.py @@ -8,8 +8,9 @@ from pysensibo.exceptions import AuthenticationError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant from homeassistant.helpers.selector import TextSelector from .const import DEFAULT_NAME, DOMAIN @@ -22,19 +23,34 @@ ) +async def validate_api( + hass: HomeAssistant, api_key: str +) -> tuple[str | None, dict[str, 
str]]: + """Validate the API key.""" + errors: dict[str, str] = {} + username: str | None = None + try: + username = await async_validate_api(hass, api_key) + except AuthenticationError: + errors["base"] = "invalid_auth" + except ConnectionError: + errors["base"] = "cannot_connect" + except NoDevicesError: + errors["base"] = "no_devices" + except NoUsernameError: + errors["base"] = "no_username" + return (username, errors) + + class SensiboConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Sensibo integration.""" VERSION = 2 - entry: ConfigEntry | None - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Sensibo.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -45,24 +61,13 @@ async def async_step_reauth_confirm( if user_input: api_key = user_input[CONF_API_KEY] - try: - username = await async_validate_api(self.hass, api_key) - except AuthenticationError: - errors["base"] = "invalid_auth" - except ConnectionError: - errors["base"] = "cannot_connect" - except NoDevicesError: - errors["base"] = "no_devices" - except NoUsernameError: - errors["base"] = "no_username" - else: - assert self.entry is not None - - if username == self.entry.unique_id: + username, errors = await validate_api(self.hass, api_key) + if username: + reauth_entry = self._get_reauth_entry() + if username == reauth_entry.unique_id: return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, + reauth_entry, + data_updates={ CONF_API_KEY: api_key, }, ) @@ -74,6 +79,32 @@ async def async_step_reauth_confirm( errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Reconfigure Sensibo.""" + errors: dict[str, str] = {} + + if user_input: + api_key = user_input[CONF_API_KEY] + username, errors = await validate_api(self.hass, api_key) + if username: + reconfigure_entry = self._get_reconfigure_entry() + if username == reconfigure_entry.unique_id: + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={ + CONF_API_KEY: api_key, + }, + ) + errors["base"] = "incorrect_api_key" + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -83,17 +114,8 @@ async def async_step_user( if user_input: api_key = user_input[CONF_API_KEY] - try: - username = await async_validate_api(self.hass, api_key) - except AuthenticationError: - errors["base"] = "invalid_auth" - except ConnectionError: - errors["base"] = "cannot_connect" - except NoDevicesError: - errors["base"] = "no_devices" - except NoUsernameError: - errors["base"] = "no_username" - else: + username, errors = await validate_api(self.hass, api_key) + if username: await self.async_set_unique_id(username) self._abort_if_unique_id_configured() diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index 60a32028017f6e..bec402bee18f29 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -2,7 +2,8 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + 
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -27,6 +28,14 @@ "data_description": { "api_key": "[%key:component::sensibo::config::step::user::data_description::api_key%]" } + }, + "reconfigure": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "api_key": "[%key:component::sensibo::config::step::user::data_description::api_key%]" + } } } }, diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index da0b48a23a0406..ee6167a5643368 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -17,6 +17,7 @@ SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -47,6 +48,7 @@ ) from homeassistant.util.unit_conversion import ( BaseUnitConverter, + BloodGlugoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -127,6 +129,12 @@ class SensorDeviceClass(StrEnum): Unit of measurement: `%` """ + BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" + """Blood glucose concentration. + + Unit of measurement: `mg/dL`, `mmol/L` + """ + CO = "carbon_monoxide" """Carbon Monoxide gas concentration. @@ -182,7 +190,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring energy consumption, for example electric energy consumption. - Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `cal`, `kcal`, `Mcal`, `Gcal` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -191,7 +199,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` """ FREQUENCY = "frequency" @@ -299,7 +307,7 @@ class SensorDeviceClass(StrEnum): POWER = "power" """Power. 
- Unit of measurement: `W`, `kW` + Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` """ PRECIPITATION = "precipitation" @@ -493,6 +501,7 @@ class SensorStateClass(StrEnum): UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] = { SensorDeviceClass.ATMOSPHERIC_PRESSURE: PressureConverter, + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: BloodGlugoseConcentrationConverter, SensorDeviceClass.CONDUCTIVITY: ConductivityConverter, SensorDeviceClass.CURRENT: ElectricCurrentConverter, SensorDeviceClass.DATA_RATE: DataRateConverter, @@ -524,6 +533,7 @@ class SensorStateClass(StrEnum): SensorDeviceClass.AQI: {None}, SensorDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure), SensorDeviceClass.BATTERY: {PERCENTAGE}, + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration), SensorDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION}, SensorDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION}, SensorDeviceClass.CONDUCTIVITY: set(UnitOfConductivity), @@ -599,6 +609,7 @@ class SensorStateClass(StrEnum): SensorDeviceClass.AQI: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.ATMOSPHERIC_PRESSURE: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.BATTERY: {SensorStateClass.MEASUREMENT}, + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CO: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CO2: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CONDUCTIVITY: {SensorStateClass.MEASUREMENT}, diff --git a/homeassistant/components/sensor/device_condition.py b/homeassistant/components/sensor/device_condition.py index f2b51899312cda..56ecb36adb3b1d 100644 --- a/homeassistant/components/sensor/device_condition.py +++ b/homeassistant/components/sensor/device_condition.py @@ -37,6 +37,7 @@ CONF_IS_AQI = "is_aqi" CONF_IS_ATMOSPHERIC_PRESSURE = "is_atmospheric_pressure" CONF_IS_BATTERY_LEVEL = "is_battery_level" +CONF_IS_BLOOD_GLUCOSE_CONCENTRATION = "is_blood_glucose_concentration" CONF_IS_CO = "is_carbon_monoxide" CONF_IS_CO2 = "is_carbon_dioxide" CONF_IS_CONDUCTIVITY = "is_conductivity" @@ -87,6 +88,9 @@ SensorDeviceClass.AQI: [{CONF_TYPE: CONF_IS_AQI}], SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_IS_ATMOSPHERIC_PRESSURE}], SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_IS_BATTERY_LEVEL}], + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [ + {CONF_TYPE: CONF_IS_BLOOD_GLUCOSE_CONCENTRATION} + ], SensorDeviceClass.CO: [{CONF_TYPE: CONF_IS_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_IS_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: CONF_IS_CONDUCTIVITY}], @@ -151,6 +155,7 @@ CONF_IS_AQI, CONF_IS_ATMOSPHERIC_PRESSURE, CONF_IS_BATTERY_LEVEL, + CONF_IS_BLOOD_GLUCOSE_CONCENTRATION, CONF_IS_CO, CONF_IS_CO2, CONF_IS_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/device_trigger.py b/homeassistant/components/sensor/device_trigger.py index b07b3fac11e456..ffee10d9f401b4 100644 --- a/homeassistant/components/sensor/device_trigger.py +++ b/homeassistant/components/sensor/device_trigger.py @@ -36,6 +36,7 @@ CONF_AQI = "aqi" CONF_ATMOSPHERIC_PRESSURE = "atmospheric_pressure" CONF_BATTERY_LEVEL = "battery_level" +CONF_BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" CONF_CO = "carbon_monoxide" CONF_CO2 = "carbon_dioxide" CONF_CONDUCTIVITY = "conductivity" @@ -86,6 +87,9 @@ SensorDeviceClass.AQI: [{CONF_TYPE: CONF_AQI}], SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_ATMOSPHERIC_PRESSURE}], SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_BATTERY_LEVEL}], + 
SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [ + {CONF_TYPE: CONF_BLOOD_GLUCOSE_CONCENTRATION} + ], SensorDeviceClass.CO: [{CONF_TYPE: CONF_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: CONF_CONDUCTIVITY}], @@ -151,6 +155,7 @@ CONF_AQI, CONF_ATMOSPHERIC_PRESSURE, CONF_BATTERY_LEVEL, + CONF_BLOOD_GLUCOSE_CONCENTRATION, CONF_CO, CONF_CO2, CONF_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/icons.json b/homeassistant/components/sensor/icons.json index 6132fcbc1e92e4..ea4c902e665735 100644 --- a/homeassistant/components/sensor/icons.json +++ b/homeassistant/components/sensor/icons.json @@ -12,6 +12,9 @@ "atmospheric_pressure": { "default": "mdi:thermometer-lines" }, + "blood_glucose_concentration": { + "default": "mdi:spoon-sugar" + }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index 59f20a9ed25035..675d24b9240a4d 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -6,7 +6,6 @@ from collections.abc import Callable, Iterable from contextlib import suppress import datetime -from functools import partial import itertools import logging import math @@ -39,6 +38,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.loader import async_suggest_report_issue from homeassistant.util import dt as dt_util +from homeassistant.util.async_ import run_callback_threadsafe from homeassistant.util.enum import try_parse_enum from homeassistant.util.hass_dict import HassKey @@ -686,7 +686,6 @@ def list_statistic_ids( @callback def _update_issues( report_issue: Callable[[str, str, dict[str, Any]], None], - clear_issue: Callable[[str, str], None], sensor_states: list[State], metadatas: dict[str, tuple[int, StatisticMetaData]], ) -> None: @@ -707,8 +706,6 @@ def _update_issues( entity_id, {"statistic_id": entity_id}, ) - else: - clear_issue("state_class_removed", entity_id) metadata_unit = metadata[1]["unit_of_measurement"] converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER.get(metadata_unit) @@ -725,8 +722,6 @@ def _update_issues( "supported_unit": metadata_unit, }, ) - else: - clear_issue("units_changed", entity_id) elif numeric and state_unit not in converter.VALID_UNITS: # The state unit can't be converted to the unit in metadata valid_units = (unit or "" for unit in converter.VALID_UNITS) @@ -741,8 +736,6 @@ def _update_issues( "supported_unit": valid_units_str, }, ) - else: - clear_issue("units_changed", entity_id) def update_statistics_issues( @@ -756,36 +749,50 @@ def update_statistics_issues( instance, session, statistic_source=RECORDER_DOMAIN ) + @callback + def get_sensor_statistics_issues(hass: HomeAssistant) -> set[str]: + """Return a list of statistics issues.""" + issues = set() + issue_registry = ir.async_get(hass) + for issue in issue_registry.issues.values(): + if ( + issue.domain != DOMAIN + or not (issue_data := issue.data) + or issue_data.get("issue_type") + not in ("state_class_removed", "units_changed") + ): + continue + issues.add(issue.issue_id) + return issues + + issues = run_callback_threadsafe( + hass.loop, get_sensor_statistics_issues, hass + ).result() + def create_issue_registry_issue( issue_type: str, statistic_id: str, data: dict[str, Any] ) -> None: """Create an issue registry issue.""" - hass.loop.call_soon_threadsafe( - partial( - ir.async_create_issue, - hass, - DOMAIN, - f"{issue_type}_{statistic_id}", - 
data=data | {"issue_type": issue_type}, - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key=issue_type, - translation_placeholders=data, - ) - ) - - def delete_issue_registry_issue(issue_type: str, statistic_id: str) -> None: - """Delete an issue registry issue.""" - hass.loop.call_soon_threadsafe( - ir.async_delete_issue, hass, DOMAIN, f"{issue_type}_{statistic_id}" + issue_id = f"{issue_type}_{statistic_id}" + issues.discard(issue_id) + ir.create_issue( + hass, + DOMAIN, + issue_id, + data=data | {"issue_type": issue_type}, + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key=issue_type, + translation_placeholders=data, ) _update_issues( create_issue_registry_issue, - delete_issue_registry_issue, sensor_states, metadatas, ) + for issue_id in issues: + hass.loop.call_soon_threadsafe(ir.async_delete_issue, hass, DOMAIN, issue_id) def validate_statistics( @@ -811,7 +818,6 @@ def create_statistic_validation_issue( _update_issues( create_statistic_validation_issue, - lambda issue_type, statistic_id: None, sensor_states, metadatas, ) diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index 71bead342c40b7..6d529e72c3b2bb 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -6,6 +6,7 @@ "is_aqi": "Current {entity_name} air quality index", "is_atmospheric_pressure": "Current {entity_name} atmospheric pressure", "is_battery_level": "Current {entity_name} battery level", + "is_blood_glucose_concentration": "Current {entity_name} blood glucose concentration", "is_carbon_monoxide": "Current {entity_name} carbon monoxide concentration level", "is_carbon_dioxide": "Current {entity_name} carbon dioxide concentration level", "is_conductivity": "Current {entity_name} conductivity", @@ -56,6 +57,7 @@ "aqi": "{entity_name} air quality index changes", "atmospheric_pressure": "{entity_name} atmospheric pressure changes", "battery_level": "{entity_name} battery level changes", + "blood_glucose_concentration": "{entity_name} blood glucose concentration changes", "carbon_monoxide": "{entity_name} carbon monoxide concentration changes", "carbon_dioxide": "{entity_name} carbon dioxide concentration changes", "conductivity": "{entity_name} conductivity changes", @@ -149,6 +151,9 @@ "battery": { "name": "Battery" }, + "blood_glucose_concentration": { + "name": "Blood glucose concentration" + }, "carbon_monoxide": { "name": "Carbon monoxide" }, diff --git a/homeassistant/components/sensorpush/manifest.json b/homeassistant/components/sensorpush/manifest.json index 0222a1c2884fee..7729a67d7a12f2 100644 --- a/homeassistant/components/sensorpush/manifest.json +++ b/homeassistant/components/sensorpush/manifest.json @@ -17,5 +17,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/sensorpush", "iot_class": "local_push", - "requirements": ["sensorpush-ble==1.6.2"] + "requirements": ["sensorpush-ble==1.7.1"] } diff --git a/homeassistant/components/sentry/config_flow.py b/homeassistant/components/sentry/config_flow.py index 59cd1f3f0e9e32..2fead7c27cdd8f 100644 --- a/homeassistant/components/sentry/config_flow.py +++ b/homeassistant/components/sentry/config_flow.py @@ -49,7 +49,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> SentryOptionsFlow: """Get the options flow for this handler.""" - return SentryOptionsFlow(config_entry) + return SentryOptionsFlow() async def async_step_user( self, user_input: 
dict[str, Any] | None = None @@ -78,10 +78,6 @@ async def async_step_user( class SentryOptionsFlow(OptionsFlow): """Handle Sentry options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Sentry options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/senz/__init__.py b/homeassistant/components/senz/__init__.py index bd4dfae457165c..c3238f7355f40d 100644 --- a/homeassistant/components/senz/__init__.py +++ b/homeassistant/components/senz/__init__.py @@ -60,6 +60,7 @@ async def update_thermostats() -> dict[str, Thermostat]: coordinator: SENZDataUpdateCoordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=account.username, update_interval=UPDATE_INTERVAL, update_method=update_thermostats, diff --git a/homeassistant/components/seven_segments/manifest.json b/homeassistant/components/seven_segments/manifest.json index 2f39644d6d31f7..af00a1fdfed6be 100644 --- a/homeassistant/components/seven_segments/manifest.json +++ b/homeassistant/components/seven_segments/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/seven_segments", "iot_class": "local_polling", - "requirements": ["Pillow==10.4.0"] + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/seventeentrack/services.py b/homeassistant/components/seventeentrack/services.py index 0833bc0a97bc70..54c23e6d6191ae 100644 --- a/homeassistant/components/seventeentrack/services.py +++ b/homeassistant/components/seventeentrack/services.py @@ -1,8 +1,8 @@ """Services for the seventeentrack integration.""" -from typing import Final +from typing import Any, Final -from pyseventeentrack.package import PACKAGE_STATUS_MAP +from pyseventeentrack.package import PACKAGE_STATUS_MAP, Package import voluptuous as vol from homeassistant.config_entries import ConfigEntry, ConfigEntryState @@ -81,18 +81,7 @@ async def get_packages(call: ServiceCall) -> ServiceResponse: return { "packages": [ - { - ATTR_DESTINATION_COUNTRY: package.destination_country, - ATTR_ORIGIN_COUNTRY: package.origin_country, - ATTR_PACKAGE_TYPE: package.package_type, - ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, - ATTR_TRACKING_NUMBER: package.tracking_number, - ATTR_LOCATION: package.location, - ATTR_STATUS: package.status, - ATTR_TIMESTAMP: package.timestamp.isoformat(), - ATTR_INFO_TEXT: package.info_text, - ATTR_FRIENDLY_NAME: package.friendly_name, - } + package_to_dict(package) for package in live_packages if slugify(package.status) in package_states or package_states == [] ] @@ -110,6 +99,22 @@ async def archive_package(call: ServiceCall) -> None: await seventeen_coordinator.client.profile.archive_package(tracking_number) + def package_to_dict(package: Package) -> dict[str, Any]: + result = { + ATTR_DESTINATION_COUNTRY: package.destination_country, + ATTR_ORIGIN_COUNTRY: package.origin_country, + ATTR_PACKAGE_TYPE: package.package_type, + ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, + ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, + } + if timestamp := package.timestamp: + result[ATTR_TIMESTAMP] = timestamp.isoformat() + return result + async def _validate_service(config_entry_id): entry: ConfigEntry | None = 
hass.config_entries.async_get_entry(config_entry_id) if not entry: diff --git a/homeassistant/components/sfr_box/config_flow.py b/homeassistant/components/sfr_box/config_flow.py index a4f14e5906973d..629f6ad291fa44 100644 --- a/homeassistant/components/sfr_box/config_flow.py +++ b/homeassistant/components/sfr_box/config_flow.py @@ -9,7 +9,7 @@ from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import selector from homeassistant.helpers.httpx_client import get_async_client @@ -37,7 +37,6 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 _box: SFRBox _config: dict[str, Any] = {} - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, str] | None = None @@ -88,19 +87,16 @@ async def async_step_auth( except SFRBoxAuthenticationError: errors["base"] = "invalid_auth" else: - if reauth_entry := self._reauth_entry: - data = {**reauth_entry.data, **user_input} - self.hass.config_entries.async_update_entry(reauth_entry, data=data) - self.hass.async_create_task( - self.hass.config_entries.async_reload(reauth_entry.entry_id) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - return self.async_abort(reason="reauth_successful") self._config.update(user_input) return self.async_create_entry(title="SFR Box", data=self._config) suggested_values: Mapping[str, Any] | None = user_input - if self._reauth_entry and not suggested_values: - suggested_values = self._reauth_entry.data + if self.source == SOURCE_REAUTH and not suggested_values: + suggested_values = self._get_reauth_entry().data data_schema = self.add_suggested_values_to_schema(AUTH_SCHEMA, suggested_values) return self.async_show_form( @@ -117,8 +113,5 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle failed credentials.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) self._box = SFRBox(ip=entry_data[CONF_HOST], client=get_async_client(self.hass)) return await self.async_step_auth() diff --git a/homeassistant/components/shelly/bluetooth/__init__.py b/homeassistant/components/shelly/bluetooth/__init__.py index fad7ddf4424a8f..f2b71d19d615de 100644 --- a/homeassistant/components/shelly/bluetooth/__init__.py +++ b/homeassistant/components/shelly/bluetooth/__init__.py @@ -5,13 +5,7 @@ from typing import TYPE_CHECKING from aioshelly.ble import async_start_scanner, create_scanner -from aioshelly.ble.const import ( - BLE_SCAN_RESULT_EVENT, - BLE_SCAN_RESULT_VERSION, - DEFAULT_DURATION_MS, - DEFAULT_INTERVAL_MS, - DEFAULT_WINDOW_MS, -) +from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT, BLE_SCAN_RESULT_VERSION from homeassistant.components.bluetooth import async_register_scanner from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback @@ -43,9 +37,6 @@ async def async_connect_scanner( active=scanner_mode == BLEScannerMode.ACTIVE, event_type=BLE_SCAN_RESULT_EVENT, data_version=BLE_SCAN_RESULT_VERSION, - interval_ms=DEFAULT_INTERVAL_MS, - window_ms=DEFAULT_WINDOW_MS, - duration_ms=DEFAULT_DURATION_MS, ) @hass_callback diff --git 
a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index 83caaeb4776380..1daa4710f3015a 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -146,7 +146,6 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): port: int = DEFAULT_HTTP_PORT info: dict[str, Any] = {} device_info: dict[str, Any] = {} - entry: ConfigEntry async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -356,7 +355,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -364,8 +362,9 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - host = self.entry.data[CONF_HOST] - port = get_http_port(self.entry.data) + reauth_entry = self._get_reauth_entry() + host = reauth_entry.data[CONF_HOST] + port = get_http_port(reauth_entry.data) if user_input is not None: try: @@ -373,7 +372,7 @@ async def async_step_reauth_confirm( except (DeviceConnectionError, InvalidAuthError): return self.async_abort(reason="reauth_unsuccessful") - if get_device_entry_gen(self.entry) != 1: + if get_device_entry_gen(reauth_entry) != 1: user_input[CONF_USERNAME] = "admin" try: await validate_input(self.hass, host, port, info, user_input) @@ -381,10 +380,10 @@ async def async_step_reauth_confirm( return self.async_abort(reason="reauth_unsuccessful") return self.async_update_reload_and_abort( - self.entry, data={**self.entry.data, **user_input} + reauth_entry, data_updates=user_input ) - if get_device_entry_gen(self.entry) in BLOCK_GENERATIONS: + if get_device_entry_gen(reauth_entry) in BLOCK_GENERATIONS: schema = { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, @@ -400,19 +399,12 @@ async def async_step_reauth_confirm( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.entry = self._get_reconfigure_entry() - self.host = self.entry.data[CONF_HOST] - self.port = self.entry.data.get(CONF_PORT, DEFAULT_HTTP_PORT) - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors = {} + reconfigure_entry = self._get_reconfigure_entry() + self.host = reconfigure_entry.data[CONF_HOST] + self.port = reconfigure_entry.data.get(CONF_PORT, DEFAULT_HTTP_PORT) if user_input is not None: host = user_input[CONF_HOST] @@ -424,23 +416,23 @@ async def async_step_reconfigure_confirm( except CustomPortNotSupported: errors["base"] = "custom_port_not_supported" else: - if info[CONF_MAC] != self.entry.unique_id: - return self.async_abort(reason="another_device") + await self.async_set_unique_id(info[CONF_MAC]) + self._abort_if_unique_id_mismatch(reason="another_device") - data = {**self.entry.data, CONF_HOST: host, CONF_PORT: port} - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reconfigure_successful") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates={CONF_HOST: host, CONF_PORT: port}, 
+ ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_HOST, default=self.host): str, vol.Required(CONF_PORT, default=self.port): vol.Coerce(int), } ), - description_placeholders={"device_name": self.entry.title}, + description_placeholders={"device_name": reconfigure_entry.title}, errors=errors, ) @@ -452,7 +444,7 @@ async def _async_get_info(self, host: str, port: int) -> dict[str, Any]: @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() @classmethod @callback @@ -468,10 +460,6 @@ def async_supports_options_flow(cls, config_entry: ConfigEntry) -> bool: class OptionsFlowHandler(OptionsFlow): """Handle the option flow for shelly.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 6332e139244b9d..a66fbb20f481ed 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -603,7 +603,7 @@ def _async_device_event_handler(self, event_data: dict[str, Any]) -> None: async def _async_update_data(self) -> None: """Fetch data.""" - if self.update_sleep_period(): + if self.update_sleep_period() or self.hass.is_stopping: return if self.sleep_period: diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index 5e2522ea456302..38437fb213761a 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.4.2"], + "requirements": ["aioshelly==12.0.1"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index f76319eb08c5be..342a7418b2a265 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -28,7 +28,7 @@ "confirm_discovery": { "description": "Do you want to set up the {model} at {host}?\n\nBattery-powered devices that are password protected must be woken up before continuing with setting up.\nBattery-powered devices that are not password protected will be added when the device wakes up, you can now manually wake the device up using a button on it or wait for the next data update from the device." 
}, - "reconfigure_confirm": { + "reconfigure": { "description": "Update configuration for {device_name}.\n\nBefore setup, battery-powered devices must be woken up, you can now wake the device up using a button on it.", "data": { "host": "[%key:common::config_flow::data::host%]", diff --git a/homeassistant/components/shelly/switch.py b/homeassistant/components/shelly/switch.py index 5ec223f53adcf2..134704cb0ffce2 100644 --- a/homeassistant/components/shelly/switch.py +++ b/homeassistant/components/shelly/switch.py @@ -66,6 +66,13 @@ class RpcSwitchDescription(RpcEntityDescription, SwitchEntityDescription): sub_key="value", ) +RPC_SCRIPT_SWITCH = RpcSwitchDescription( + key="script", + sub_key="running", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, +) + async def async_setup_entry( hass: HomeAssistant, @@ -176,6 +183,14 @@ def async_setup_rpc_entry( RpcVirtualSwitch, ) + async_setup_rpc_attribute_entities( + hass, + config_entry, + async_add_entities, + {"script": RPC_SCRIPT_SWITCH}, + RpcScriptSwitch, + ) + # the user can remove virtual components from the device configuration, so we need # to remove orphaned entities virtual_switch_ids = get_virtual_component_ids( @@ -190,6 +205,17 @@ def async_setup_rpc_entry( "boolean", ) + # if the script is removed, from the device configuration, we need + # to remove orphaned entities + async_remove_orphaned_entities( + hass, + config_entry.entry_id, + coordinator.mac, + SWITCH_PLATFORM, + coordinator.device.status, + "script", + ) + if not switch_ids: return @@ -317,3 +343,23 @@ async def async_turn_on(self, **kwargs: Any) -> None: async def async_turn_off(self, **kwargs: Any) -> None: """Turn off relay.""" await self.call_rpc("Boolean.Set", {"id": self._id, "value": False}) + + +class RpcScriptSwitch(ShellyRpcAttributeEntity, SwitchEntity): + """Entity that controls a script component on RPC based Shelly devices.""" + + entity_description: RpcSwitchDescription + _attr_has_entity_name = True + + @property + def is_on(self) -> bool: + """If switch is on.""" + return bool(self.status["running"]) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on relay.""" + await self.call_rpc("Script.Start", {"id": self._id}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off relay.""" + await self.call_rpc("Script.Stop", {"id": self._id}) diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index fb586ae8b85441..f22547acf50ccc 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -238,7 +238,8 @@ def __init__( ) -> None: """Initialize update entity.""" super().__init__(coordinator, key, attribute, description) - self._ota_in_progress: bool | int = False + self._ota_in_progress = False + self._ota_progress_percentage: int | None = None self._attr_release_url = get_release_url( coordinator.device.gen, coordinator.model, description.beta ) @@ -256,11 +257,12 @@ def _ota_progress_callback(self, event: dict[str, Any]) -> None: if self.in_progress is not False: event_type = event["event"] if event_type == OTA_BEGIN: - self._ota_in_progress = 0 + self._ota_progress_percentage = 0 elif event_type == OTA_PROGRESS: - self._ota_in_progress = event["progress_percent"] + self._ota_progress_percentage = event["progress_percent"] elif event_type in (OTA_ERROR, OTA_SUCCESS): self._ota_in_progress = False + self._ota_progress_percentage = None self.async_write_ha_state() @property @@ -278,10 +280,15 @@ def 
latest_version(self) -> str | None: return self.installed_version @property - def in_progress(self) -> bool | int: + def in_progress(self) -> bool: """Update installation in progress.""" return self._ota_in_progress + @property + def update_percentage(self) -> int | None: + """Update installation progress.""" + return self._ota_progress_percentage + async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -310,6 +317,7 @@ async def async_install( await self.coordinator.async_shutdown_device_and_start_reauth() else: self._ota_in_progress = True + self._ota_progress_percentage = None LOGGER.debug("OTA update call for %s successful", self.coordinator.name) diff --git a/homeassistant/components/shopping_list/intent.py b/homeassistant/components/shopping_list/intent.py index 84ea3971293688..1a6370f4168852 100644 --- a/homeassistant/components/shopping_list/intent.py +++ b/homeassistant/components/shopping_list/intent.py @@ -29,7 +29,7 @@ class AddItemIntent(intent.IntentHandler): async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: """Handle the intent.""" slots = self.async_validate_slots(intent_obj.slots) - item = slots["item"]["value"] + item = slots["item"]["value"].strip() await intent_obj.hass.data[DOMAIN].async_add(item) response = intent_obj.create_response() diff --git a/homeassistant/components/sia/alarm_control_panel.py b/homeassistant/components/sia/alarm_control_panel.py index 2b2a32ca67d68e..7ea878f538d5ee 100644 --- a/homeassistant/components/sia/alarm_control_panel.py +++ b/homeassistant/components/sia/alarm_control_panel.py @@ -4,25 +4,19 @@ from dataclasses import dataclass import logging +from typing import TYPE_CHECKING from pysiaalarm import SIAEvent from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityDescription, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - STATE_UNAVAILABLE, -) +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from .const import CONF_ACCOUNT, CONF_ACCOUNTS, CONF_ZONES, KEY_ALARM, PREVIOUS_STATE from .entity import SIABaseEntity, SIAEntityDescription @@ -41,31 +35,32 @@ class SIAAlarmControlPanelEntityDescription( ENTITY_DESCRIPTION_ALARM = SIAAlarmControlPanelEntityDescription( key=KEY_ALARM, code_consequences={ - "PA": STATE_ALARM_TRIGGERED, - "JA": STATE_ALARM_TRIGGERED, - "TA": STATE_ALARM_TRIGGERED, - "BA": STATE_ALARM_TRIGGERED, - "CA": STATE_ALARM_ARMED_AWAY, - "CB": STATE_ALARM_ARMED_AWAY, - "CG": STATE_ALARM_ARMED_AWAY, - "CL": STATE_ALARM_ARMED_AWAY, - "CP": STATE_ALARM_ARMED_AWAY, - "CQ": STATE_ALARM_ARMED_AWAY, - "CS": STATE_ALARM_ARMED_AWAY, - "CF": STATE_ALARM_ARMED_CUSTOM_BYPASS, - "NP": STATE_ALARM_DISARMED, - "NO": STATE_ALARM_DISARMED, - "OA": STATE_ALARM_DISARMED, - "OB": STATE_ALARM_DISARMED, - "OG": STATE_ALARM_DISARMED, - "OP": STATE_ALARM_DISARMED, - "OQ": STATE_ALARM_DISARMED, - "OR": STATE_ALARM_DISARMED, - "OS": STATE_ALARM_DISARMED, - "NC": STATE_ALARM_ARMED_NIGHT, - "NL": STATE_ALARM_ARMED_NIGHT, - "NE": STATE_ALARM_ARMED_NIGHT, - "NF": STATE_ALARM_ARMED_NIGHT, + "PA": AlarmControlPanelState.TRIGGERED, + "JA": 
AlarmControlPanelState.TRIGGERED, + "TA": AlarmControlPanelState.TRIGGERED, + "BA": AlarmControlPanelState.TRIGGERED, + "HA": AlarmControlPanelState.TRIGGERED, + "CA": AlarmControlPanelState.ARMED_AWAY, + "CB": AlarmControlPanelState.ARMED_AWAY, + "CG": AlarmControlPanelState.ARMED_AWAY, + "CL": AlarmControlPanelState.ARMED_AWAY, + "CP": AlarmControlPanelState.ARMED_AWAY, + "CQ": AlarmControlPanelState.ARMED_AWAY, + "CS": AlarmControlPanelState.ARMED_AWAY, + "CF": AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + "NP": AlarmControlPanelState.DISARMED, + "NO": AlarmControlPanelState.DISARMED, + "OA": AlarmControlPanelState.DISARMED, + "OB": AlarmControlPanelState.DISARMED, + "OG": AlarmControlPanelState.DISARMED, + "OP": AlarmControlPanelState.DISARMED, + "OQ": AlarmControlPanelState.DISARMED, + "OR": AlarmControlPanelState.DISARMED, + "OS": AlarmControlPanelState.DISARMED, + "NC": AlarmControlPanelState.ARMED_NIGHT, + "NL": AlarmControlPanelState.ARMED_NIGHT, + "NE": AlarmControlPanelState.ARMED_NIGHT, + "NF": AlarmControlPanelState.ARMED_NIGHT, "BR": PREVIOUS_STATE, }, ) @@ -109,13 +104,17 @@ def __init__( entity_description, ) - self._attr_state: StateType = None - self._old_state: StateType = None + self._attr_alarm_state: AlarmControlPanelState | None = None + self._old_state: AlarmControlPanelState | None = None def handle_last_state(self, last_state: State | None) -> None: """Handle the last state.""" - if last_state is not None: - self._attr_state = last_state.state + self._attr_alarm_state = None + if last_state is not None and last_state.state not in ( + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + self._attr_alarm_state = AlarmControlPanelState(last_state.state) if self.state == STATE_UNAVAILABLE: self._attr_available = False @@ -132,5 +131,7 @@ def update_state(self, sia_event: SIAEvent) -> bool: _LOGGER.debug("New state will be %s", new_state) if new_state == PREVIOUS_STATE: new_state = self._old_state - self._attr_state, self._old_state = new_state, self._attr_state + if TYPE_CHECKING: + assert isinstance(new_state, AlarmControlPanelState) + self._attr_alarm_state, self._old_state = new_state, self._attr_alarm_state return True diff --git a/homeassistant/components/sia/config_flow.py b/homeassistant/components/sia/config_flow.py index cb451133d418f3..a23978145e72d1 100644 --- a/homeassistant/components/sia/config_flow.py +++ b/homeassistant/components/sia/config_flow.py @@ -181,7 +181,6 @@ class SIAOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize SIA options flow.""" - self.config_entry = config_entry self.options = deepcopy(dict(config_entry.options)) self.hub: SIAHub | None = None self.accounts_todo: list = [] diff --git a/homeassistant/components/sia/entity.py b/homeassistant/components/sia/entity.py index aecac2b540b363..48af8e0beb42ed 100644 --- a/homeassistant/components/sia/entity.py +++ b/homeassistant/components/sia/entity.py @@ -8,6 +8,7 @@ from pysiaalarm import SIAEvent +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PORT from homeassistant.core import CALLBACK_TYPE, State, callback @@ -40,7 +41,7 @@ class SIARequiredKeysMixin: """Required keys for SIA entities.""" - code_consequences: dict[str, StateType | bool] + code_consequences: dict[str, StateType | bool | AlarmControlPanelState] @dataclass(frozen=True) diff --git a/homeassistant/components/sighthound/manifest.json 
b/homeassistant/components/sighthound/manifest.json index 875c98acb6dbb8..7d08367cf7d198 100644 --- a/homeassistant/components/sighthound/manifest.json +++ b/homeassistant/components/sighthound/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/sighthound", "iot_class": "cloud_polling", "loggers": ["simplehound"], - "requirements": ["Pillow==10.4.0", "simplehound==0.3"] + "requirements": ["Pillow==11.0.0", "simplehound==0.3"] } diff --git a/homeassistant/components/simplisafe/alarm_control_panel.py b/homeassistant/components/simplisafe/alarm_control_panel.py index 478e5784e1926c..18f2d8ddcd5b8a 100644 --- a/homeassistant/components/simplisafe/alarm_control_panel.py +++ b/homeassistant/components/simplisafe/alarm_control_panel.py @@ -26,16 +26,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -65,33 +58,33 @@ ATTR_WIFI_STRENGTH = "wifi_strength" STATE_MAP_FROM_REST_API = { - SystemStates.ALARM: STATE_ALARM_TRIGGERED, - SystemStates.ALARM_COUNT: STATE_ALARM_PENDING, - SystemStates.AWAY: STATE_ALARM_ARMED_AWAY, - SystemStates.AWAY_COUNT: STATE_ALARM_ARMING, - SystemStates.ENTRY_DELAY: STATE_ALARM_PENDING, - SystemStates.EXIT_DELAY: STATE_ALARM_ARMING, - SystemStates.HOME: STATE_ALARM_ARMED_HOME, - SystemStates.HOME_COUNT: STATE_ALARM_ARMING, - SystemStates.OFF: STATE_ALARM_DISARMED, - SystemStates.TEST: STATE_ALARM_DISARMED, + SystemStates.ALARM: AlarmControlPanelState.TRIGGERED, + SystemStates.ALARM_COUNT: AlarmControlPanelState.PENDING, + SystemStates.AWAY: AlarmControlPanelState.ARMED_AWAY, + SystemStates.AWAY_COUNT: AlarmControlPanelState.ARMING, + SystemStates.ENTRY_DELAY: AlarmControlPanelState.PENDING, + SystemStates.EXIT_DELAY: AlarmControlPanelState.ARMING, + SystemStates.HOME: AlarmControlPanelState.ARMED_HOME, + SystemStates.HOME_COUNT: AlarmControlPanelState.ARMING, + SystemStates.OFF: AlarmControlPanelState.DISARMED, + SystemStates.TEST: AlarmControlPanelState.DISARMED, } STATE_MAP_FROM_WEBSOCKET_EVENT = { - EVENT_ALARM_CANCELED: STATE_ALARM_DISARMED, - EVENT_ALARM_TRIGGERED: STATE_ALARM_TRIGGERED, - EVENT_ARMED_AWAY: STATE_ALARM_ARMED_AWAY, - EVENT_ARMED_AWAY_BY_KEYPAD: STATE_ALARM_ARMED_AWAY, - EVENT_ARMED_AWAY_BY_REMOTE: STATE_ALARM_ARMED_AWAY, - EVENT_ARMED_HOME: STATE_ALARM_ARMED_HOME, - EVENT_AWAY_EXIT_DELAY_BY_KEYPAD: STATE_ALARM_ARMING, - EVENT_AWAY_EXIT_DELAY_BY_REMOTE: STATE_ALARM_ARMING, - EVENT_DISARMED_BY_KEYPAD: STATE_ALARM_DISARMED, - EVENT_DISARMED_BY_REMOTE: STATE_ALARM_DISARMED, - EVENT_ENTRY_DELAY: STATE_ALARM_PENDING, - EVENT_HOME_EXIT_DELAY: STATE_ALARM_ARMING, - EVENT_SECRET_ALERT_TRIGGERED: STATE_ALARM_TRIGGERED, - EVENT_USER_INITIATED_TEST: STATE_ALARM_DISARMED, + EVENT_ALARM_CANCELED: AlarmControlPanelState.DISARMED, + EVENT_ALARM_TRIGGERED: AlarmControlPanelState.TRIGGERED, + EVENT_ARMED_AWAY: AlarmControlPanelState.ARMED_AWAY, + EVENT_ARMED_AWAY_BY_KEYPAD: AlarmControlPanelState.ARMED_AWAY, + EVENT_ARMED_AWAY_BY_REMOTE: AlarmControlPanelState.ARMED_AWAY, + EVENT_ARMED_HOME: 
AlarmControlPanelState.ARMED_HOME, + EVENT_AWAY_EXIT_DELAY_BY_KEYPAD: AlarmControlPanelState.ARMING, + EVENT_AWAY_EXIT_DELAY_BY_REMOTE: AlarmControlPanelState.ARMING, + EVENT_DISARMED_BY_KEYPAD: AlarmControlPanelState.DISARMED, + EVENT_DISARMED_BY_REMOTE: AlarmControlPanelState.DISARMED, + EVENT_ENTRY_DELAY: AlarmControlPanelState.PENDING, + EVENT_HOME_EXIT_DELAY: AlarmControlPanelState.ARMING, + EVENT_SECRET_ALERT_TRIGGERED: AlarmControlPanelState.TRIGGERED, + EVENT_USER_INITIATED_TEST: AlarmControlPanelState.DISARMED, } WEBSOCKET_EVENTS_TO_LISTEN_FOR = ( @@ -145,9 +138,9 @@ def __init__(self, simplisafe: SimpliSafe, system: SystemType) -> None: def _set_state_from_system_data(self) -> None: """Set the state based on the latest REST API data.""" if self._system.alarm_going_off: - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED elif state := STATE_MAP_FROM_REST_API.get(self._system.state): - self._attr_state = state + self._attr_alarm_state = state self.async_reset_error_count() else: LOGGER.warning("Unexpected system state (REST API): %s", self._system.state) @@ -162,7 +155,7 @@ async def async_alarm_disarm(self, code: str | None = None) -> None: f'Error while disarming "{self._system.system_id}": {err}' ) from err - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED self.async_write_ha_state() async def async_alarm_arm_home(self, code: str | None = None) -> None: @@ -174,7 +167,7 @@ async def async_alarm_arm_home(self, code: str | None = None) -> None: f'Error while arming (home) "{self._system.system_id}": {err}' ) from err - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME self.async_write_ha_state() async def async_alarm_arm_away(self, code: str | None = None) -> None: @@ -186,7 +179,7 @@ async def async_alarm_arm_away(self, code: str | None = None) -> None: f'Error while arming (away) "{self._system.system_id}": {err}' ) from err - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING self.async_write_ha_state() @callback @@ -230,7 +223,7 @@ def async_update_from_websocket_event(self, event: WebsocketEvent) -> None: assert event.event_type if state := STATE_MAP_FROM_WEBSOCKET_EVENT.get(event.event_type): - self._attr_state = state + self._attr_alarm_state = state self.async_reset_error_count() else: LOGGER.error("Unknown alarm websocket event: %s", event.event_type) diff --git a/homeassistant/components/simplisafe/config_flow.py b/homeassistant/components/simplisafe/config_flow.py index 6fdbd351a299e3..68974fe118fee0 100644 --- a/homeassistant/components/simplisafe/config_flow.py +++ b/homeassistant/components/simplisafe/config_flow.py @@ -67,7 +67,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> SimpliSafeOptionsFlowHandler: """Define the config flow to handle options.""" - return SimpliSafeOptionsFlowHandler(config_entry) + return SimpliSafeOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -153,10 +153,6 @@ async def async_step_user( class SimpliSafeOptionsFlowHandler(OptionsFlow): """Handle a SimpliSafe options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/skybell/config_flow.py 
b/homeassistant/components/skybell/config_flow.py index 385f3dc39d7f03..a32441f4cf8891 100644 --- a/homeassistant/components/skybell/config_flow.py +++ b/homeassistant/components/skybell/config_flow.py @@ -34,16 +34,11 @@ async def async_step_reauth_confirm( errors = {} if user_input: password = user_input[CONF_PASSWORD] - entry_id = self.context["entry_id"] - if entry := self.hass.config_entries.async_get_entry(entry_id): - _, error = await self._async_validate_input(self.reauth_email, password) - if error is None: - self.hass.config_entries.async_update_entry( - entry, - data=entry.data | user_input, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + _, error = await self._async_validate_input(self.reauth_email, password) + if error is None: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input + ) errors["base"] = error return self.async_show_form( diff --git a/homeassistant/components/sleepiq/config_flow.py b/homeassistant/components/sleepiq/config_flow.py index 26f3672d58868b..0a473404eb92e1 100644 --- a/homeassistant/components/sleepiq/config_flow.py +++ b/homeassistant/components/sleepiq/config_flow.py @@ -9,7 +9,7 @@ from asyncsleepiq import AsyncSleepIQ, SleepIQLoginException, SleepIQTimeoutException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -24,10 +24,6 @@ class SleepIQFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the config flow.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a SleepIQ account as a config entry. 
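The reauth hunks in this part of the diff (sfr_box, shelly, skybell, and the sleepiq and smarttub hunks that follow) all apply the same refactor: the flow no longer stores its own _reauth_entry or looks the entry up via context["entry_id"], and instead relies on the ConfigFlow helpers self._get_reauth_entry() and self.async_update_reload_and_abort(). The following is only a minimal sketch of the resulting flow shape for a hypothetical integration; DOMAIN, AUTH_SCHEMA and the omitted credential validation are placeholders and are not part of this changeset.

# Sketch only: hypothetical integration illustrating the reauth helper pattern
# used throughout this changeset. DOMAIN/AUTH_SCHEMA are placeholders, and a
# real flow would validate the new credentials before updating the entry.
from collections.abc import Mapping
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD

DOMAIN = "example"  # hypothetical domain, not part of this diff
AUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str})


class ExampleConfigFlow(ConfigFlow, domain=DOMAIN):
    """Hypothetical flow showing _get_reauth_entry / async_update_reload_and_abort."""

    VERSION = 1

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon an API authentication error."""
        # No per-flow _reauth_entry attribute: the base class tracks the entry
        # that started this reauth flow.
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauth."""
        if user_input is not None:
            # Merge the new credentials into the existing entry, reload it and
            # end the flow with the standard "reauth_successful" abort.
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(), data_updates=user_input
            )
        return self.async_show_form(
            step_id="reauth_confirm", data_schema=AUTH_SCHEMA
        )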
@@ -84,9 +80,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -94,19 +87,16 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth.""" errors: dict[str, str] = {} - assert self._reauth_entry is not None + + reauth_entry = self._get_reauth_entry() if user_input is not None: data = { - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], } if not (error := await try_connection(self.hass, data)): - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=data - ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) errors["base"] = error return self.async_show_form( @@ -114,7 +104,7 @@ async def async_step_reauth_confirm( data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), errors=errors, description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], }, ) diff --git a/homeassistant/components/slide/cover.py b/homeassistant/components/slide/cover.py index 5186b3d0feac59..d4927775a9714f 100644 --- a/homeassistant/components/slide/cover.py +++ b/homeassistant/components/slide/cover.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.components.cover import ATTR_POSITION, CoverDeviceClass, CoverEntity -from homeassistant.const import ATTR_ID, STATE_CLOSED, STATE_CLOSING, STATE_OPENING +from homeassistant.const import ATTR_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -15,6 +15,10 @@ _LOGGER = logging.getLogger(__name__) +CLOSED = "closed" +CLOSING = "closing" +OPENING = "opening" + async def async_setup_platform( hass: HomeAssistant, @@ -55,19 +59,19 @@ def __init__(self, api, slide): @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._slide["state"] == STATE_OPENING + return self._slide["state"] == OPENING @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._slide["state"] == STATE_CLOSING + return self._slide["state"] == CLOSING @property def is_closed(self) -> bool | None: """Return None if status is unknown, True if closed, else False.""" if self._slide["state"] is None: return None - return self._slide["state"] == STATE_CLOSED + return self._slide["state"] == CLOSED @property def available(self) -> bool: @@ -87,12 +91,12 @@ def current_cover_position(self) -> int | None: async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" - self._slide["state"] = STATE_OPENING + self._slide["state"] = OPENING await self._api.slide_open(self._id) async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" - self._slide["state"] = STATE_CLOSING + self._slide["state"] = CLOSING await self._api.slide_close(self._id) async def async_stop_cover(self, **kwargs: Any) -> None: @@ -107,8 +111,8 @@ async def async_set_cover_position(self, **kwargs: Any) -> None: if self._slide["pos"] is not None: 
if position > self._slide["pos"]: - self._slide["state"] = STATE_CLOSING + self._slide["state"] = CLOSING else: - self._slide["state"] = STATE_OPENING + self._slide["state"] = OPENING await self._api.slide_set_position(self._id, position) diff --git a/homeassistant/components/slide/manifest.json b/homeassistant/components/slide/manifest.json index bb25e10658a119..111bc9bd7a93d8 100644 --- a/homeassistant/components/slide/manifest.json +++ b/homeassistant/components/slide/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/slide", "iot_class": "cloud_polling", "loggers": ["goslideapi"], - "requirements": ["goslide-api==0.5.1"] + "requirements": ["goslide-api==0.7.0"] } diff --git a/homeassistant/components/sma/__init__.py b/homeassistant/components/sma/__init__.py index d8a7929ae7927c..37fb4d72284aa6 100644 --- a/homeassistant/components/sma/__init__.py +++ b/homeassistant/components/sma/__init__.py @@ -92,6 +92,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="sma", update_method=async_update_data, update_interval=interval, diff --git a/homeassistant/components/smappee/strings.json b/homeassistant/components/smappee/strings.json index 2bdbf0dabe81da..2966b5cd753f29 100644 --- a/homeassistant/components/smappee/strings.json +++ b/homeassistant/components/smappee/strings.json @@ -23,6 +23,7 @@ } }, "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_configured_device": "[%key:common::config_flow::abort::already_configured_device%]", "already_configured_local_device": "Local device(s) is already configured. Please remove those first before configuring a cloud device.", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", diff --git a/homeassistant/components/smart_meter_texas/__init__.py b/homeassistant/components/smart_meter_texas/__init__.py index c6e466392f07d6..1cd7df68e91342 100644 --- a/homeassistant/components/smart_meter_texas/__init__.py +++ b/homeassistant/components/smart_meter_texas/__init__.py @@ -64,6 +64,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="Smart Meter Texas", update_method=async_update_data, update_interval=SCAN_INTERVAL, diff --git a/homeassistant/components/smartthings/cover.py b/homeassistant/components/smartthings/cover.py index d0e2fc3f039c40..55e86bd582efeb 100644 --- a/homeassistant/components/smartthings/cover.py +++ b/homeassistant/components/smartthings/cover.py @@ -10,13 +10,10 @@ from homeassistant.components.cover import ( ATTR_POSITION, DOMAIN as COVER_DOMAIN, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, CoverDeviceClass, CoverEntity, CoverEntityFeature, + CoverState, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_BATTERY_LEVEL @@ -27,11 +24,11 @@ from .entity import SmartThingsEntity VALUE_TO_STATE = { - "closed": STATE_CLOSED, - "closing": STATE_CLOSING, - "open": STATE_OPEN, - "opening": STATE_OPENING, - "partially open": STATE_OPEN, + "closed": CoverState.CLOSED, + "closing": CoverState.CLOSING, + "open": CoverState.OPEN, + "opening": CoverState.OPENING, + "partially open": CoverState.OPEN, "unknown": None, } @@ -147,16 +144,16 @@ async def async_update(self) -> None: @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state == STATE_OPENING + return self._state == 
CoverState.OPENING @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state == STATE_CLOSING + return self._state == CoverState.CLOSING @property def is_closed(self) -> bool | None: """Return if the cover is closed or not.""" - if self._state == STATE_CLOSED: + if self._state == CoverState.CLOSED: return True return None if self._state is None else False diff --git a/homeassistant/components/smarttub/config_flow.py b/homeassistant/components/smarttub/config_flow.py index 5caff953d6d0b5..cf96d7082a1a4d 100644 --- a/homeassistant/components/smarttub/config_flow.py +++ b/homeassistant/components/smarttub/config_flow.py @@ -3,12 +3,12 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any +from typing import Any from smarttub import LoginFailed import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from .const import DOMAIN @@ -24,12 +24,6 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Instantiate config flow.""" - super().__init__() - self._reauth_input: Mapping[str, Any] | None = None - self._reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -48,24 +42,17 @@ async def async_step_user( else: await self.async_set_unique_id(account.id) - if self._reauth_input is None: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( title=user_input[CONF_EMAIL], data=user_input ) # this is a reauth attempt - if TYPE_CHECKING: - assert self._reauth_entry - if self._reauth_entry.unique_id != self.unique_id: - # there is a config entry matching this account, - # but it is not the one we were trying to reauth - return self.async_abort(reason="already_configured") - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input + self._abort_if_unique_id_mismatch(reason="already_configured") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors @@ -75,10 +62,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Get new credentials if the current ones don't work anymore.""" - self._reauth_input = entry_data - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -86,13 +69,12 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" if user_input is None: - if TYPE_CHECKING: - assert self._reauth_input is not None # same as DATA_SCHEMA but with default email data_schema = vol.Schema( { vol.Required( - CONF_EMAIL, default=self._reauth_input.get(CONF_EMAIL) + CONF_EMAIL, + default=self._get_reauth_entry().data.get(CONF_EMAIL), ): str, vol.Required(CONF_PASSWORD): str, } diff --git a/homeassistant/components/smarty/__init__.py b/homeassistant/components/smarty/__init__.py index 17c4bd0a26a850..0d043804c3d174 100644 --- 
a/homeassistant/components/smarty/__init__.py +++ b/homeassistant/components/smarty/__init__.py @@ -1,23 +1,20 @@ """Support to control a Salda Smarty XP/XV ventilation unit.""" -from datetime import timedelta import ipaddress import logging -from pysmarty2 import Smarty import voluptuous as vol +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST, CONF_NAME, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import discovery +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import dispatcher_send -from homeassistant.helpers.event import track_time_interval from homeassistant.helpers.typing import ConfigType -DOMAIN = "smarty" -DATA_SMARTY = "smarty" -SMARTY_NAME = "Smarty" +from .const import DOMAIN +from .coordinator import SmartyConfigEntry, SmartyCoordinator _LOGGER = logging.getLogger(__name__) @@ -26,48 +23,84 @@ DOMAIN: vol.Schema( { vol.Required(CONF_HOST): vol.All(ipaddress.ip_address, cv.string), - vol.Optional(CONF_NAME, default=SMARTY_NAME): cv.string, + vol.Optional(CONF_NAME, default="Smarty"): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) -RPM = "rpm" -SIGNAL_UPDATE_SMARTY = "smarty_update" +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.FAN, + Platform.SENSOR, + Platform.SWITCH, +] -def setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the smarty environment.""" - - conf = config[DOMAIN] - - host = conf[CONF_HOST] - name = conf[CONF_NAME] - - _LOGGER.debug("Name: %s, host: %s", name, host) - - smarty = Smarty(host=host) +async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool: + """Create a smarty system.""" + if config := hass_config.get(DOMAIN): + hass.async_create_task(_async_import(hass, config)) + return True - hass.data[DOMAIN] = {"api": smarty, "name": name} - # Initial update - smarty.update() +async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: + """Set up the smarty environment.""" - # Load platforms - discovery.load_platform(hass, Platform.FAN, DOMAIN, {}, config) - discovery.load_platform(hass, Platform.SENSOR, DOMAIN, {}, config) - discovery.load_platform(hass, Platform.BINARY_SENSOR, DOMAIN, {}, config) + if not hass.config_entries.async_entries(DOMAIN): + # Start import flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + if result["type"] == FlowResultType.ABORT: + ir.async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{result['reason']}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Smarty", + }, + ) + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.5.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Smarty", + }, + ) + + +async def async_setup_entry(hass: HomeAssistant, entry: SmartyConfigEntry) -> bool: + """Set up the Smarty environment from a config 
entry.""" + + coordinator = SmartyCoordinator(hass) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - def poll_device_update(event_time): - """Update Smarty device.""" - _LOGGER.debug("Updating Smarty device") - if smarty.update(): - _LOGGER.debug("Update success") - dispatcher_send(hass, SIGNAL_UPDATE_SMARTY) - else: - _LOGGER.debug("Update failed") + return True - track_time_interval(hass, poll_device_update, timedelta(seconds=30)) - return True +async def async_unload_entry(hass: HomeAssistant, entry: SmartyConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/smarty/binary_sensor.py b/homeassistant/components/smarty/binary_sensor.py index b31c51244b8b08..213cb00d47cc30 100644 --- a/homeassistant/components/smarty/binary_sensor.py +++ b/homeassistant/components/smarty/binary_sensor.py @@ -2,6 +2,8 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass import logging from pysmarty2 import Smarty @@ -9,104 +11,77 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, + BinarySensorEntityDescription, ) -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN, SIGNAL_UPDATE_SMARTY +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity _LOGGER = logging.getLogger(__name__) -async def async_setup_platform( +@dataclass(frozen=True, kw_only=True) +class SmartyBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing Smarty binary sensor entities.""" + + value_fn: Callable[[Smarty], bool] + + +ENTITIES: tuple[SmartyBinarySensorEntityDescription, ...] 
= ( + SmartyBinarySensorEntityDescription( + key="alarm", + translation_key="alarm", + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda smarty: smarty.alarm, + ), + SmartyBinarySensorEntityDescription( + key="warning", + translation_key="warning", + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda smarty: smarty.warning, + ), + SmartyBinarySensorEntityDescription( + key="boost", + translation_key="boost_state", + value_fn=lambda smarty: smarty.boost, + ), +) + + +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: SmartyConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Smarty Binary Sensor Platform.""" - smarty: Smarty = hass.data[DOMAIN]["api"] - name: str = hass.data[DOMAIN]["name"] - sensors = [ - AlarmSensor(name, smarty), - WarningSensor(name, smarty), - BoostSensor(name, smarty), - ] + coordinator = entry.runtime_data - async_add_entities(sensors, True) + async_add_entities( + SmartyBinarySensor(coordinator, description) for description in ENTITIES + ) -class SmartyBinarySensor(BinarySensorEntity): +class SmartyBinarySensor(SmartyEntity, BinarySensorEntity): """Representation of a Smarty Binary Sensor.""" - _attr_should_poll = False + entity_description: SmartyBinarySensorEntityDescription def __init__( self, - name: str, - device_class: BinarySensorDeviceClass | None, - smarty: Smarty, + coordinator: SmartyCoordinator, + entity_description: SmartyBinarySensorEntityDescription, ) -> None: """Initialize the entity.""" - self._attr_name = name - self._attr_device_class = device_class - self._smarty = smarty - - async def async_added_to_hass(self) -> None: - """Call to update.""" - async_dispatcher_connect(self.hass, SIGNAL_UPDATE_SMARTY, self._update_callback) - - @callback - def _update_callback(self) -> None: - """Call update method.""" - self.async_schedule_update_ha_state(True) - - -class BoostSensor(SmartyBinarySensor): - """Boost State Binary Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Alarm Sensor Init.""" - super().__init__(name=f"{name} Boost State", device_class=None, smarty=smarty) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_is_on = self._smarty.boost - - -class AlarmSensor(SmartyBinarySensor): - """Alarm Binary Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Alarm Sensor Init.""" - super().__init__( - name=f"{name} Alarm", - device_class=BinarySensorDeviceClass.PROBLEM, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_is_on = self._smarty.alarm - - -class WarningSensor(SmartyBinarySensor): - """Warning Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Warning Sensor Init.""" - super().__init__( - name=f"{name} Warning", - device_class=BinarySensorDeviceClass.PROBLEM, - smarty=smarty, + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" ) - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_is_on = self._smarty.warning + @property + def is_on(self) -> bool: + """Return the state of the binary sensor.""" + return self.entity_description.value_fn(self.coordinator.client) diff --git 
a/homeassistant/components/smarty/button.py b/homeassistant/components/smarty/button.py new file mode 100644 index 00000000000000..b8e31cf6fc8caf --- /dev/null +++ b/homeassistant/components/smarty/button.py @@ -0,0 +1,74 @@ +"""Platform to control a Salda Smarty XP/XV ventilation unit.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmarty2 import Smarty + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmartyButtonDescription(ButtonEntityDescription): + """Class describing Smarty button.""" + + press_fn: Callable[[Smarty], bool | None] + + +ENTITIES: tuple[SmartyButtonDescription, ...] = ( + SmartyButtonDescription( + key="reset_filters_timer", + translation_key="reset_filters_timer", + press_fn=lambda smarty: smarty.reset_filters_timer(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Smarty Button Platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + SmartyButton(coordinator, description) for description in ENTITIES + ) + + +class SmartyButton(SmartyEntity, ButtonEntity): + """Representation of a Smarty Button.""" + + entity_description: SmartyButtonDescription + + def __init__( + self, + coordinator: SmartyCoordinator, + entity_description: SmartyButtonDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" + ) + + async def async_press(self, **kwargs: Any) -> None: + """Press the button.""" + await self.hass.async_add_executor_job( + self.entity_description.press_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/smarty/config_flow.py b/homeassistant/components/smarty/config_flow.py new file mode 100644 index 00000000000000..9a55356a99045e --- /dev/null +++ b/homeassistant/components/smarty/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Smarty integration.""" + +from typing import Any + +from pysmarty2 import Smarty +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_NAME + +from .const import DOMAIN + + +class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): + """Smarty config flow.""" + + def _test_connection(self, host: str) -> str | None: + """Test the connection to the Smarty API.""" + smarty = Smarty(host=host) + try: + if smarty.update(): + return None + except Exception: # noqa: BLE001 + return "unknown" + else: + return "cannot_connect" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + + if user_input is not None: + self._async_abort_entries_match(user_input) + error = await self.hass.async_add_executor_job( + self._test_connection, user_input[CONF_HOST] + ) + if not error: + return self.async_create_entry( + 
title=user_input[CONF_HOST], data=user_input + ) + errors["base"] = error + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_HOST): str}), + errors=errors, + ) + + async def async_step_import( + self, import_config: dict[str, Any] + ) -> ConfigFlowResult: + """Handle a flow initialized by import.""" + error = await self.hass.async_add_executor_job( + self._test_connection, import_config[CONF_HOST] + ) + if not error: + return self.async_create_entry( + title=import_config[CONF_NAME], + data={CONF_HOST: import_config[CONF_HOST]}, + ) + return self.async_abort(reason=error) diff --git a/homeassistant/components/smarty/const.py b/homeassistant/components/smarty/const.py new file mode 100644 index 00000000000000..926c4233750492 --- /dev/null +++ b/homeassistant/components/smarty/const.py @@ -0,0 +1,3 @@ +"""Constants for the Smarty component.""" + +DOMAIN = "smarty" diff --git a/homeassistant/components/smarty/coordinator.py b/homeassistant/components/smarty/coordinator.py new file mode 100644 index 00000000000000..d7f3e2452d1211 --- /dev/null +++ b/homeassistant/components/smarty/coordinator.py @@ -0,0 +1,44 @@ +"""Smarty Coordinator.""" + +from datetime import timedelta +import logging + +from pysmarty2 import Smarty + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + +type SmartyConfigEntry = ConfigEntry[SmartyCoordinator] + + +class SmartyCoordinator(DataUpdateCoordinator[None]): + """Smarty Coordinator.""" + + config_entry: SmartyConfigEntry + software_version: str + configuration_version: str + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize.""" + super().__init__( + hass, + logger=_LOGGER, + name="Smarty", + update_interval=timedelta(seconds=30), + ) + self.client = Smarty(host=self.config_entry.data[CONF_HOST]) + + async def _async_setup(self) -> None: + if not await self.hass.async_add_executor_job(self.client.update): + raise UpdateFailed("Failed to update Smarty data") + self.software_version = self.client.get_software_version() + self.configuration_version = self.client.get_configuration_version() + + async def _async_update_data(self) -> None: + """Fetch data from Smarty.""" + if not await self.hass.async_add_executor_job(self.client.update): + raise UpdateFailed("Failed to update Smarty data") diff --git a/homeassistant/components/smarty/entity.py b/homeassistant/components/smarty/entity.py new file mode 100644 index 00000000000000..d26b56d489f1d8 --- /dev/null +++ b/homeassistant/components/smarty/entity.py @@ -0,0 +1,23 @@ +"""Smarty Entity class.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import DOMAIN +from .coordinator import SmartyCoordinator + + +class SmartyEntity(CoordinatorEntity[SmartyCoordinator]): + """Representation of a Smarty Entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SmartyCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.config_entry.entry_id)}, + manufacturer="Salda", + sw_version=self.coordinator.software_version, + hw_version=self.coordinator.configuration_version, + ) diff --git a/homeassistant/components/smarty/fan.py b/homeassistant/components/smarty/fan.py index a2d72250197b1a..378585a33e1ff2 100644 --- a/homeassistant/components/smarty/fan.py +++ b/homeassistant/components/smarty/fan.py @@ -6,21 +6,19 @@ import math from typing import Any -from pysmarty2 import Smarty - from homeassistant.components.fan import FanEntity, FanEntityFeature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.percentage import ( percentage_to_ranged_value, ranged_value_to_percentage, ) from homeassistant.util.scaling import int_states_in_range -from . import DOMAIN, SIGNAL_UPDATE_SMARTY +from . import SmartyConfigEntry +from .coordinator import SmartyCoordinator +from .entity import SmartyEntity _LOGGER = logging.getLogger(__name__) @@ -28,24 +26,23 @@ SPEED_RANGE = (1, 3) # off is not included -async def async_setup_platform( +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: SmartyConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Smarty Fan Platform.""" - smarty: Smarty = hass.data[DOMAIN]["api"] - name: str = hass.data[DOMAIN]["name"] - async_add_entities([SmartyFan(name, smarty)], True) + coordinator = entry.runtime_data + + async_add_entities([SmartyFan(coordinator)]) -class SmartyFan(FanEntity): +class SmartyFan(SmartyEntity, FanEntity): """Representation of a Smarty Fan.""" - _attr_icon = "mdi:air-conditioner" - _attr_should_poll = False + _attr_name = None + _attr_translation_key = "fan" _attr_supported_features = ( FanEntityFeature.SET_SPEED | FanEntityFeature.TURN_OFF @@ -53,11 +50,12 @@ class SmartyFan(FanEntity): ) _enable_turn_on_off_backwards_compatibility = False - def __init__(self, name, smarty): + def __init__(self, coordinator: SmartyCoordinator) -> None: """Initialize the entity.""" - self._attr_name = name + super().__init__(coordinator) self._smarty_fan_speed = 0 - self._smarty = smarty + self._smarty = coordinator.client + self._attr_unique_id = coordinator.config_entry.entry_id @property def is_on(self) -> bool: @@ -111,17 +109,8 @@ def turn_off(self, **kwargs: Any) -> None: self._smarty_fan_speed = 0 self.schedule_update_ha_state() - async def async_added_to_hass(self) -> None: - """Call to update fan.""" - self.async_on_remove( - async_dispatcher_connect( - self.hass, SIGNAL_UPDATE_SMARTY, self._update_callback - ) - ) - @callback - def _update_callback(self) -> None: + def _handle_coordinator_update(self) -> None: """Call update method.""" - _LOGGER.debug("Updating state") self._smarty_fan_speed = self._smarty.fan_speed - self.async_write_ha_state() + super()._handle_coordinator_update() diff --git 
a/homeassistant/components/smarty/icons.json b/homeassistant/components/smarty/icons.json new file mode 100644 index 00000000000000..97e74199f0ae5a --- /dev/null +++ b/homeassistant/components/smarty/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "fan": { + "fan": { + "default": "mdi:air-conditioner" + } + } + } +} diff --git a/homeassistant/components/smarty/manifest.json b/homeassistant/components/smarty/manifest.json index b83319b674418f..ca3133d8addaf0 100644 --- a/homeassistant/components/smarty/manifest.json +++ b/homeassistant/components/smarty/manifest.json @@ -2,6 +2,7 @@ "domain": "smarty", "name": "Salda Smarty", "codeowners": ["@z0mbieprocess"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/smarty", "integration_type": "hub", "iot_class": "local_polling", diff --git a/homeassistant/components/smarty/sensor.py b/homeassistant/components/smarty/sensor.py index 3c6873611b4add..9d847003a59fca 100644 --- a/homeassistant/components/smarty/sensor.py +++ b/homeassistant/components/smarty/sensor.py @@ -2,182 +2,118 @@ from __future__ import annotations -import datetime as dt +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta import logging from pysmarty2 import Smarty -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.const import UnitOfTemperature -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.const import REVOLUTIONS_PER_MINUTE, UnitOfTemperature +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -from . import DOMAIN, SIGNAL_UPDATE_SMARTY +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity _LOGGER = logging.getLogger(__name__) -async def async_setup_platform( +def get_filter_days_left(smarty: Smarty) -> datetime | None: + """Return the date when the filter needs to be replaced.""" + if (days_left := smarty.filter_timer) is not None: + return dt_util.now() + timedelta(days=days_left) + return None + + +@dataclass(frozen=True, kw_only=True) +class SmartySensorDescription(SensorEntityDescription): + """Class describing Smarty sensor.""" + + value_fn: Callable[[Smarty], float | datetime | None] + + +ENTITIES: tuple[SmartySensorDescription, ...] 
= ( + SmartySensorDescription( + key="supply_air_temperature", + translation_key="supply_air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda smarty: smarty.supply_air_temperature, + ), + SmartySensorDescription( + key="extract_air_temperature", + translation_key="extract_air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda smarty: smarty.extract_air_temperature, + ), + SmartySensorDescription( + key="outdoor_air_temperature", + translation_key="outdoor_air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda smarty: smarty.outdoor_air_temperature, + ), + SmartySensorDescription( + key="supply_fan_speed", + translation_key="supply_fan_speed", + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + value_fn=lambda smarty: smarty.supply_fan_speed, + ), + SmartySensorDescription( + key="extract_fan_speed", + translation_key="extract_fan_speed", + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + value_fn=lambda smarty: smarty.extract_fan_speed, + ), + SmartySensorDescription( + key="filter_days_left", + translation_key="filter_days_left", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=get_filter_days_left, + ), +) + + +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: SmartyConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Smarty Sensor Platform.""" - smarty: Smarty = hass.data[DOMAIN]["api"] - name: str = hass.data[DOMAIN]["name"] - sensors = [ - SupplyAirTemperatureSensor(name, smarty), - ExtractAirTemperatureSensor(name, smarty), - OutdoorAirTemperatureSensor(name, smarty), - SupplyFanSpeedSensor(name, smarty), - ExtractFanSpeedSensor(name, smarty), - FilterDaysLeftSensor(name, smarty), - ] + coordinator = entry.runtime_data - async_add_entities(sensors, True) + async_add_entities( + SmartySensor(coordinator, description) for description in ENTITIES + ) -class SmartySensor(SensorEntity): +class SmartySensor(SmartyEntity, SensorEntity): """Representation of a Smarty Sensor.""" - _attr_should_poll = False + entity_description: SmartySensorDescription def __init__( self, - name: str, - device_class: SensorDeviceClass | None, - smarty: Smarty, - unit_of_measurement: str | None, + coordinator: SmartyCoordinator, + entity_description: SmartySensorDescription, ) -> None: """Initialize the entity.""" - self._attr_name = name - self._attr_native_value = None - self._attr_device_class = device_class - self._attr_native_unit_of_measurement = unit_of_measurement - self._smarty = smarty - - async def async_added_to_hass(self) -> None: - """Call to update.""" - async_dispatcher_connect(self.hass, SIGNAL_UPDATE_SMARTY, self._update_callback) - - @callback - def _update_callback(self) -> None: - """Call update method.""" - self.async_schedule_update_ha_state(True) - - -class SupplyAirTemperatureSensor(SmartySensor): - """Supply Air Temperature Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Supply Air Temperature Init.""" - super().__init__( - name=f"{name} Supply Air Temperature", - device_class=SensorDeviceClass.TEMPERATURE, - unit_of_measurement=UnitOfTemperature.CELSIUS, - smarty=smarty, + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + 
f"{coordinator.config_entry.entry_id}_{entity_description.key}" ) - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.supply_air_temperature - - -class ExtractAirTemperatureSensor(SmartySensor): - """Extract Air Temperature Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Supply Air Temperature Init.""" - super().__init__( - name=f"{name} Extract Air Temperature", - device_class=SensorDeviceClass.TEMPERATURE, - unit_of_measurement=UnitOfTemperature.CELSIUS, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.extract_air_temperature - - -class OutdoorAirTemperatureSensor(SmartySensor): - """Extract Air Temperature Sensor.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Outdoor Air Temperature Init.""" - super().__init__( - name=f"{name} Outdoor Air Temperature", - device_class=SensorDeviceClass.TEMPERATURE, - unit_of_measurement=UnitOfTemperature.CELSIUS, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.outdoor_air_temperature - - -class SupplyFanSpeedSensor(SmartySensor): - """Supply Fan Speed RPM.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Supply Fan Speed RPM Init.""" - super().__init__( - name=f"{name} Supply Fan Speed", - device_class=None, - unit_of_measurement=None, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.supply_fan_speed - - -class ExtractFanSpeedSensor(SmartySensor): - """Extract Fan Speed RPM.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Extract Fan Speed RPM Init.""" - super().__init__( - name=f"{name} Extract Fan Speed", - device_class=None, - unit_of_measurement=None, - smarty=smarty, - ) - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - self._attr_native_value = self._smarty.extract_fan_speed - - -class FilterDaysLeftSensor(SmartySensor): - """Filter Days Left.""" - - def __init__(self, name: str, smarty: Smarty) -> None: - """Filter Days Left Init.""" - super().__init__( - name=f"{name} Filter Days Left", - device_class=SensorDeviceClass.TIMESTAMP, - unit_of_measurement=None, - smarty=smarty, - ) - self._days_left = 91 - - def update(self) -> None: - """Update state.""" - _LOGGER.debug("Updating sensor %s", self._attr_name) - days_left = self._smarty.filter_timer - if days_left is not None and days_left != self._days_left: - self._attr_native_value = dt_util.now() + dt.timedelta(days=days_left) - self._days_left = days_left + @property + def native_value(self) -> float | datetime | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/smarty/strings.json b/homeassistant/components/smarty/strings.json new file mode 100644 index 00000000000000..188459b4f16303 --- /dev/null +++ b/homeassistant/components/smarty/strings.json @@ -0,0 +1,76 @@ +{ + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Smarty device" + } + } + }, + "error": { + "cannot_connect": 
"[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "issues": { + "deprecated_yaml_import_issue_unknown": { + "title": "YAML import failed with unknown error", + "description": "Configuring {integration_title} using YAML is being removed but there was an unknown error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." + }, + "deprecated_yaml_import_issue_auth_error": { + "title": "YAML import failed due to an authentication error", + "description": "Configuring {integration_title} using YAML is being removed but there was an authentication error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." + } + }, + "entity": { + "binary_sensor": { + "alarm": { + "name": "Alarm" + }, + "warning": { + "name": "Warning" + }, + "boost_state": { + "name": "Boost state" + } + }, + "button": { + "reset_filters_timer": { + "name": "Reset filters timer" + } + }, + "sensor": { + "supply_air_temperature": { + "name": "Supply air temperature" + }, + "extract_air_temperature": { + "name": "Extract air temperature" + }, + "outdoor_air_temperature": { + "name": "Outdoor air temperature" + }, + "supply_fan_speed": { + "name": "Supply fan speed" + }, + "extract_fan_speed": { + "name": "Extract fan speed" + }, + "filter_days_left": { + "name": "Filter days left" + } + }, + "switch": { + "boost": { + "name": "Boost" + } + } + } +} diff --git a/homeassistant/components/smarty/switch.py b/homeassistant/components/smarty/switch.py new file mode 100644 index 00000000000000..bf5fe80db4424e --- /dev/null +++ b/homeassistant/components/smarty/switch.py @@ -0,0 +1,90 @@ +"""Platform to control a Salda Smarty XP/XV ventilation unit.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmarty2 import Smarty + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmartySwitchDescription(SwitchEntityDescription): + """Class describing Smarty switch.""" + + is_on_fn: Callable[[Smarty], bool] + turn_on_fn: Callable[[Smarty], bool | None] + turn_off_fn: Callable[[Smarty], bool | None] + + +ENTITIES: tuple[SmartySwitchDescription, ...] 
= ( + SmartySwitchDescription( + key="boost", + translation_key="boost", + is_on_fn=lambda smarty: smarty.boost, + turn_on_fn=lambda smarty: smarty.enable_boost(), + turn_off_fn=lambda smarty: smarty.disable_boost(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Smarty Switch Platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + SmartySwitch(coordinator, description) for description in ENTITIES + ) + + +class SmartySwitch(SmartyEntity, SwitchEntity): + """Representation of a Smarty Switch.""" + + entity_description: SmartySwitchDescription + + def __init__( + self, + coordinator: SmartyCoordinator, + entity_description: SmartySwitchDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" + ) + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.entity_description.is_on_fn(self.coordinator.client) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_on_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_off_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/smhi/config_flow.py b/homeassistant/components/smhi/config_flow.py index 05b2bf71ca1534..2992b176f242e3 100644 --- a/homeassistant/components/smhi/config_flow.py +++ b/homeassistant/components/smhi/config_flow.py @@ -8,7 +8,7 @@ import voluptuous as vol from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import ( @@ -39,7 +39,6 @@ class SmhiFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for SMHI component.""" VERSION = 2 - config_entry: ConfigEntry async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,16 +82,10 @@ async def async_step_user( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.config_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: lat: float = user_input[CONF_LOCATION][CONF_LATITUDE] @@ -102,8 +95,8 @@ async def async_step_reconfigure_confirm( await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() - old_lat = self.config_entry.data[CONF_LOCATION][CONF_LATITUDE] - old_lon = self.config_entry.data[CONF_LOCATION][CONF_LONGITUDE] + old_lat = reconfigure_entry.data[CONF_LOCATION][CONF_LATITUDE] + old_lon = 
reconfigure_entry.data[CONF_LOCATION][CONF_LONGITUDE] entity_reg = er.async_get(self.hass) if entity := entity_reg.async_get_entity_id( @@ -122,17 +115,16 @@ async def async_step_reconfigure_confirm( ) return self.async_update_reload_and_abort( - self.config_entry, + reconfigure_entry, unique_id=unique_id, - data={**self.config_entry.data, **user_input}, - reason="reconfigure_successful", + data_updates=user_input, ) errors["base"] = "wrong_location" schema = self.add_suggested_values_to_schema( vol.Schema({vol.Required(CONF_LOCATION): LocationSelector()}), - self.config_entry.data, + reconfigure_entry.data, ) return self.async_show_form( - step_id="reconfigure_confirm", data_schema=schema, errors=errors + step_id="reconfigure", data_schema=schema, errors=errors ) diff --git a/homeassistant/components/smhi/strings.json b/homeassistant/components/smhi/strings.json index e78fee64a2b021..3d2a790e6b62ca 100644 --- a/homeassistant/components/smhi/strings.json +++ b/homeassistant/components/smhi/strings.json @@ -12,7 +12,7 @@ "longitude": "[%key:common::config_flow::data::longitude%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure your location in Sweden", "data": { "latitude": "[%key:common::config_flow::data::latitude%]", diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index 0e5b0f49d7b0ba..32efc729dc251d 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -15,7 +15,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac -from . import SmConfigEntry from .const import DOMAIN STEP_USER_DATA_SCHEMA = vol.Schema( @@ -39,7 +38,6 @@ def __init__(self) -> None: """Initialize the config flow.""" self.client: Api2 self.host: str | None = None - self._reauth_entry: SmConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -140,9 +138,6 @@ async def async_step_reauth( ) -> ConfigFlowResult: """Handle reauth when API Authentication failed.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) host = entry_data[CONF_HOST] self.client = Api2(host, session=async_get_clientsession(self.hass)) self.host = host @@ -164,11 +159,8 @@ async def async_step_reauth_confirm( except SmlightConnectionError: return self.async_abort(reason="cannot_connect") else: - assert self._reauth_entry is not None - return self.async_update_reload_and_abort( - self._reauth_entry, - data={**self._reauth_entry.data, **user_input}, + self._get_reauth_entry(), data_updates=user_input ) return self.async_show_form( diff --git a/homeassistant/components/smlight/update.py b/homeassistant/components/smlight/update.py index cb28a1978605a6..147b1d766ef720 100644 --- a/homeassistant/components/smlight/update.py +++ b/homeassistant/components/smlight/update.py @@ -23,6 +23,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import SmConfigEntry +from .const import LOGGER from .coordinator import SmFirmwareUpdateCoordinator, SmFwData from .entity import SmEntity @@ -153,19 +154,21 @@ def _update_progress(self, progress: MessageEvent) -> None: """Update install progress on event.""" progress = int(progress.data) - if progress > 1: - self._attr_in_progress = progress - self.async_write_ha_state() + self._attr_update_percentage = progress + self.async_write_ha_state() def _update_done(self) -> None: """Handle cleanup for update done.""" self._finished_event.set() - self.coordinator.in_progress = False for remove_cb in self._unload: remove_cb() self._unload.clear() + self._attr_in_progress = False + self._attr_update_percentage = None + self.async_write_ha_state() + @callback def _update_finished(self, event: MessageEvent) -> None: """Handle event for update finished.""" @@ -175,7 +178,7 @@ def _update_finished(self, event: MessageEvent) -> None: @callback def _update_failed(self, event: MessageEvent) -> None: self._update_done() - + self.coordinator.in_progress = False raise HomeAssistantError(f"Update failed for {self.name}") async def async_install( @@ -186,6 +189,7 @@ async def async_install( if not self.coordinator.in_progress and self._firmware: self.coordinator.in_progress = True self._attr_in_progress = True + self._attr_update_percentage = None self.register_callbacks() await self.coordinator.client.fw_update(self._firmware) @@ -193,5 +197,20 @@ async def async_install( # block until update finished event received await self._finished_event.wait() - await self.coordinator.async_refresh() + # allow time for SLZB-06 to reboot before updating coordinator data + try: + async with asyncio.timeout(180): + while ( + self.coordinator.in_progress + and self.installed_version != self._firmware.ver + ): + await self.coordinator.async_refresh() + await asyncio.sleep(1) + except TimeoutError: + LOGGER.warning( + "Timeout waiting for %s to reboot after update", + self.coordinator.data.info.hostname, + ) + + self.coordinator.in_progress = False self._finished_event.clear() diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index 9a8703dda33aff..a61f825aa5e84d 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -17,7 +17,6 @@ from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD from homeassistant.util import slugify -from . 
import SolarlogConfigEntry from .const import CONF_HAS_PWD, DEFAULT_HOST, DEFAULT_NAME, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -26,7 +25,6 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for solarlog.""" - _entry: SolarlogConfigEntry VERSION = 1 MINOR_VERSION = 3 @@ -141,40 +139,30 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - self._entry = self._get_reconfigure_entry() - - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: if not user_input[CONF_HAS_PWD] or user_input.get(CONF_PASSWORD, "") == "": user_input[CONF_PASSWORD] = "" user_input[CONF_HAS_PWD] = False return self.async_update_reload_and_abort( - self._entry, - reason="reconfigure_successful", - data={**self._entry.data, **user_input}, + reconfigure_entry, data_updates=user_input ) if await self._test_extended_data( - self._entry.data[CONF_HOST], user_input.get(CONF_PASSWORD, "") + reconfigure_entry.data[CONF_HOST], user_input.get(CONF_PASSWORD, "") ): # if password has been provided, only save if extended data is available return self.async_update_reload_and_abort( - self._entry, - reason="reconfigure_successful", - data={**self._entry.data, **user_input}, + reconfigure_entry, + data_updates=user_input, ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Optional( - CONF_HAS_PWD, default=self._entry.data[CONF_HAS_PWD] + CONF_HAS_PWD, default=reconfigure_entry.data[CONF_HAS_PWD] ): bool, vol.Optional(CONF_PASSWORD): str, } @@ -185,24 +173,24 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle flow upon an API authentication error.""" - self._entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauthorization flow.""" + reauth_entry = self._get_reauth_entry() if user_input and await self._test_extended_data( - self._entry.data[CONF_HOST], user_input.get(CONF_PASSWORD, "") + reauth_entry.data[CONF_HOST], user_input.get(CONF_PASSWORD, "") ): return self.async_update_reload_and_abort( - self._entry, data={**self._entry.data, **user_input} + reauth_entry, data_updates=user_input ) data_schema = vol.Schema( { vol.Optional( - CONF_HAS_PWD, default=self._entry.data[CONF_HAS_PWD] + CONF_HAS_PWD, default=reauth_entry.data[CONF_HAS_PWD] ): bool, vol.Optional(CONF_PASSWORD): str, } diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index 51199ab7051f00..5fdf89c9e74927 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from datetime import timedelta import logging from typing import TYPE_CHECKING @@ -18,7 +19,11 @@ from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +import homeassistant.helpers.device_registry as dr from 
homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util import slugify + +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -35,6 +40,9 @@ def __init__(self, hass: HomeAssistant, entry: SolarlogConfigEntry) -> None: hass, _LOGGER, name="SolarLog", update_interval=timedelta(seconds=60) ) + self.new_device_callbacks: list[Callable[[int], None]] = [] + self._devices_last_update: set[tuple[int, str]] = set() + host_entry = entry.data[CONF_HOST] password = entry.data.get("password", "") @@ -57,7 +65,8 @@ async def _async_setup(self) -> None: _LOGGER.debug("Start async_setup") logged_in = False if self.solarlog.password != "": - logged_in = await self.renew_authentication() + if logged_in := await self.renew_authentication(): + await self.solarlog.test_extended_data_available() if logged_in or await self.solarlog.test_extended_data_available(): device_list = await self.solarlog.update_device_list() self.solarlog.set_enabled_devices({key: True for key in device_list}) @@ -84,8 +93,55 @@ async def _async_update_data(self) -> SolarlogData: _LOGGER.debug("Data successfully updated") + if self.solarlog.extended_data: + self._async_add_remove_devices(data) + _LOGGER.debug("Add_remove_devices finished") + return data + def _async_add_remove_devices(self, data: SolarlogData) -> None: + """Add new devices, remove non-existing devices.""" + if ( + current_devices := { + (k, self.solarlog.device_name(k)) for k in data.inverter_data + } + ) == self._devices_last_update: + return + + # remove old devices + if removed_devices := self._devices_last_update - current_devices: + _LOGGER.debug("Removed device(s): %s", ", ".join(map(str, removed_devices))) + device_registry = dr.async_get(self.hass) + + for removed_device in removed_devices: + device_name = "" + for did, dn in self._devices_last_update: + if did == removed_device[0]: + device_name = dn + break + if device := device_registry.async_get_device( + identifiers={ + ( + DOMAIN, + f"{self.unique_id}_{slugify(device_name)}", + ) + } + ): + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=self.unique_id, + ) + _LOGGER.debug("Device removed from device registry: %s", device.id) + + # add new devices + if new_devices := current_devices - self._devices_last_update: + _LOGGER.debug("New device(s) found: %s", ", ".join(map(str, new_devices))) + for device_id in new_devices: + for callback in self.new_device_callbacks: + callback(device_id[0]) + + self._devices_last_update = current_devices + async def renew_authentication(self) -> bool: """Renew access token for SolarLog API.""" logged_in = False diff --git a/homeassistant/components/solarlog/entity.py b/homeassistant/components/solarlog/entity.py index 1d91fc8726b210..b0f3ddf99f9e01 100644 --- a/homeassistant/components/solarlog/entity.py +++ b/homeassistant/components/solarlog/entity.py @@ -38,7 +38,7 @@ def __init__( """Initialize the SolarLogCoordinator sensor.""" super().__init__(coordinator, description) - self._attr_unique_id = f"{coordinator.unique_id}-{description.key}" + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" self._attr_device_info = DeviceInfo( manufacturer="Solar-Log", model="Controller", @@ -59,8 +59,8 @@ def __init__( ) -> None: """Initialize the SolarLogInverter sensor.""" super().__init__(coordinator, description) - name = f"{coordinator.unique_id}-{slugify(coordinator.solarlog.device_name(device_id))}" - self._attr_unique_id = f"{name}-{description.key}" + name = 
f"{coordinator.unique_id}_{slugify(coordinator.solarlog.device_name(device_id))}" + self._attr_unique_id = f"{name}_{description.key}" self._attr_device_info = DeviceInfo( manufacturer="Solar-Log", model="Inverter", diff --git a/homeassistant/components/solarlog/manifest.json b/homeassistant/components/solarlog/manifest.json index 274c97c76b52de..9f80b749d08649 100644 --- a/homeassistant/components/solarlog/manifest.json +++ b/homeassistant/components/solarlog/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/solarlog", "iot_class": "local_polling", "loggers": ["solarlog_cli"], - "requirements": ["solarlog_cli==0.3.1"] + "requirements": ["solarlog_cli==0.3.2"] } diff --git a/homeassistant/components/solarlog/sensor.py b/homeassistant/components/solarlog/sensor.py index 91e18da1cb2675..bcff5d57e1b506 100644 --- a/homeassistant/components/solarlog/sensor.py +++ b/homeassistant/components/solarlog/sensor.py @@ -87,6 +87,7 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=3, value_fn=lambda data: data.yield_day, ), @@ -105,6 +106,7 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=3, value_fn=lambda data: data.yield_month, ), @@ -114,6 +116,7 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, value_fn=lambda data: data.yield_year, ), SolarLogCoordinatorSensorEntityDescription( @@ -140,6 +143,7 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=3, value_fn=lambda data: data.consumption_day, ), @@ -158,6 +162,7 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=3, value_fn=lambda data: data.consumption_month, ), @@ -167,6 +172,7 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=3, value_fn=lambda data: data.consumption_year, ), @@ -193,6 +199,7 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): translation_key="total_power", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data.total_power, ), SolarLogCoordinatorSensorEntityDescription( @@ -247,7 +254,9 @@ class 
SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda inverter: inverter.current_power, + value_fn=( + lambda inverter: None if inverter is None else inverter.current_power + ), ), SolarLogInverterSensorEntityDescription( key="consumption_year", @@ -255,11 +264,10 @@ class SolarLogInverterSensorEntityDescription(SensorEntityDescription): native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=3, value_fn=( - lambda inverter: None - if inverter.consumption_year is None - else inverter.consumption_year + lambda inverter: None if inverter is None else inverter.consumption_year ), ), ) @@ -289,6 +297,14 @@ async def async_setup_entry( async_add_entities(entities) + def _async_add_new_device(device_id: int) -> None: + async_add_entities( + SolarLogInverterSensor(coordinator, sensor, device_id) + for sensor in INVERTER_SENSOR_TYPES + ) + + coordinator.new_device_callbacks.append(_async_add_new_device) + class SolarLogCoordinatorSensor(SolarLogCoordinatorEntity, SensorEntity): """Represents a SolarLog sensor.""" diff --git a/homeassistant/components/solarlog/strings.json b/homeassistant/components/solarlog/strings.json index 69ebbbcceda7cb..723af6cb277f98 100644 --- a/homeassistant/components/solarlog/strings.json +++ b/homeassistant/components/solarlog/strings.json @@ -29,10 +29,11 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Configure SolarLog", "data": { - "has_password": "[%key:component::solarlog::config::step::user::data::has_password%]" + "has_password": "[%key:component::solarlog::config::step::user::data::has_password%]", + "password": "[%key:common::config_flow::data::password%]" } } }, diff --git a/homeassistant/components/solax/__init__.py b/homeassistant/components/solax/__init__.py index 253f3b55e0a465..3b9df623559984 100644 --- a/homeassistant/components/solax/__init__.py +++ b/homeassistant/components/solax/__init__.py @@ -54,6 +54,7 @@ async def _async_update() -> InverterResponse: coordinator = SolaxDataUpdateCoordinator( hass, logger=_LOGGER, + config_entry=entry, name=f"solax {entry.title}", update_interval=SCAN_INTERVAL, update_method=_async_update, diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index 705db43362eefa..c2d851601750b2 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ b/homeassistant/components/somfy_mylink/config_flow.py @@ -130,7 +130,6 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.options = deepcopy(dict(config_entry.options)) self._target_id: str | None = None diff --git a/homeassistant/components/somfy_mylink/cover.py b/homeassistant/components/somfy_mylink/cover.py index 791c46cd07a3ca..8c64e58362b96b 100644 --- a/homeassistant/components/somfy_mylink/cover.py +++ b/homeassistant/components/somfy_mylink/cover.py @@ -3,9 +3,8 @@ import logging from typing import Any -from homeassistant.components.cover import CoverDeviceClass, CoverEntity +from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState from 
homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_CLOSED, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -131,7 +130,7 @@ async def async_added_to_hass(self) -> None: last_state = await self.async_get_last_state() if last_state is not None and last_state.state in ( - STATE_OPEN, - STATE_CLOSED, + CoverState.OPEN, + CoverState.CLOSED, ): - self._attr_is_closed = last_state.state == STATE_CLOSED + self._attr_is_closed = last_state.state == CoverState.CLOSED diff --git a/homeassistant/components/sonarr/config_flow.py b/homeassistant/components/sonarr/config_flow.py index 84bae85571ee0a..e1cedba10e756c 100644 --- a/homeassistant/components/sonarr/config_flow.py +++ b/homeassistant/components/sonarr/config_flow.py @@ -13,6 +13,7 @@ import yarl from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -58,22 +59,16 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 - def __init__(self) -> None: - """Initialize the flow.""" - self.entry: ConfigEntry | None = None - @staticmethod @callback def async_get_options_flow(config_entry: ConfigEntry) -> SonarrOptionsFlowHandler: """Get the options flow for this handler.""" - return SonarrOptionsFlowHandler(config_entry) + return SonarrOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -81,10 +76,11 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - assert self.entry is not None return self.async_show_form( step_id="reauth_confirm", - description_placeholders={"url": self.entry.data[CONF_URL]}, + description_placeholders={ + "url": self._get_reauth_entry().data[CONF_URL] + }, errors={}, ) @@ -97,8 +93,15 @@ async def async_step_user( errors = {} if user_input is not None: - if self.entry: - user_input = {**self.entry.data, **user_input} + # aiopyarr defaults to the service port if one isn't given + # this is counter to standard practice where http = 80 + # and https = 443. 
+ if CONF_URL in user_input: + url = yarl.URL(user_input[CONF_URL]) + user_input[CONF_URL] = f"{url.scheme}://{url.host}:{url.port}{url.path}" + + if self.source == SOURCE_REAUTH: + user_input = {**self._get_reauth_entry().data, **user_input} if CONF_VERIFY_SSL not in user_input: user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL @@ -113,8 +116,10 @@ async def async_step_user( _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") else: - if self.entry: - return await self._async_reauth_update_entry(user_input) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input + ) parsed = yarl.URL(user_input[CONF_URL]) @@ -129,19 +134,9 @@ async def async_step_user( errors=errors, ) - async def _async_reauth_update_entry( - self, data: dict[str, Any] - ) -> ConfigFlowResult: - """Update existing config entry.""" - assert self.entry is not None - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - - return self.async_abort(reason="reauth_successful") - def _get_user_data_schema(self) -> dict[vol.Marker, type]: """Get the data schema to display user form.""" - if self.entry: + if self.source == SOURCE_REAUTH: return {vol.Required(CONF_API_KEY): str} data_schema: dict[vol.Marker, type] = { @@ -160,10 +155,6 @@ def _get_user_data_schema(self) -> dict[vol.Marker, type]: class SonarrOptionsFlowHandler(OptionsFlow): """Handle Sonarr client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/sonos/manifest.json b/homeassistant/components/sonos/manifest.json index d6c5eb298d8215..76a7d0bfa91981 100644 --- a/homeassistant/components/sonos/manifest.json +++ b/homeassistant/components/sonos/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/sonos", "iot_class": "local_push", "loggers": ["soco"], - "requirements": ["soco==0.30.4", "sonos-websocket==0.1.3"], + "requirements": ["soco==0.30.6", "sonos-websocket==0.1.3"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:ZonePlayer:1" diff --git a/homeassistant/components/spc/alarm_control_panel.py b/homeassistant/components/spc/alarm_control_panel.py index 7e584ff5e6329b..44e0572c9e90e0 100644 --- a/homeassistant/components/spc/alarm_control_panel.py +++ b/homeassistant/components/spc/alarm_control_panel.py @@ -9,13 +9,7 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -25,17 +19,17 @@ from . 
import DATA_API, SIGNAL_UPDATE_ALARM -def _get_alarm_state(area: Area) -> str | None: +def _get_alarm_state(area: Area) -> AlarmControlPanelState | None: """Get the alarm state.""" if area.verified_alarm: - return STATE_ALARM_TRIGGERED + return AlarmControlPanelState.TRIGGERED mode_to_state = { - AreaMode.UNSET: STATE_ALARM_DISARMED, - AreaMode.PART_SET_A: STATE_ALARM_ARMED_HOME, - AreaMode.PART_SET_B: STATE_ALARM_ARMED_NIGHT, - AreaMode.FULL_SET: STATE_ALARM_ARMED_AWAY, + AreaMode.UNSET: AlarmControlPanelState.DISARMED, + AreaMode.PART_SET_A: AlarmControlPanelState.ARMED_HOME, + AreaMode.PART_SET_B: AlarmControlPanelState.ARMED_NIGHT, + AreaMode.FULL_SET: AlarmControlPanelState.ARMED_AWAY, } return mode_to_state.get(area.mode) @@ -91,7 +85,7 @@ def changed_by(self) -> str: return self._area.last_changed_by @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return _get_alarm_state(self._area) diff --git a/homeassistant/components/speedtestdotnet/__init__.py b/homeassistant/components/speedtestdotnet/__init__.py index aed1cce33dbea3..e4c51ab7aa0d2c 100644 --- a/homeassistant/components/speedtestdotnet/__init__.py +++ b/homeassistant/components/speedtestdotnet/__init__.py @@ -6,7 +6,7 @@ import speedtest -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -35,7 +35,10 @@ async def async_setup_entry( async def _async_finish_startup(hass: HomeAssistant) -> None: """Run this only when HA has finished its startup.""" - await coordinator.async_config_entry_first_refresh() + if config_entry.state is ConfigEntryState.LOADED: + await coordinator.async_refresh() + else: + await coordinator.async_config_entry_first_refresh() # Don't start a speedtest during startup async_at_started(hass, _async_finish_startup) diff --git a/homeassistant/components/speedtestdotnet/config_flow.py b/homeassistant/components/speedtestdotnet/config_flow.py index dc64448bbefac3..3bfd4eb6e4a058 100644 --- a/homeassistant/components/speedtestdotnet/config_flow.py +++ b/homeassistant/components/speedtestdotnet/config_flow.py @@ -30,7 +30,7 @@ def async_get_options_flow( config_entry: SpeedTestConfigEntry, ) -> SpeedTestOptionsFlowHandler: """Get the options flow for this handler.""" - return SpeedTestOptionsFlowHandler(config_entry) + return SpeedTestOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -48,9 +48,8 @@ async def async_step_user( class SpeedTestOptionsFlowHandler(OptionsFlow): """Handle SpeedTest options.""" - def __init__(self, config_entry: SpeedTestConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._servers: dict = {} async def async_step_init( diff --git a/homeassistant/components/spotify/__init__.py b/homeassistant/components/spotify/__init__.py index 4a0409df38386b..cfcc9011b37964 100644 --- a/homeassistant/components/spotify/__init__.py +++ b/homeassistant/components/spotify/__init__.py @@ -3,16 +3,16 @@ from __future__ import annotations from datetime import timedelta -from typing import Any +from typing import TYPE_CHECKING import aiohttp -import requests -from spotipy import Spotify, SpotifyException +from spotifyaio import Device, SpotifyClient, SpotifyConnectionError from 
homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import ( OAuth2Session, async_get_config_entry_implementation, @@ -21,7 +21,7 @@ from .browse_media import async_browse_media from .const import DOMAIN, LOGGER, SPOTIFY_SCOPES -from .coordinator import SpotifyCoordinator +from .coordinator import SpotifyConfigEntry, SpotifyCoordinator from .models import SpotifyData from .util import ( is_spotify_media_type, @@ -29,7 +29,7 @@ spotify_uri_from_media_browser_url, ) -PLATFORMS = [Platform.MEDIA_PLAYER] +PLATFORMS = [Platform.MEDIA_PLAYER, Platform.SENSOR] __all__ = [ "async_browse_media", @@ -40,9 +40,6 @@ ] -type SpotifyConfigEntry = ConfigEntry[SpotifyData] - - async def async_setup_entry(hass: HomeAssistant, entry: SpotifyConfigEntry) -> bool: """Set up Spotify from a config entry.""" implementation = await async_get_config_entry_implementation(hass, entry) @@ -53,39 +50,36 @@ async def async_setup_entry(hass: HomeAssistant, entry: SpotifyConfigEntry) -> b except aiohttp.ClientError as err: raise ConfigEntryNotReady from err - spotify = Spotify(auth=session.token["access_token"]) + spotify = SpotifyClient(async_get_clientsession(hass)) - coordinator = SpotifyCoordinator(hass, spotify, session) + spotify.authenticate(session.token[CONF_ACCESS_TOKEN]) - await coordinator.async_config_entry_first_refresh() + async def _refresh_token() -> str: + await session.async_ensure_token_valid() + token = session.token[CONF_ACCESS_TOKEN] + if TYPE_CHECKING: + assert isinstance(token, str) + return token - async def _update_devices() -> list[dict[str, Any]]: - if not session.valid_token: - await session.async_ensure_token_valid() - await hass.async_add_executor_job( - spotify.set_auth, session.token["access_token"] - ) + spotify.refresh_token_function = _refresh_token - try: - devices: dict[str, Any] | None = await hass.async_add_executor_job( - spotify.devices - ) - except (requests.RequestException, SpotifyException) as err: - raise UpdateFailed from err + coordinator = SpotifyCoordinator(hass, spotify) - if devices is None: - return [] + await coordinator.async_config_entry_first_refresh() - return devices.get("devices", []) + async def _update_devices() -> list[Device]: + try: + return await spotify.get_devices() + except SpotifyConnectionError as err: + raise UpdateFailed from err - device_coordinator: DataUpdateCoordinator[list[dict[str, Any]]] = ( - DataUpdateCoordinator( - hass, - LOGGER, - name=f"{entry.title} Devices", - update_interval=timedelta(minutes=5), - update_method=_update_devices, - ) + device_coordinator: DataUpdateCoordinator[list[Device]] = DataUpdateCoordinator( + hass, + LOGGER, + name=f"{entry.title} Devices", + config_entry=entry, + update_interval=timedelta(minutes=5), + update_method=_update_devices, ) await device_coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/spotify/browse_media.py b/homeassistant/components/spotify/browse_media.py index 58b14e1183a4ad..403ec608a7c978 100644 --- a/homeassistant/components/spotify/browse_media.py +++ b/homeassistant/components/spotify/browse_media.py @@ -3,11 +3,17 @@ from __future__ import annotations from enum import StrEnum -from functools import partial 
import logging -from typing import Any - -from spotipy import Spotify +from typing import TYPE_CHECKING, Any, TypedDict + +from spotifyaio import ( + Artist, + BasePlaylist, + SimplifiedAlbum, + SimplifiedTrack, + SpotifyClient, + Track, +) import yarl from homeassistant.components.media_player import ( @@ -18,7 +24,6 @@ ) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session from .const import DOMAIN, MEDIA_PLAYER_PREFIX, MEDIA_TYPE_SHOW, PLAYABLE_MEDIA_TYPES from .util import fetch_image_url @@ -29,6 +34,62 @@ _LOGGER = logging.getLogger(__name__) +class ItemPayload(TypedDict): + """TypedDict for item payload.""" + + name: str + type: str + uri: str + id: str | None + thumbnail: str | None + + +def _get_artist_item_payload(artist: Artist) -> ItemPayload: + return { + "id": artist.artist_id, + "name": artist.name, + "type": MediaType.ARTIST, + "uri": artist.uri, + "thumbnail": fetch_image_url(artist.images), + } + + +def _get_album_item_payload(album: SimplifiedAlbum) -> ItemPayload: + return { + "id": album.album_id, + "name": album.name, + "type": MediaType.ALBUM, + "uri": album.uri, + "thumbnail": fetch_image_url(album.images), + } + + +def _get_playlist_item_payload(playlist: BasePlaylist) -> ItemPayload: + return { + "id": playlist.playlist_id, + "name": playlist.name, + "type": MediaType.PLAYLIST, + "uri": playlist.uri, + "thumbnail": fetch_image_url(playlist.images), + } + + +def _get_track_item_payload( + track: SimplifiedTrack, show_thumbnails: bool = True +) -> ItemPayload: + return { + "id": track.track_id, + "name": track.name, + "type": MediaType.TRACK, + "uri": track.uri, + "thumbnail": ( + fetch_image_url(track.album.images) + if show_thumbnails and isinstance(track, Track) + else None + ), + } + + class BrowsableMedia(StrEnum): """Enum of browsable media.""" @@ -192,14 +253,12 @@ async def async_browse_media( result = await async_browse_media_internal( hass, info.coordinator.client, - info.session, - info.coordinator.current_user, media_content_type, media_content_id, can_play_artist=can_play_artist, ) - # Build new URLs with config entry specifyers + # Build new URLs with config entry specifiers result.media_content_id = str(parsed_url.with_name(result.media_content_id)) if result.children: for child in result.children: @@ -209,9 +268,7 @@ async def async_browse_media( async def async_browse_media_internal( hass: HomeAssistant, - spotify: Spotify, - session: OAuth2Session, - current_user: dict[str, Any], + spotify: SpotifyClient, media_content_type: str | None, media_content_id: str | None, *, @@ -219,15 +276,7 @@ async def async_browse_media_internal( ) -> BrowseMedia: """Browse spotify media.""" if media_content_type in (None, f"{MEDIA_PLAYER_PREFIX}library"): - return await hass.async_add_executor_job( - partial(library_payload, can_play_artist=can_play_artist) - ) - - if not session.valid_token: - await session.async_ensure_token_valid() - await hass.async_add_executor_job( - spotify.set_auth, session.token["access_token"] - ) + return await library_payload(can_play_artist=can_play_artist) # Strip prefix if media_content_type: @@ -237,23 +286,18 @@ async def async_browse_media_internal( "media_content_type": media_content_type, "media_content_id": media_content_id, } - response = await hass.async_add_executor_job( - partial( - build_item_response, - spotify, - current_user, - payload, - can_play_artist=can_play_artist, - ) + response = await 
build_item_response( + spotify, + payload, + can_play_artist=can_play_artist, ) if response is None: raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}") return response -def build_item_response( # noqa: C901 - spotify: Spotify, - user: dict[str, Any], +async def build_item_response( # noqa: C901 + spotify: SpotifyClient, payload: dict[str, str | None], *, can_play_artist: bool, @@ -265,80 +309,119 @@ def build_item_response( # noqa: C901 if media_content_type is None or media_content_id is None: return None - title = None - image = None - media: dict[str, Any] | None = None - items = [] + title: str | None = None + image: str | None = None + items: list[ItemPayload] = [] if media_content_type == BrowsableMedia.CURRENT_USER_PLAYLISTS: - if media := spotify.current_user_playlists(limit=BROWSE_LIMIT): - items = media.get("items", []) + if playlists := await spotify.get_playlists_for_current_user(): + items = [_get_playlist_item_payload(playlist) for playlist in playlists] elif media_content_type == BrowsableMedia.CURRENT_USER_FOLLOWED_ARTISTS: - if media := spotify.current_user_followed_artists(limit=BROWSE_LIMIT): - items = media.get("artists", {}).get("items", []) + if artists := await spotify.get_followed_artists(): + items = [_get_artist_item_payload(artist) for artist in artists] elif media_content_type == BrowsableMedia.CURRENT_USER_SAVED_ALBUMS: - if media := spotify.current_user_saved_albums(limit=BROWSE_LIMIT): - items = [item["album"] for item in media.get("items", [])] + if saved_albums := await spotify.get_saved_albums(): + items = [ + _get_album_item_payload(saved_album.album) + for saved_album in saved_albums + ] elif media_content_type == BrowsableMedia.CURRENT_USER_SAVED_TRACKS: - if media := spotify.current_user_saved_tracks(limit=BROWSE_LIMIT): - items = [item["track"] for item in media.get("items", [])] + if saved_tracks := await spotify.get_saved_tracks(): + items = [ + _get_track_item_payload(saved_track.track) + for saved_track in saved_tracks + ] elif media_content_type == BrowsableMedia.CURRENT_USER_SAVED_SHOWS: - if media := spotify.current_user_saved_shows(limit=BROWSE_LIMIT): - items = [item["show"] for item in media.get("items", [])] + if saved_shows := await spotify.get_saved_shows(): + items = [ + { + "id": saved_show.show.show_id, + "name": saved_show.show.name, + "type": MEDIA_TYPE_SHOW, + "uri": saved_show.show.uri, + "thumbnail": fetch_image_url(saved_show.show.images), + } + for saved_show in saved_shows + ] elif media_content_type == BrowsableMedia.CURRENT_USER_RECENTLY_PLAYED: - if media := spotify.current_user_recently_played(limit=BROWSE_LIMIT): - items = [item["track"] for item in media.get("items", [])] + if recently_played_tracks := await spotify.get_recently_played_tracks(): + items = [ + _get_track_item_payload(item.track) for item in recently_played_tracks + ] elif media_content_type == BrowsableMedia.CURRENT_USER_TOP_ARTISTS: - if media := spotify.current_user_top_artists(limit=BROWSE_LIMIT): - items = media.get("items", []) + if top_artists := await spotify.get_top_artists(): + items = [_get_artist_item_payload(artist) for artist in top_artists] elif media_content_type == BrowsableMedia.CURRENT_USER_TOP_TRACKS: - if media := spotify.current_user_top_tracks(limit=BROWSE_LIMIT): - items = media.get("items", []) + if top_tracks := await spotify.get_top_tracks(): + items = [_get_track_item_payload(track) for track in top_tracks] elif media_content_type == BrowsableMedia.FEATURED_PLAYLISTS: - if media := 
spotify.featured_playlists( - country=user["country"], limit=BROWSE_LIMIT - ): - items = media.get("playlists", {}).get("items", []) + if featured_playlists := await spotify.get_featured_playlists(): + items = [ + _get_playlist_item_payload(playlist) for playlist in featured_playlists + ] elif media_content_type == BrowsableMedia.CATEGORIES: - if media := spotify.categories(country=user["country"], limit=BROWSE_LIMIT): - items = media.get("categories", {}).get("items", []) + if categories := await spotify.get_categories(): + items = [ + { + "id": category.category_id, + "name": category.name, + "type": "category_playlists", + "uri": category.category_id, + "thumbnail": category.icons[0].url if category.icons else None, + } + for category in categories + ] elif media_content_type == "category_playlists": if ( - media := spotify.category_playlists( - category_id=media_content_id, - country=user["country"], - limit=BROWSE_LIMIT, + playlists := await spotify.get_category_playlists( + category_id=media_content_id ) - ) and (category := spotify.category(media_content_id, country=user["country"])): - title = category.get("name") - image = fetch_image_url(category, key="icons") - items = media.get("playlists", {}).get("items", []) + ) and (category := await spotify.get_category(media_content_id)): + title = category.name + image = category.icons[0].url if category.icons else None + items = [_get_playlist_item_payload(playlist) for playlist in playlists] elif media_content_type == BrowsableMedia.NEW_RELEASES: - if media := spotify.new_releases(country=user["country"], limit=BROWSE_LIMIT): - items = media.get("albums", {}).get("items", []) + if new_releases := await spotify.get_new_releases(): + items = [_get_album_item_payload(album) for album in new_releases] elif media_content_type == MediaType.PLAYLIST: - if media := spotify.playlist(media_content_id): - items = [item["track"] for item in media.get("tracks", {}).get("items", [])] + if playlist := await spotify.get_playlist(media_content_id): + title = playlist.name + image = playlist.images[0].url if playlist.images else None + items = [ + _get_track_item_payload(playlist_track.track) + for playlist_track in playlist.tracks.items + ] elif media_content_type == MediaType.ALBUM: - if media := spotify.album(media_content_id): - items = media.get("tracks", {}).get("items", []) + if album := await spotify.get_album(media_content_id): + title = album.name + image = album.images[0].url if album.images else None + items = [ + _get_track_item_payload(track, show_thumbnails=False) + for track in album.tracks + ] elif media_content_type == MediaType.ARTIST: - if (media := spotify.artist_albums(media_content_id, limit=BROWSE_LIMIT)) and ( - artist := spotify.artist(media_content_id) + if (artist_albums := await spotify.get_artist_albums(media_content_id)) and ( + artist := await spotify.get_artist(media_content_id) ): - title = artist.get("name") - image = fetch_image_url(artist) - items = media.get("items", []) + title = artist.name + image = artist.images[0].url if artist.images else None + items = [_get_album_item_payload(album) for album in artist_albums] elif media_content_type == MEDIA_TYPE_SHOW: - if (media := spotify.show_episodes(media_content_id, limit=BROWSE_LIMIT)) and ( - show := spotify.show(media_content_id) + if (show_episodes := await spotify.get_show_episodes(media_content_id)) and ( + show := await spotify.get_show(media_content_id) ): - title = show.get("name") - image = fetch_image_url(show) - items = media.get("items", []) - - if media 
is None: - return None + title = show.name + image = show.images[0].url if show.images else None + items = [ + { + "id": episode.episode_id, + "name": episode.name, + "type": MediaType.EPISODE, + "uri": episode.uri, + "thumbnail": fetch_image_url(episode.images), + } + for episode in show_episodes + ] try: media_class = CONTENT_TYPE_MEDIA_CLASS[media_content_type] @@ -359,9 +442,7 @@ def build_item_response( # noqa: C901 media_item.children = [] for item in items: - try: - item_id = item["id"] - except KeyError: + if (item_id := item["id"]) is None: _LOGGER.debug("Missing ID for media item: %s", item) continue media_item.children.append( @@ -372,21 +453,21 @@ def build_item_response( # noqa: C901 media_class=MediaClass.PLAYLIST, media_content_id=item_id, media_content_type=f"{MEDIA_PLAYER_PREFIX}category_playlists", - thumbnail=fetch_image_url(item, key="icons"), - title=item.get("name"), + thumbnail=item["thumbnail"], + title=item["name"], ) ) return media_item if title is None: title = LIBRARY_MAP.get(media_content_id, "Unknown") - if "name" in media: - title = media["name"] can_play = media_content_type in PLAYABLE_MEDIA_TYPES and ( media_content_type != MediaType.ARTIST or can_play_artist ) + if TYPE_CHECKING: + assert title browse_media = BrowseMedia( can_expand=True, can_play=can_play, @@ -407,23 +488,16 @@ def build_item_response( # noqa: C901 except (MissingMediaInformation, UnknownMediaType): continue - if "images" in media: - browse_media.thumbnail = fetch_image_url(media) - return browse_media -def item_payload(item: dict[str, Any], *, can_play_artist: bool) -> BrowseMedia: +def item_payload(item: ItemPayload, *, can_play_artist: bool) -> BrowseMedia: """Create response payload for a single media item. Used by async_browse_media. """ - try: - media_type = item["type"] - media_id = item["uri"] - except KeyError as err: - _LOGGER.debug("Missing type or URI for media item: %s", item) - raise MissingMediaInformation from err + media_type = item["type"] + media_id = item["uri"] try: media_class = CONTENT_TYPE_MEDIA_CLASS[media_type] @@ -440,25 +514,19 @@ def item_payload(item: dict[str, Any], *, can_play_artist: bool) -> BrowseMedia: media_type != MediaType.ARTIST or can_play_artist ) - browse_media = BrowseMedia( + return BrowseMedia( can_expand=can_expand, can_play=can_play, children_media_class=media_class["children"], media_class=media_class["parent"], media_content_id=media_id, media_content_type=f"{MEDIA_PLAYER_PREFIX}{media_type}", - title=item.get("name", "Unknown"), + title=item["name"], + thumbnail=item["thumbnail"], ) - if "images" in item: - browse_media.thumbnail = fetch_image_url(item) - elif MediaType.ALBUM in item: - browse_media.thumbnail = fetch_image_url(item[MediaType.ALBUM]) - - return browse_media - -def library_payload(*, can_play_artist: bool) -> BrowseMedia: +async def library_payload(*, can_play_artist: bool) -> BrowseMedia: """Create response payload to describe contents of a specific library. Used by async_browse_media. 
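Editor's note on the hunks above: item_payload() and build_item_response() now consume a shared ItemPayload mapping instead of raw Spotify API dicts. The actual TypedDict lives elsewhere in this patch; inferred purely from the keys used in these hunks ("id", "name", "type", "uri", "thumbnail"), it would look roughly like this sketch (not copied from the patch):

from typing import TypedDict


class ItemPayload(TypedDict):
    """Shape assumed by item_payload()/build_item_response() above (sketch only)."""

    id: str | None         # Spotify object ID; None for top-level library entries
    name: str              # display title
    type: str              # media type, e.g. MediaType.TRACK or "category_playlists"
    uri: str               # Spotify URI, used as media_content_id
    thumbnail: str | None  # first image URL, if any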
@@ -474,10 +542,16 @@ def library_payload(*, can_play_artist: bool) -> BrowseMedia: ) browse_media.children = [] - for item in [{"name": n, "type": t} for t, n in LIBRARY_MAP.items()]: + for item_type, item_name in LIBRARY_MAP.items(): browse_media.children.append( item_payload( - {"name": item["name"], "type": item["type"], "uri": item["type"]}, + { + "name": item_name, + "type": item_type, + "uri": item_type, + "id": None, + "thumbnail": None, + }, can_play_artist=can_play_artist, ) ) diff --git a/homeassistant/components/spotify/config_flow.py b/homeassistant/components/spotify/config_flow.py index 510f608746ee77..d99fa7793dff86 100644 --- a/homeassistant/components/spotify/config_flow.py +++ b/homeassistant/components/spotify/config_flow.py @@ -6,10 +6,12 @@ import logging from typing import Any -from spotipy import Spotify +from spotifyaio import SpotifyClient from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN, SPOTIFY_SCOPES @@ -34,29 +36,24 @@ def extra_authorize_data(self) -> dict[str, Any]: async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for Spotify.""" - spotify = Spotify(auth=data["token"]["access_token"]) + spotify = SpotifyClient(async_get_clientsession(self.hass)) + spotify.authenticate(data[CONF_TOKEN][CONF_ACCESS_TOKEN]) try: - current_user = await self.hass.async_add_executor_job(spotify.current_user) + current_user = await spotify.get_current_user() except Exception: # noqa: BLE001 return self.async_abort(reason="connection_error") - name = data["id"] = current_user["id"] + name = current_user.display_name - if current_user.get("display_name"): - name = current_user["display_name"] - data["name"] = name - - await self.async_set_unique_id(current_user["id"]) + await self.async_set_unique_id(current_user.user_id) if self.source == SOURCE_REAUTH: - reauth_entry = self._get_reauth_entry() - if reauth_entry.data["id"] != current_user["id"]: - return self.async_abort(reason="reauth_account_mismatch") + self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") return self.async_update_reload_and_abort( - reauth_entry, title=name, data=data + self._get_reauth_entry(), title=name, data=data ) - return self.async_create_entry(title=name, data=data) + return self.async_create_entry(title=name, data={**data, CONF_NAME: name}) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/spotify/coordinator.py b/homeassistant/components/spotify/coordinator.py index 72efdefa7a5277..9e62d5f137e417 100644 --- a/homeassistant/components/spotify/coordinator.py +++ b/homeassistant/components/spotify/coordinator.py @@ -3,44 +3,59 @@ from dataclasses import dataclass from datetime import datetime, timedelta import logging -from typing import Any - -from spotipy import Spotify, SpotifyException - -from homeassistant.components.media_player import MediaType +from typing import TYPE_CHECKING + +from spotifyaio import ( + ContextType, + ItemType, + PlaybackState, + Playlist, + SpotifyClient, + SpotifyConnectionError, + UserProfile, +) +from spotifyaio.models import AudioFeatures + +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.config_entry_oauth2_flow import 
OAuth2Session from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util from .const import DOMAIN +if TYPE_CHECKING: + from .models import SpotifyData + _LOGGER = logging.getLogger(__name__) +type SpotifyConfigEntry = ConfigEntry[SpotifyData] + + @dataclass class SpotifyCoordinatorData: """Class to hold Spotify data.""" - current_playback: dict[str, Any] + current_playback: PlaybackState | None position_updated_at: datetime | None - playlist: dict[str, Any] | None + playlist: Playlist | None + audio_features: AudioFeatures | None + dj_playlist: bool = False # This is a minimal representation of the DJ playlist that Spotify now offers -# The DJ is not fully integrated with the playlist API, so needs to have the -# playlist response mocked in order to maintain functionality -SPOTIFY_DJ_PLAYLIST = {"uri": "spotify:playlist:37i9dQZF1EYkqdzj48dyYq", "name": "DJ"} +# The DJ is not fully integrated with the playlist API, so we need to guard +# against trying to fetch it as a regular playlist +SPOTIFY_DJ_PLAYLIST_URI = "spotify:playlist:37i9dQZF1EYkqdzj48dyYq" class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): """Class to manage fetching Spotify data.""" - current_user: dict[str, Any] + current_user: UserProfile + config_entry: SpotifyConfigEntry - def __init__( - self, hass: HomeAssistant, client: Spotify, session: OAuth2Session - ) -> None: + def __init__(self, hass: HomeAssistant, client: SpotifyClient) -> None: """Initialize.""" super().__init__( hass, @@ -49,65 +64,70 @@ def __init__( update_interval=timedelta(seconds=30), ) self.client = client - self._playlist: dict[str, Any] | None = None - self.session = session + self._playlist: Playlist | None = None + self._currently_loaded_track: str | None = None async def _async_setup(self) -> None: """Set up the coordinator.""" try: - self.current_user = await self.hass.async_add_executor_job(self.client.me) - except SpotifyException as err: + self.current_user = await self.client.get_current_user() + except SpotifyConnectionError as err: raise UpdateFailed("Error communicating with Spotify API") from err - if not self.current_user: - raise UpdateFailed("Could not retrieve user") async def _async_update_data(self) -> SpotifyCoordinatorData: - if not self.session.valid_token: - await self.session.async_ensure_token_valid() - await self.hass.async_add_executor_job( - self.client.set_auth, self.session.token["access_token"] + try: + current = await self.client.get_playback() + except SpotifyConnectionError as err: + raise UpdateFailed("Error communicating with Spotify API") from err + if not current: + return SpotifyCoordinatorData( + current_playback=None, + position_updated_at=None, + playlist=None, + audio_features=None, ) - return await self.hass.async_add_executor_job(self._sync_update_data) - - def _sync_update_data(self) -> SpotifyCoordinatorData: - current = self.client.current_playback(additional_types=[MediaType.EPISODE]) - currently_playing = current or {} # Record the last updated time, because Spotify's timestamp property is unreliable # and doesn't actually return the fetch time as is mentioned in the API description - position_updated_at = dt_util.utcnow() if current is not None else None - - context = currently_playing.get("context") or {} - - # For some users in some cases, the uri is formed like - # "spotify:user:{name}:playlist:{id}" and spotipy wants - # the type to be playlist. 
- uri = context.get("uri") - if uri is not None: - parts = uri.split(":") - if len(parts) == 5 and parts[1] == "user" and parts[3] == "playlist": - uri = ":".join([parts[0], parts[3], parts[4]]) - - if context and (self._playlist is None or self._playlist["uri"] != uri): - self._playlist = None - if context["type"] == MediaType.PLAYLIST: - # The Spotify API does not currently support doing a lookup for - # the DJ playlist,so just use the minimal mock playlist object - if uri == SPOTIFY_DJ_PLAYLIST["uri"]: - self._playlist = SPOTIFY_DJ_PLAYLIST + position_updated_at = dt_util.utcnow() + + audio_features: AudioFeatures | None = None + if (item := current.item) is not None and item.type == ItemType.TRACK: + if item.uri != self._currently_loaded_track: + try: + audio_features = await self.client.get_audio_features(item.uri) + except SpotifyConnectionError: + _LOGGER.debug( + "Unable to load audio features for track '%s'. " + "Continuing without audio features", + item.uri, + ) + audio_features = None else: + self._currently_loaded_track = item.uri + else: + audio_features = self.data.audio_features + dj_playlist = False + if (context := current.context) is not None: + if self._playlist is None or self._playlist.uri != context.uri: + self._playlist = None + if context.uri == SPOTIFY_DJ_PLAYLIST_URI: + dj_playlist = True + elif context.context_type == ContextType.PLAYLIST: # Make sure any playlist lookups don't break the current # playback state update try: - self._playlist = self.client.playlist(uri) - except SpotifyException: + self._playlist = await self.client.get_playlist(context.uri) + except SpotifyConnectionError: _LOGGER.debug( "Unable to load spotify playlist '%s'. " "Continuing without playlist data", - uri, + context.uri, ) self._playlist = None return SpotifyCoordinatorData( - current_playback=currently_playing, + current_playback=current, position_updated_at=position_updated_at, playlist=self._playlist, + audio_features=audio_features, + dj_playlist=dj_playlist, ) diff --git a/homeassistant/components/spotify/diagnostics.py b/homeassistant/components/spotify/diagnostics.py new file mode 100644 index 00000000000000..82ce40eb22ae55 --- /dev/null +++ b/homeassistant/components/spotify/diagnostics.py @@ -0,0 +1,21 @@ +"""Diagnostics support for Spotify.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.core import HomeAssistant + +from .coordinator import SpotifyConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: SpotifyConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + return { + "playback": asdict(entry.runtime_data.coordinator.data), + "devices": [asdict(dev) for dev in entry.runtime_data.devices.data], + } diff --git a/homeassistant/components/spotify/entity.py b/homeassistant/components/spotify/entity.py new file mode 100644 index 00000000000000..6ab8297708928c --- /dev/null +++ b/homeassistant/components/spotify/entity.py @@ -0,0 +1,25 @@ +"""Base entity for Spotify.""" + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import SpotifyCoordinator + + +class SpotifyEntity(CoordinatorEntity[SpotifyCoordinator]): + """Defines a base Spotify entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SpotifyCoordinator) -> None: + """Initialize the Spotify entity.""" + 
super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.current_user.user_id)}, + manufacturer="Spotify AB", + model=f"Spotify {coordinator.current_user.product}", + name=f"Spotify {coordinator.config_entry.title}", + entry_type=DeviceEntryType.SERVICE, + configuration_url="https://open.spotify.com", + ) diff --git a/homeassistant/components/spotify/icons.json b/homeassistant/components/spotify/icons.json index 00c63141eae998..e1b08127e43f05 100644 --- a/homeassistant/components/spotify/icons.json +++ b/homeassistant/components/spotify/icons.json @@ -4,6 +4,41 @@ "spotify": { "default": "mdi:spotify" } + }, + "sensor": { + "song_tempo": { + "default": "mdi:metronome" + }, + "danceability": { + "default": "mdi:dance-ballroom" + }, + "energy": { + "default": "mdi:lightning-bolt" + }, + "mode": { + "default": "mdi:music" + }, + "speechiness": { + "default": "mdi:speaker-message" + }, + "acousticness": { + "default": "mdi:guitar-acoustic" + }, + "instrumentalness": { + "default": "mdi:guitar-electric" + }, + "valence": { + "default": "mdi:emoticon-happy" + }, + "liveness": { + "default": "mdi:music-note" + }, + "time_signature": { + "default": "mdi:music-clef-treble" + }, + "key": { + "default": "mdi:music-clef-treble" + } } } } diff --git a/homeassistant/components/spotify/manifest.json b/homeassistant/components/spotify/manifest.json index 84f2bc102e3b66..8f8f7e0d5882aa 100644 --- a/homeassistant/components/spotify/manifest.json +++ b/homeassistant/components/spotify/manifest.json @@ -9,6 +9,6 @@ "iot_class": "cloud_polling", "loggers": ["spotipy"], "quality_scale": "silver", - "requirements": ["spotipy==2.23.0"], + "requirements": ["spotifyaio==0.8.8"], "zeroconf": ["_spotify-connect._tcp.local."] } diff --git a/homeassistant/components/spotify/media_player.py b/homeassistant/components/spotify/media_player.py index ad27e2919b2181..7687936fe4cd9d 100644 --- a/homeassistant/components/spotify/media_player.py +++ b/homeassistant/components/spotify/media_player.py @@ -2,14 +2,22 @@ from __future__ import annotations -from collections.abc import Callable +import asyncio +from collections.abc import Awaitable, Callable, Coroutine import datetime as dt -from datetime import timedelta import logging -from typing import Any, Concatenate - -import requests -from spotipy import SpotifyException +from typing import TYPE_CHECKING, Any, Concatenate + +from spotifyaio import ( + Device, + Episode, + Item, + ItemType, + PlaybackState, + ProductType, + RepeatMode as SpotifyRepeatMode, + Track, +) from yarl import URL from homeassistant.components.media_player import ( @@ -22,26 +30,17 @@ MediaType, RepeatMode, ) -from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . 
import SpotifyConfigEntry from .browse_media import async_browse_media_internal -from .const import DOMAIN, MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES -from .coordinator import SpotifyCoordinator -from .util import fetch_image_url +from .const import MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES +from .coordinator import SpotifyConfigEntry, SpotifyCoordinator +from .entity import SpotifyEntity _LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(seconds=30) - SUPPORT_SPOTIFY = ( MediaPlayerEntityFeature.BROWSE_MEDIA | MediaPlayerEntityFeature.NEXT_TRACK @@ -57,14 +56,15 @@ ) REPEAT_MODE_MAPPING_TO_HA = { - "context": RepeatMode.ALL, - "off": RepeatMode.OFF, - "track": RepeatMode.ONE, + SpotifyRepeatMode.CONTEXT: RepeatMode.ALL, + SpotifyRepeatMode.OFF: RepeatMode.OFF, + SpotifyRepeatMode.TRACK: RepeatMode.ONE, } REPEAT_MODE_MAPPING_TO_SPOTIFY = { value: key for key, value in REPEAT_MODE_MAPPING_TO_HA.items() } +AFTER_REQUEST_SLEEP = 1 async def async_setup_entry( @@ -74,47 +74,43 @@ async def async_setup_entry( ) -> None: """Set up Spotify based on a config entry.""" data = entry.runtime_data + assert entry.unique_id is not None spotify = SpotifyMediaPlayer( data.coordinator, data.devices, - entry.data[CONF_ID], - entry.title, ) async_add_entities([spotify]) -def spotify_exception_handler[_SpotifyMediaPlayerT: SpotifyMediaPlayer, **_P, _R]( - func: Callable[Concatenate[_SpotifyMediaPlayerT, _P], _R], -) -> Callable[Concatenate[_SpotifyMediaPlayerT, _P], _R | None]: - """Decorate Spotify calls to handle Spotify exception. +def ensure_item[_R]( + func: Callable[[SpotifyMediaPlayer, Item], _R], +) -> Callable[[SpotifyMediaPlayer], _R | None]: + """Ensure that the currently playing item is available.""" - A decorator that wraps the passed in function, catches Spotify errors, - aiohttp exceptions and handles the availability of the media player. 
- """ - - def wrapper( - self: _SpotifyMediaPlayerT, *args: _P.args, **kwargs: _P.kwargs - ) -> _R | None: - try: - result = func(self, *args, **kwargs) - except requests.RequestException: - self._attr_available = False + def wrapper(self: SpotifyMediaPlayer) -> _R | None: + if not self.currently_playing or not self.currently_playing.item: return None - except SpotifyException as exc: - self._attr_available = False - if exc.reason == "NO_ACTIVE_DEVICE": - raise HomeAssistantError("No active playback device found") from None - raise HomeAssistantError(f"Spotify error: {exc.reason}") from exc - self._attr_available = True - return result + return func(self, self.currently_playing.item) return wrapper -class SpotifyMediaPlayer(CoordinatorEntity[SpotifyCoordinator], MediaPlayerEntity): +def async_refresh_after[_T: SpotifyEntity, **_P]( + func: Callable[Concatenate[_T, _P], Awaitable[None]], +) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]: + """Define a wrapper to yield and refresh after.""" + + async def _async_wrap(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None: + await func(self, *args, **kwargs) + await asyncio.sleep(AFTER_REQUEST_SLEEP) + await self.coordinator.async_refresh() + + return _async_wrap + + +class SpotifyMediaPlayer(SpotifyEntity, MediaPlayerEntity): """Representation of a Spotify controller.""" - _attr_has_entity_name = True _attr_media_image_remotely_accessible = False _attr_name = None _attr_translation_key = "spotify" @@ -122,38 +118,24 @@ class SpotifyMediaPlayer(CoordinatorEntity[SpotifyCoordinator], MediaPlayerEntit def __init__( self, coordinator: SpotifyCoordinator, - device_coordinator: DataUpdateCoordinator[list[dict[str, Any]]], - user_id: str, - name: str, + device_coordinator: DataUpdateCoordinator[list[Device]], ) -> None: """Initialize.""" super().__init__(coordinator) self.devices = device_coordinator - - self._attr_unique_id = user_id - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, user_id)}, - manufacturer="Spotify AB", - model=f"Spotify {coordinator.current_user['product']}", - name=f"Spotify {name}", - entry_type=DeviceEntryType.SERVICE, - configuration_url="https://open.spotify.com", - ) + self._attr_unique_id = coordinator.current_user.user_id @property - def currently_playing(self) -> dict[str, Any]: + def currently_playing(self) -> PlaybackState | None: """Return the current playback.""" return self.coordinator.data.current_playback @property def supported_features(self) -> MediaPlayerEntityFeature: """Return the supported features.""" - if self.coordinator.current_user["product"] != "premium": + if self.coordinator.current_user.product != ProductType.PREMIUM: return MediaPlayerEntityFeature(0) - if not self.currently_playing or self.currently_playing.get("device", {}).get( - "is_restricted" - ): + if not self.currently_playing or self.currently_playing.device.is_restricted: return MediaPlayerEntityFeature.SELECT_SOURCE return SUPPORT_SPOTIFY @@ -162,7 +144,7 @@ def state(self) -> MediaPlayerState: """Return the playback state.""" if not self.currently_playing: return MediaPlayerState.IDLE - if self.currently_playing["is_playing"]: + if self.currently_playing.is_playing: return MediaPlayerState.PLAYING return MediaPlayerState.PAUSED @@ -171,41 +153,32 @@ def volume_level(self) -> float | None: """Return the device volume.""" if not self.currently_playing: return None - return self.currently_playing.get("device", {}).get("volume_percent", 0) / 100 + return self.currently_playing.device.volume_percent / 100 
@property - def media_content_id(self) -> str | None: + @ensure_item + def media_content_id(self, item: Item) -> str: # noqa: PLR0206 """Return the media URL.""" - if not self.currently_playing: - return None - item = self.currently_playing.get("item") or {} - return item.get("uri") + return item.uri @property - def media_content_type(self) -> str | None: + @ensure_item + def media_content_type(self, item: Item) -> str: # noqa: PLR0206 """Return the media type.""" - if not self.currently_playing: - return None - item = self.currently_playing.get("item") or {} - is_episode = item.get("type") == MediaType.EPISODE - return MediaType.PODCAST if is_episode else MediaType.MUSIC + return MediaType.PODCAST if item.type == ItemType.EPISODE else MediaType.MUSIC @property - def media_duration(self) -> int | None: + @ensure_item + def media_duration(self, item: Item) -> int: # noqa: PLR0206 """Duration of current playing media in seconds.""" - if self.currently_playing is None or self.currently_playing.get("item") is None: - return None - return self.currently_playing["item"]["duration_ms"] / 1000 + return round(item.duration_ms / 1000) @property def media_position(self) -> int | None: """Position of current playing media in seconds.""" - if ( - not self.currently_playing - or self.currently_playing.get("progress_ms") is None - ): + if not self.currently_playing or self.currently_playing.progress_ms is None: return None - return self.currently_playing["progress_ms"] / 1000 + return round(self.currently_playing.progress_ms / 1000) @property def media_position_updated_at(self) -> dt.datetime | None: @@ -215,131 +188,132 @@ def media_position_updated_at(self) -> dt.datetime | None: return self.coordinator.data.position_updated_at @property - def media_image_url(self) -> str | None: + @ensure_item + def media_image_url(self, item: Item) -> str | None: # noqa: PLR0206 """Return the media image URL.""" - if not self.currently_playing or self.currently_playing.get("item") is None: + if item.type == ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(item, Episode) + if item.images: + return item.images[0].url + if item.show and item.show.images: + return item.show.images[0].url return None - - item = self.currently_playing["item"] - if item["type"] == MediaType.EPISODE: - if item["images"]: - return fetch_image_url(item) - if item["show"]["images"]: - return fetch_image_url(item["show"]) + if TYPE_CHECKING: + assert isinstance(item, Track) + if not item.album.images: return None - - if not item["album"]["images"]: - return None - return fetch_image_url(item["album"]) + return item.album.images[0].url @property - def media_title(self) -> str | None: + @ensure_item + def media_title(self, item: Item) -> str: # noqa: PLR0206 """Return the media title.""" - if not self.currently_playing: - return None - item = self.currently_playing.get("item") or {} - return item.get("name") + return item.name @property - def media_artist(self) -> str | None: + @ensure_item + def media_artist(self, item: Item) -> str: # noqa: PLR0206 """Return the media artist.""" - if not self.currently_playing or self.currently_playing.get("item") is None: - return None - - item = self.currently_playing["item"] - if item["type"] == MediaType.EPISODE: - return item["show"]["publisher"] + if item.type == ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(item, Episode) + return item.show.publisher - return ", ".join(artist["name"] for artist in item["artists"]) + if TYPE_CHECKING: + assert isinstance(item, Track) + return ", 
".join(artist.name for artist in item.artists) @property - def media_album_name(self) -> str | None: + @ensure_item + def media_album_name(self, item: Item) -> str: # noqa: PLR0206 """Return the media album.""" - if not self.currently_playing or self.currently_playing.get("item") is None: - return None - - item = self.currently_playing["item"] - if item["type"] == MediaType.EPISODE: - return item["show"]["name"] + if item.type == ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(item, Episode) + return item.show.name - return item["album"]["name"] + if TYPE_CHECKING: + assert isinstance(item, Track) + return item.album.name @property - def media_track(self) -> int | None: + @ensure_item + def media_track(self, item: Item) -> int | None: # noqa: PLR0206 """Track number of current playing media, music track only.""" - if not self.currently_playing: + if item.type == ItemType.EPISODE: return None - item = self.currently_playing.get("item") or {} - return item.get("track_number") + if TYPE_CHECKING: + assert isinstance(item, Track) + return item.track_number @property - def media_playlist(self): + def media_playlist(self) -> str | None: """Title of Playlist currently playing.""" + if self.coordinator.data.dj_playlist: + return "DJ" if self.coordinator.data.playlist is None: return None - return self.coordinator.data.playlist["name"] + return self.coordinator.data.playlist.name @property def source(self) -> str | None: """Return the current playback device.""" if not self.currently_playing: return None - return self.currently_playing.get("device", {}).get("name") + return self.currently_playing.device.name @property def source_list(self) -> list[str] | None: """Return a list of source devices.""" - return [device["name"] for device in self.devices.data] + return [device.name for device in self.devices.data] @property def shuffle(self) -> bool | None: """Shuffling state.""" if not self.currently_playing: return None - return self.currently_playing.get("shuffle_state") + return self.currently_playing.shuffle @property def repeat(self) -> RepeatMode | None: """Return current repeat mode.""" - if ( - not self.currently_playing - or (repeat_state := self.currently_playing.get("repeat_state")) is None - ): + if not self.currently_playing: return None - return REPEAT_MODE_MAPPING_TO_HA.get(repeat_state) + return REPEAT_MODE_MAPPING_TO_HA.get(self.currently_playing.repeat_mode) - @spotify_exception_handler - def set_volume_level(self, volume: float) -> None: + @async_refresh_after + async def async_set_volume_level(self, volume: float) -> None: """Set the volume level.""" - self.coordinator.client.volume(int(volume * 100)) + await self.coordinator.client.set_volume(int(volume * 100)) - @spotify_exception_handler - def media_play(self) -> None: + @async_refresh_after + async def async_media_play(self) -> None: """Start or resume playback.""" - self.coordinator.client.start_playback() + await self.coordinator.client.start_playback() - @spotify_exception_handler - def media_pause(self) -> None: + @async_refresh_after + async def async_media_pause(self) -> None: """Pause playback.""" - self.coordinator.client.pause_playback() + await self.coordinator.client.pause_playback() - @spotify_exception_handler - def media_previous_track(self) -> None: + @async_refresh_after + async def async_media_previous_track(self) -> None: """Skip to previous track.""" - self.coordinator.client.previous_track() + await self.coordinator.client.previous_track() - @spotify_exception_handler - def media_next_track(self) -> 
None: + @async_refresh_after + async def async_media_next_track(self) -> None: """Skip to next track.""" - self.coordinator.client.next_track() + await self.coordinator.client.next_track() - @spotify_exception_handler - def media_seek(self, position: float) -> None: + @async_refresh_after + async def async_media_seek(self, position: float) -> None: """Send seek command.""" - self.coordinator.client.seek_track(int(position * 1000)) + await self.coordinator.client.seek_track(int(position * 1000)) - @spotify_exception_handler - def play_media( + @async_refresh_after + async def async_play_media( self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Play media.""" @@ -363,12 +337,8 @@ def play_media( _LOGGER.error("Media type %s is not supported", media_type) return - if ( - self.currently_playing - and not self.currently_playing.get("device") - and self.devices.data - ): - kwargs["device_id"] = self.devices.data[0].get("id") + if not self.currently_playing and self.devices.data: + kwargs["device_id"] = self.devices.data[0].device_id if enqueue == MediaPlayerEnqueue.ADD: if media_type not in { @@ -379,32 +349,32 @@ def play_media( raise ValueError( f"Media type {media_type} is not supported when enqueue is ADD" ) - self.coordinator.client.add_to_queue(media_id, kwargs.get("device_id")) + await self.coordinator.client.add_to_queue( + media_id, kwargs.get("device_id") + ) return - self.coordinator.client.start_playback(**kwargs) + await self.coordinator.client.start_playback(**kwargs) - @spotify_exception_handler - def select_source(self, source: str) -> None: + @async_refresh_after + async def async_select_source(self, source: str) -> None: """Select playback device.""" for device in self.devices.data: - if device["name"] == source: - self.coordinator.client.transfer_playback( - device["id"], self.state == MediaPlayerState.PLAYING - ) + if device.name == source: + await self.coordinator.client.transfer_playback(device.device_id) return - @spotify_exception_handler - def set_shuffle(self, shuffle: bool) -> None: + @async_refresh_after + async def async_set_shuffle(self, shuffle: bool) -> None: """Enable/Disable shuffle mode.""" - self.coordinator.client.shuffle(shuffle) + await self.coordinator.client.set_shuffle(state=shuffle) - @spotify_exception_handler - def set_repeat(self, repeat: RepeatMode) -> None: + @async_refresh_after + async def async_set_repeat(self, repeat: RepeatMode) -> None: """Set repeat mode.""" if repeat not in REPEAT_MODE_MAPPING_TO_SPOTIFY: raise ValueError(f"Unsupported repeat mode: {repeat}") - self.coordinator.client.repeat(REPEAT_MODE_MAPPING_TO_SPOTIFY[repeat]) + await self.coordinator.client.set_repeat(REPEAT_MODE_MAPPING_TO_SPOTIFY[repeat]) async def async_browse_media( self, @@ -416,8 +386,6 @@ async def async_browse_media( return await async_browse_media_internal( self.hass, self.coordinator.client, - self.coordinator.session, - self.coordinator.current_user, media_content_type, media_content_id, ) diff --git a/homeassistant/components/spotify/models.py b/homeassistant/components/spotify/models.py index daeee560d58088..ca323267f79e83 100644 --- a/homeassistant/components/spotify/models.py +++ b/homeassistant/components/spotify/models.py @@ -1,7 +1,8 @@ """Models for use in Spotify integration.""" from dataclasses import dataclass -from typing import Any + +from spotifyaio import Device from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -15,4 
+16,4 @@ class SpotifyData: coordinator: SpotifyCoordinator session: OAuth2Session - devices: DataUpdateCoordinator[list[dict[str, Any]]] + devices: DataUpdateCoordinator[list[Device]] diff --git a/homeassistant/components/spotify/sensor.py b/homeassistant/components/spotify/sensor.py new file mode 100644 index 00000000000000..3486a911b0d0d7 --- /dev/null +++ b/homeassistant/components/spotify/sensor.py @@ -0,0 +1,179 @@ +"""Sensor platform for Spotify.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from spotifyaio.models import AudioFeatures, Key + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.const import PERCENTAGE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SpotifyConfigEntry, SpotifyCoordinator +from .entity import SpotifyEntity + + +@dataclass(frozen=True, kw_only=True) +class SpotifyAudioFeaturesSensorEntityDescription(SensorEntityDescription): + """Describes Spotify sensor entity.""" + + value_fn: Callable[[AudioFeatures], float | str | None] + + +KEYS: dict[Key, str] = { + Key.C: "C", + Key.C_SHARP_D_FLAT: "C♯/D♭", + Key.D: "D", + Key.D_SHARP_E_FLAT: "D♯/E♭", + Key.E: "E", + Key.F: "F", + Key.F_SHARP_G_FLAT: "F♯/G♭", + Key.G: "G", + Key.G_SHARP_A_FLAT: "G♯/A♭", + Key.A: "A", + Key.A_SHARP_B_FLAT: "A♯/B♭", + Key.B: "B", +} + +KEY_OPTIONS = list(KEYS.values()) + + +def _get_key(audio_features: AudioFeatures) -> str | None: + if audio_features.key is None: + return None + return KEYS[audio_features.key] + + +AUDIO_FEATURE_SENSORS: tuple[SpotifyAudioFeaturesSensorEntityDescription, ...] = ( + SpotifyAudioFeaturesSensorEntityDescription( + key="bpm", + translation_key="song_tempo", + native_unit_of_measurement="bpm", + suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.tempo, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="danceability", + translation_key="danceability", + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.danceability * 100, + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="energy", + translation_key="energy", + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.energy * 100, + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="mode", + translation_key="mode", + device_class=SensorDeviceClass.ENUM, + options=["major", "minor"], + value_fn=lambda audio_features: audio_features.mode.name.lower(), + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="speechiness", + translation_key="speechiness", + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.speechiness * 100, + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="acousticness", + translation_key="acousticness", + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.acousticness * 100, + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="instrumentalness", + translation_key="instrumentalness", + native_unit_of_measurement=PERCENTAGE, + 
suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.instrumentalness * 100, + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="liveness", + translation_key="liveness", + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.liveness * 100, + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="valence", + translation_key="valence", + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=0, + value_fn=lambda audio_features: audio_features.valence * 100, + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="time_signature", + translation_key="time_signature", + device_class=SensorDeviceClass.ENUM, + options=["3/4", "4/4", "5/4", "6/4", "7/4"], + value_fn=lambda audio_features: f"{audio_features.time_signature}/4", + entity_registry_enabled_default=False, + ), + SpotifyAudioFeaturesSensorEntityDescription( + key="key", + translation_key="key", + device_class=SensorDeviceClass.ENUM, + options=KEY_OPTIONS, + value_fn=_get_key, + entity_registry_enabled_default=False, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SpotifyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Spotify sensor based on a config entry.""" + coordinator = entry.runtime_data.coordinator + + async_add_entities( + SpotifyAudioFeatureSensor(coordinator, description) + for description in AUDIO_FEATURE_SENSORS + ) + + +class SpotifyAudioFeatureSensor(SpotifyEntity, SensorEntity): + """Representation of a Spotify sensor.""" + + entity_description: SpotifyAudioFeaturesSensorEntityDescription + + def __init__( + self, + coordinator: SpotifyCoordinator, + entity_description: SpotifyAudioFeaturesSensorEntityDescription, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + self._attr_unique_id = ( + f"{coordinator.current_user.user_id}_{entity_description.key}" + ) + self.entity_description = entity_description + + @property + def native_value(self) -> float | str | None: + """Return the state of the sensor.""" + if (audio_features := self.coordinator.data.audio_features) is None: + return None + return self.entity_description.value_fn(audio_features) diff --git a/homeassistant/components/spotify/strings.json b/homeassistant/components/spotify/strings.json index 6447e6e6d1b2d5..faf20d740d933c 100644 --- a/homeassistant/components/spotify/strings.json +++ b/homeassistant/components/spotify/strings.json @@ -19,7 +19,8 @@ "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", - "connection_error": "Could not fetch account information. Is the user registered in the Spotify Developer Dashboard?" + "connection_error": "Could not fetch account information. Is the user registered in the Spotify Developer Dashboard?", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "Successfully authenticated with Spotify." 
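The audio-feature sensors earlier in this patch are driven entirely by the AUDIO_FEATURE_SENSORS tuple, so adding another one only requires one more description plus matching icons.json and strings.json entries like those in the next hunk. A hypothetical extension, assuming spotifyaio's AudioFeatures also exposes Spotify's loudness value (an assumption; it is not referenced anywhere in this patch):

# Hypothetical addition, not part of this PR: track loudness in dB,
# assuming AudioFeatures has a `loudness` attribute.
SpotifyAudioFeaturesSensorEntityDescription(
    key="loudness",
    translation_key="loudness",
    native_unit_of_measurement="dB",
    suggested_display_precision=1,
    value_fn=lambda audio_features: audio_features.loudness,
    entity_registry_enabled_default=False,
),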
@@ -29,5 +30,46 @@ "info": { "api_endpoint_reachable": "Spotify API endpoint reachable" } + }, + "entity": { + "sensor": { + "song_tempo": { + "name": "Song tempo" + }, + "danceability": { + "name": "Song danceability" + }, + "energy": { + "name": "Song energy" + }, + "mode": { + "name": "Song mode", + "state": { + "minor": "Minor", + "major": "Major" + } + }, + "speechiness": { + "name": "Song speechiness" + }, + "acousticness": { + "name": "Song acousticness" + }, + "instrumentalness": { + "name": "Song instrumentalness" + }, + "valence": { + "name": "Song valence" + }, + "liveness": { + "name": "Song liveness" + }, + "time_signature": { + "name": "Song time signature" + }, + "key": { + "name": "Song key" + } + } } } diff --git a/homeassistant/components/spotify/system_health.py b/homeassistant/components/spotify/system_health.py index 963c3bfb0ef04a..5ed6defe0902d4 100644 --- a/homeassistant/components/spotify/system_health.py +++ b/homeassistant/components/spotify/system_health.py @@ -1,5 +1,7 @@ """Provide info to system health.""" +from typing import Any + from homeassistant.components import system_health from homeassistant.core import HomeAssistant, callback @@ -12,7 +14,7 @@ def async_register( register.async_register_info(system_health_info) -async def system_health_info(hass): +async def system_health_info(hass: HomeAssistant) -> dict[str, Any]: """Get info for the info page.""" return { "api_endpoint_reachable": system_health.async_check_can_reach_url( diff --git a/homeassistant/components/spotify/util.py b/homeassistant/components/spotify/util.py index 98bce980e5bb10..d882e9c58b894d 100644 --- a/homeassistant/components/spotify/util.py +++ b/homeassistant/components/spotify/util.py @@ -2,8 +2,7 @@ from __future__ import annotations -from typing import Any - +from spotifyaio import Image import yarl from .const import MEDIA_PLAYER_PREFIX @@ -19,12 +18,11 @@ def resolve_spotify_media_type(media_content_type: str) -> str: return media_content_type.removeprefix(MEDIA_PLAYER_PREFIX) -def fetch_image_url(item: dict[str, Any], key="images") -> str | None: +def fetch_image_url(images: list[Image]) -> str | None: """Fetch image url.""" - source = item.get(key, []) - if isinstance(source, list) and source: - return source[0].get("url") - return None + if not images: + return None + return images[0].url def spotify_uri_from_media_browser_url(media_content_id: str) -> str: diff --git a/homeassistant/components/sql/config_flow.py b/homeassistant/components/sql/config_flow.py index 5537c7ff3b0ec0..4fe04f2401c83b 100644 --- a/homeassistant/components/sql/config_flow.py +++ b/homeassistant/components/sql/config_flow.py @@ -23,7 +23,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_DEVICE_CLASS, @@ -144,7 +144,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> SQLOptionsFlowHandler: """Get the options flow for this handler.""" - return SQLOptionsFlowHandler(config_entry) + return SQLOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -209,7 +209,7 @@ async def async_step_user( ) -class SQLOptionsFlowHandler(OptionsFlowWithConfigEntry): +class SQLOptionsFlowHandler(OptionsFlow): """Handle SQL options.""" async def async_step_init( @@ -223,7 +223,7 @@ async def async_step_init( db_url = user_input.get(CONF_DB_URL) query = user_input[CONF_QUERY] column = user_input[CONF_COLUMN_NAME] - name = self.options.get(CONF_NAME, self.config_entry.title) + name = 
self.config_entry.options.get(CONF_NAME, self.config_entry.title) try: query = validate_sql_select(query) @@ -275,7 +275,7 @@ async def async_step_init( return self.async_show_form( step_id="init", data_schema=self.add_suggested_values_to_schema( - OPTIONS_SCHEMA, user_input or self.options + OPTIONS_SCHEMA, user_input or self.config_entry.options ), errors=errors, description_placeholders=description_placeholders, diff --git a/homeassistant/components/squeezebox/__init__.py b/homeassistant/components/squeezebox/__init__.py index c0a5b90647481d..f466f3bcb62c06 100644 --- a/homeassistant/components/squeezebox/__init__.py +++ b/homeassistant/components/squeezebox/__init__.py @@ -2,9 +2,10 @@ from asyncio import timeout from dataclasses import dataclass +from datetime import datetime import logging -from pysqueezebox import Server +from pysqueezebox import Player, Server from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -23,20 +24,30 @@ DeviceEntryType, format_mac, ) +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.event import async_call_later from .const import ( CONF_HTTPS, + DISCOVERY_INTERVAL, DISCOVERY_TASK, DOMAIN, + KNOWN_PLAYERS, + KNOWN_SERVERS, MANUFACTURER, SERVER_MODEL, + SIGNAL_PLAYER_DISCOVERED, + SIGNAL_PLAYER_REDISCOVERED, STATUS_API_TIMEOUT, STATUS_QUERY_LIBRARYNAME, STATUS_QUERY_MAC, STATUS_QUERY_UUID, STATUS_QUERY_VERSION, ) -from .coordinator import LMSStatusDataUpdateCoordinator +from .coordinator import ( + LMSStatusDataUpdateCoordinator, + SqueezeBoxPlayerUpdateCoordinator, +) _LOGGER = logging.getLogger(__name__) @@ -117,15 +128,55 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) - ) _LOGGER.debug("LMS Device %s", device) - coordinator = LMSStatusDataUpdateCoordinator(hass, lms) + server_coordinator = LMSStatusDataUpdateCoordinator(hass, lms) entry.runtime_data = SqueezeboxData( - coordinator=coordinator, + coordinator=server_coordinator, server=lms, ) - await coordinator.async_config_entry_first_refresh() + # set up player discovery + known_servers = hass.data.setdefault(DOMAIN, {}).setdefault(KNOWN_SERVERS, {}) + known_players = known_servers.setdefault(lms.uuid, {}).setdefault(KNOWN_PLAYERS, []) + + async def _player_discovery(now: datetime | None = None) -> None: + """Discover squeezebox players by polling server.""" + + async def _discovered_player(player: Player) -> None: + """Handle a (re)discovered player.""" + if player.player_id in known_players: + await player.async_update() + async_dispatcher_send( + hass, SIGNAL_PLAYER_REDISCOVERED, player.player_id, player.connected + ) + else: + _LOGGER.debug("Adding new entity: %s", player) + player_coordinator = SqueezeBoxPlayerUpdateCoordinator( + hass, player, lms.uuid + ) + known_players.append(player.player_id) + async_dispatcher_send( + hass, SIGNAL_PLAYER_DISCOVERED, player_coordinator + ) + + if players := await lms.async_get_players(): + for player in players: + hass.async_create_task(_discovered_player(player)) + + entry.async_on_unload( + async_call_later(hass, DISCOVERY_INTERVAL, _player_discovery) + ) + + await server_coordinator.async_config_entry_first_refresh() await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + _LOGGER.debug( + "Adding player discovery job for LMS server: %s", entry.data[CONF_HOST] + ) + entry.async_create_background_task( + hass, _player_discovery(), "squeezebox.media_player.player_discovery" + ) + return True diff --git 
a/homeassistant/components/squeezebox/browse_media.py b/homeassistant/components/squeezebox/browse_media.py index 6c69aa532ecf06..4d1c98bc4fcf2d 100644 --- a/homeassistant/components/squeezebox/browse_media.py +++ b/homeassistant/components/squeezebox/browse_media.py @@ -18,7 +18,15 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.network import is_internal_request -LIBRARY = ["Favorites", "Artists", "Albums", "Tracks", "Playlists", "Genres"] +LIBRARY = [ + "Favorites", + "Artists", + "Albums", + "Tracks", + "Playlists", + "Genres", + "New Music", +] MEDIA_TYPE_TO_SQUEEZEBOX = { "Favorites": "favorites", @@ -27,6 +35,7 @@ "Tracks": "titles", "Playlists": "playlists", "Genres": "genres", + "New Music": "new music", MediaType.ALBUM: "album", MediaType.ARTIST: "artist", MediaType.TRACK: "title", @@ -50,6 +59,7 @@ "Tracks": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, "Playlists": {"item": MediaClass.DIRECTORY, "children": MediaClass.PLAYLIST}, "Genres": {"item": MediaClass.DIRECTORY, "children": MediaClass.GENRE}, + "New Music": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM}, MediaType.ALBUM: {"item": MediaClass.ALBUM, "children": MediaClass.TRACK}, MediaType.ARTIST: {"item": MediaClass.ARTIST, "children": MediaClass.ALBUM}, MediaType.TRACK: {"item": MediaClass.TRACK, "children": None}, @@ -68,6 +78,7 @@ "Playlists": MediaType.PLAYLIST, "Genres": MediaType.GENRE, "Favorites": None, # can only be determined after inspecting the item + "New Music": MediaType.ALBUM, } BROWSE_LIMIT = 1000 diff --git a/homeassistant/components/squeezebox/const.py b/homeassistant/components/squeezebox/const.py index 0bf8c24a5d1ea4..8bc332141709d3 100644 --- a/homeassistant/components/squeezebox/const.py +++ b/homeassistant/components/squeezebox/const.py @@ -5,6 +5,7 @@ DOMAIN = "squeezebox" DEFAULT_PORT = 9000 KNOWN_PLAYERS = "known_players" +KNOWN_SERVERS = "known_servers" MANUFACTURER = "https://lyrion.org/" PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub" SENSOR_UPDATE_INTERVAL = 60 @@ -27,3 +28,7 @@ STATUS_QUERY_UUID = "uuid" STATUS_QUERY_VERSION = "version" SQUEEZEBOX_SOURCE_STRINGS = ("source:", "wavin:", "spotify:") +SIGNAL_PLAYER_DISCOVERED = "squeezebox_player_discovered" +SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered" +DISCOVERY_INTERVAL = 60 +PLAYER_UPDATE_INTERVAL = 5 diff --git a/homeassistant/components/squeezebox/coordinator.py b/homeassistant/components/squeezebox/coordinator.py index 0d958399bcb348..f3aacbc9833484 100644 --- a/homeassistant/components/squeezebox/coordinator.py +++ b/homeassistant/components/squeezebox/coordinator.py @@ -1,18 +1,23 @@ """DataUpdateCoordinator for the Squeezebox integration.""" from asyncio import timeout +from collections.abc import Callable from datetime import timedelta import logging import re +from typing import Any -from pysqueezebox import Server +from pysqueezebox import Player, Server -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import dt as dt_util from .const import ( + PLAYER_UPDATE_INTERVAL, SENSOR_UPDATE_INTERVAL, + SIGNAL_PLAYER_REDISCOVERED, STATUS_API_TIMEOUT, STATUS_SENSOR_LASTSCAN, STATUS_SENSOR_NEEDSRESTART, @@ -38,7 +43,7 @@ def __init__(self, hass: HomeAssistant, lms: Server) -> None: self.newversion_regex = re.compile("<.*$") async def 
_async_update_data(self) -> dict: - """Fetch data fromn LMS status call. + """Fetch data from LMS status call. Then we process only a subset to make then nice for HA """ @@ -70,3 +75,46 @@ def _prepare_status_data(self, data: dict) -> dict: _LOGGER.debug("Processed serverstatus %s=%s", self.lms.name, data) return data + + +class SqueezeBoxPlayerUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Coordinator for Squeezebox players.""" + + def __init__(self, hass: HomeAssistant, player: Player, server_uuid: str) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + _LOGGER, + name=player.name, + update_interval=timedelta(seconds=PLAYER_UPDATE_INTERVAL), + always_update=True, + ) + self.player = player + self.available = True + self._remove_dispatcher: Callable | None = None + self.server_uuid = server_uuid + + async def _async_update_data(self) -> dict[str, Any]: + """Update Player if available, or listen for rediscovery if not.""" + if self.available: + # Only update players available at last update, unavailable players are rediscovered instead + await self.player.async_update() + + if self.player.connected is False: + _LOGGER.debug("Player %s is not available", self.name) + self.available = False + + # start listening for restored players + self._remove_dispatcher = async_dispatcher_connect( + self.hass, SIGNAL_PLAYER_REDISCOVERED, self.rediscovered + ) + return {} + + @callback + def rediscovered(self, unique_id: str, connected: bool) -> None: + """Make a player available again.""" + if unique_id == self.player.player_id and connected: + self.available = True + _LOGGER.debug("Player %s is available again", self.name) + if self._remove_dispatcher: + self._remove_dispatcher() diff --git a/homeassistant/components/squeezebox/icons.json b/homeassistant/components/squeezebox/icons.json index e86016329f5bb3..29911ddad77f3c 100644 --- a/homeassistant/components/squeezebox/icons.json +++ b/homeassistant/components/squeezebox/icons.json @@ -27,12 +27,6 @@ }, "call_query": { "service": "mdi:database" - }, - "sync": { - "service": "mdi:sync" - }, - "unsync": { - "service": "mdi:sync-off" } } } diff --git a/homeassistant/components/squeezebox/manifest.json b/homeassistant/components/squeezebox/manifest.json index 74b7c1f48009d1..aa595340d56345 100644 --- a/homeassistant/components/squeezebox/manifest.json +++ b/homeassistant/components/squeezebox/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/squeezebox", "iot_class": "local_polling", "loggers": ["pysqueezebox"], - "requirements": ["pysqueezebox==0.9.3"] + "requirements": ["pysqueezebox==0.10.0"] } diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 54cb07cafaf8f0..19cd1e369105fe 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -6,9 +6,9 @@ from datetime import datetime import json import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from pysqueezebox import Player, Server, async_discover +from pysqueezebox import Server, async_discover import voluptuous as vol from homeassistant.components import media_source @@ -25,50 +25,53 @@ async_process_play_media_url, ) from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY -from homeassistant.const import ATTR_COMMAND, CONF_HOST, CONF_PORT +from homeassistant.const import ATTR_COMMAND, CONF_HOST, CONF_PORT, Platform from homeassistant.core 
import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import ( config_validation as cv, discovery_flow, entity_platform, + entity_registry as er, ) from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, DeviceInfo, format_mac, ) -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later from homeassistant.helpers.start import async_at_start +from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow -from . import SqueezeboxConfigEntry from .browse_media import ( build_item_response, generate_playlist, library_payload, media_source_content_filter, ) -from .const import DISCOVERY_TASK, DOMAIN, KNOWN_PLAYERS, SQUEEZEBOX_SOURCE_STRINGS +from .const import ( + DISCOVERY_TASK, + DOMAIN, + KNOWN_PLAYERS, + KNOWN_SERVERS, + SIGNAL_PLAYER_DISCOVERED, + SQUEEZEBOX_SOURCE_STRINGS, +) +from .coordinator import SqueezeBoxPlayerUpdateCoordinator + +if TYPE_CHECKING: + from . import SqueezeboxConfigEntry SERVICE_CALL_METHOD = "call_method" SERVICE_CALL_QUERY = "call_query" ATTR_QUERY_RESULT = "query_result" -SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered" - _LOGGER = logging.getLogger(__name__) -DISCOVERY_INTERVAL = 60 - -KNOWN_SERVERS = "known_servers" ATTR_PARAMETERS = "parameters" ATTR_OTHER_PLAYER = "other_player" @@ -112,49 +115,15 @@ async def async_setup_entry( entry: SqueezeboxConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Set up an player discovery from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - known_players = hass.data[DOMAIN].setdefault(KNOWN_PLAYERS, []) - lms = entry.runtime_data.server - - async def _player_discovery(now: datetime | None = None) -> None: - """Discover squeezebox players by polling server.""" - - async def _discovered_player(player: Player) -> None: - """Handle a (re)discovered player.""" - entity = next( - ( - known - for known in known_players - if known.unique_id == player.player_id - ), - None, - ) - if entity: - await player.async_update() - async_dispatcher_send( - hass, SIGNAL_PLAYER_REDISCOVERED, player.player_id, player.connected - ) + """Set up the Squeezebox media_player platform from a server config entry.""" - if not entity: - _LOGGER.debug("Adding new entity: %s", player) - entity = SqueezeBoxEntity(player, lms) - known_players.append(entity) - async_add_entities([entity], True) + # Add media player entities when discovered + async def _player_discovered(player: SqueezeBoxPlayerUpdateCoordinator) -> None: + _LOGGER.debug("Setting up media_player entity for player %s", player) + async_add_entities([SqueezeBoxMediaPlayerEntity(player)]) - if players := await lms.async_get_players(): - for player in players: - hass.async_create_task(_discovered_player(player)) - - entry.async_on_unload( - async_call_later(hass, DISCOVERY_INTERVAL, _player_discovery) - ) - - _LOGGER.debug( - "Adding player discovery job for LMS server: %s", entry.data[CONF_HOST] - ) - entry.async_create_background_task( - hass, _player_discovery(), "squeezebox.media_player.player_discovery" + entry.async_on_unload( + async_dispatcher_connect(hass, SIGNAL_PLAYER_DISCOVERED, _player_discovered) ) # Register entity services @@ -184,8 +153,10 @@ async def 
_discovered_player(player: Player) -> None: entry.async_on_unload(async_at_start(hass, start_server_discovery)) -class SqueezeBoxEntity(MediaPlayerEntity): - """Representation of a SqueezeBox device. +class SqueezeBoxMediaPlayerEntity( + CoordinatorEntity[SqueezeBoxPlayerUpdateCoordinator], MediaPlayerEntity +): + """Representation of the media player features of a SqueezeBox device. Wraps a pysqueezebox.Player() object. """ @@ -212,13 +183,18 @@ class SqueezeBoxEntity(MediaPlayerEntity): _attr_has_entity_name = True _attr_name = None _last_update: datetime | None = None - _attr_available = True - def __init__(self, player: Player, server: Server) -> None: + def __init__( + self, + coordinator: SqueezeBoxPlayerUpdateCoordinator, + ) -> None: """Initialize the SqueezeBox device.""" + super().__init__(coordinator) + player = coordinator.player self._player = player self._query_result: bool | dict = {} self._remove_dispatcher: Callable | None = None + self._previous_media_position = 0 self._attr_unique_id = format_mac(player.player_id) _manufacturer = None if player.model == "SqueezeLite" or "SqueezePlay" in player.model: @@ -234,11 +210,24 @@ def __init__(self, player: Player, server: Server) -> None: identifiers={(DOMAIN, self._attr_unique_id)}, name=player.name, connections={(CONNECTION_NETWORK_MAC, self._attr_unique_id)}, - via_device=(DOMAIN, server.uuid), + via_device=(DOMAIN, coordinator.server_uuid), model=player.model, manufacturer=_manufacturer, ) + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if self._previous_media_position != self.media_position: + self._previous_media_position = self.media_position + self._last_update = utcnow() + self.async_write_ha_state() + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self.coordinator.available and super().available + @property def extra_state_attributes(self) -> dict[str, Any]: """Return device-specific attributes.""" @@ -248,15 +237,6 @@ def extra_state_attributes(self) -> dict[str, Any]: if getattr(self, attr) is not None } - @callback - def rediscovered(self, unique_id: str, connected: bool) -> None: - """Make a player available again.""" - if unique_id == self.unique_id and connected: - self._attr_available = True - _LOGGER.debug("Player %s is available again", self.name) - if self._remove_dispatcher: - self._remove_dispatcher() - @property def state(self) -> MediaPlayerState | None: """Return the state of the device.""" @@ -269,26 +249,11 @@ def state(self) -> MediaPlayerState | None: ) return None - async def async_update(self) -> None: - """Update the Player() object.""" - # only update available players, newly available players will be rediscovered and marked available - if self._attr_available: - last_media_position = self.media_position - await self._player.async_update() - if self.media_position != last_media_position: - self._last_update = utcnow() - if self._player.connected is False: - _LOGGER.debug("Player %s is not available", self.name) - self._attr_available = False - - # start listening for restored players - self._remove_dispatcher = async_dispatcher_connect( - self.hass, SIGNAL_PLAYER_REDISCOVERED, self.rediscovered - ) - async def async_will_remove_from_hass(self) -> None: """Remove from list of known players when removed from hass.""" - self.hass.data[DOMAIN][KNOWN_PLAYERS].remove(self) + known_servers = self.hass.data[DOMAIN][KNOWN_SERVERS] + known_players = 
known_servers[self.coordinator.server_uuid][KNOWN_PLAYERS] + known_players.remove(self.coordinator.player.player_id) @property def volume_level(self) -> float | None: @@ -380,13 +345,15 @@ def shuffle(self) -> bool: @property def group_members(self) -> list[str]: """List players we are synced with.""" - player_ids = { - p.unique_id: p.entity_id for p in self.hass.data[DOMAIN][KNOWN_PLAYERS] - } + ent_reg = er.async_get(self.hass) return [ - player_ids[player] + entity_id for player in self._player.sync_group - if player in player_ids + if ( + entity_id := ent_reg.async_get_entity_id( + Platform.MEDIA_PLAYER, DOMAIN, player + ) + ) ] @property @@ -397,55 +364,68 @@ def query_result(self) -> dict | bool: async def async_turn_off(self) -> None: """Turn off media player.""" await self._player.async_set_power(False) + await self.coordinator.async_refresh() async def async_volume_up(self) -> None: """Volume up media player.""" await self._player.async_set_volume("+5") + await self.coordinator.async_refresh() async def async_volume_down(self) -> None: """Volume down media player.""" await self._player.async_set_volume("-5") + await self.coordinator.async_refresh() async def async_set_volume_level(self, volume: float) -> None: """Set volume level, range 0..1.""" volume_percent = str(int(volume * 100)) await self._player.async_set_volume(volume_percent) + await self.coordinator.async_refresh() async def async_mute_volume(self, mute: bool) -> None: """Mute (true) or unmute (false) media player.""" await self._player.async_set_muting(mute) + await self.coordinator.async_refresh() async def async_media_stop(self) -> None: """Send stop command to media player.""" await self._player.async_stop() + await self.coordinator.async_refresh() async def async_media_play_pause(self) -> None: """Send pause command to media player.""" await self._player.async_toggle_pause() + await self.coordinator.async_refresh() async def async_media_play(self) -> None: """Send play command to media player.""" await self._player.async_play() + await self.coordinator.async_refresh() async def async_media_pause(self) -> None: """Send pause command to media player.""" await self._player.async_pause() + await self.coordinator.async_refresh() async def async_media_next_track(self) -> None: """Send next track command.""" await self._player.async_index("+1") + await self.coordinator.async_refresh() async def async_media_previous_track(self) -> None: """Send next track command.""" await self._player.async_index("-1") + await self.coordinator.async_refresh() async def async_media_seek(self, position: float) -> None: """Send seek command.""" await self._player.async_time(position) + await self.coordinator.async_refresh() async def async_turn_on(self) -> None: """Turn the media player on.""" await self._player.async_set_power(True) + await self.coordinator.async_refresh() async def async_play_media( self, media_type: MediaType | str, media_id: str, **kwargs: Any @@ -504,6 +484,7 @@ async def async_play_media( await self._player.async_load_playlist(playlist, cmd) if index is not None: await self._player.async_index(index) + await self.coordinator.async_refresh() async def async_set_repeat(self, repeat: RepeatMode) -> None: """Set the repeat mode.""" @@ -515,15 +496,18 @@ async def async_set_repeat(self, repeat: RepeatMode) -> None: repeat_mode = "none" await self._player.async_set_repeat(repeat_mode) + await self.coordinator.async_refresh() async def async_set_shuffle(self, shuffle: bool) -> None: """Enable/disable shuffle mode.""" 
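# Illustrative sketch of the entity-registry lookup that the group_members
# property above is built on: the registry resolves (entity domain, integration
# domain, unique_id) back to an entity_id, replacing the old KNOWN_PLAYERS list
# of entity objects. The player id below is a made-up example; squeezebox
# unique_ids are the player's MAC address as returned by format_mac().
from homeassistant.const import Platform
from homeassistant.helpers import entity_registry as er

ent_reg = er.async_get(hass)  # hass: the running HomeAssistant instance
entity_id = ent_reg.async_get_entity_id(
    Platform.MEDIA_PLAYER, "squeezebox", "aa:bb:cc:dd:ee:ff"
)  # returns None if no media_player entity is registered for that unique_id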
shuffle_mode = "song" if shuffle else "none" await self._player.async_set_shuffle(shuffle_mode) + await self.coordinator.async_refresh() async def async_clear_playlist(self) -> None: """Send the media player the command for clear playlist.""" await self._player.async_clear_playlist() + await self.coordinator.async_refresh() async def async_call_method( self, command: str, parameters: list[str] | None = None @@ -551,6 +535,7 @@ async def async_call_query( all_params.extend(parameters) self._query_result = await self._player.async_query(*all_params) _LOGGER.debug("call_query got result %s", self._query_result) + self.async_write_ha_state() async def async_join_players(self, group_members: list[str]) -> None: """Add other Squeezebox players to this player's sync group. @@ -558,21 +543,24 @@ async def async_join_players(self, group_members: list[str]) -> None: If the other player is a member of a sync group, it will leave the current sync group without asking. """ - player_ids = { - p.entity_id: p.unique_id for p in self.hass.data[DOMAIN][KNOWN_PLAYERS] - } - - for other_player in group_members: - if other_player_id := player_ids.get(other_player): + ent_reg = er.async_get(self.hass) + for other_player_entity_id in group_members: + other_player = ent_reg.async_get(other_player_entity_id) + if other_player is None: + raise ServiceValidationError( + f"Could not find player with entity_id {other_player_entity_id}" + ) + if other_player_id := other_player.unique_id: await self._player.async_sync(other_player_id) else: raise ServiceValidationError( - f"Could not join unknown player {other_player}" + f"Could not join unknown player {other_player_entity_id}" ) async def async_unjoin_player(self) -> None: """Unsync this Squeezebox player.""" await self._player.async_unsync() + await self.coordinator.async_refresh() async def async_browse_media( self, diff --git a/homeassistant/components/squeezebox/services.yaml b/homeassistant/components/squeezebox/services.yaml index 90f9bf2d7695f0..07885ae5dd6884 100644 --- a/homeassistant/components/squeezebox/services.yaml +++ b/homeassistant/components/squeezebox/services.yaml @@ -30,19 +30,3 @@ call_query: advanced: true selector: object: -sync: - target: - entity: - integration: squeezebox - domain: media_player - fields: - other_player: - required: true - example: "media_player.living_room" - selector: - text: -unsync: - target: - entity: - integration: squeezebox - domain: media_player diff --git a/homeassistant/components/squeezebox/strings.json b/homeassistant/components/squeezebox/strings.json index 1a120ee0567db8..b1b71cd8c1d1a6 100644 --- a/homeassistant/components/squeezebox/strings.json +++ b/homeassistant/components/squeezebox/strings.json @@ -60,20 +60,6 @@ "description": "[%key:component::squeezebox::services::call_method::fields::parameters::description%]" } } - }, - "sync": { - "name": "Sync", - "description": "Adds another player to this player's sync group. If the other player is already in a sync group, it will leave it.\n.", - "fields": { - "other_player": { - "name": "Other player", - "description": "Name of the other Squeezebox player to link." - } - } - }, - "unsync": { - "name": "Unsync", - "description": "Removes this player from its sync group." 
} }, "entity": { diff --git a/homeassistant/components/statistics/config_flow.py b/homeassistant/components/statistics/config_flow.py index 145a7655b36414..4280c92131a609 100644 --- a/homeassistant/components/statistics/config_flow.py +++ b/homeassistant/components/statistics/config_flow.py @@ -169,8 +169,8 @@ async def async_setup_preview(hass: HomeAssistant) -> None: vol.Required("user_input"): dict, } ) -@callback -def ws_start_preview( +@websocket_api.async_response +async def ws_start_preview( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], @@ -234,6 +234,6 @@ def async_preview_updated(state: str, attributes: Mapping[str, Any]) -> None: preview_entity.hass = hass connection.send_result(msg["id"]) - connection.subscriptions[msg["id"]] = preview_entity.async_start_preview( + connection.subscriptions[msg["id"]] = await preview_entity.async_start_preview( async_preview_updated ) diff --git a/homeassistant/components/statistics/sensor.py b/homeassistant/components/statistics/sensor.py index ba98fe3ec6e357..50d07d4e46686a 100644 --- a/homeassistant/components/statistics/sensor.py +++ b/homeassistant/components/statistics/sensor.py @@ -17,6 +17,7 @@ from homeassistant.components.recorder import get_instance, history from homeassistant.components.sensor import ( DEVICE_CLASS_STATE_CLASSES, + DEVICE_CLASS_UNITS, PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, @@ -37,6 +38,7 @@ CALLBACK_TYPE, Event, EventStateChangedData, + EventStateReportedData, HomeAssistant, State, callback, @@ -48,9 +50,9 @@ from homeassistant.helpers.event import ( async_track_point_in_utc_time, async_track_state_change_event, + async_track_state_report_event, ) from homeassistant.helpers.reload import async_setup_reload_service -from homeassistant.helpers.start import async_at_start from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum @@ -358,23 +360,20 @@ def __init__( self.samples_keep_last: bool = samples_keep_last self._precision: int = precision self._percentile: int = percentile - self._value: StateType | datetime = None - self._unit_of_measurement: str | None = None - self._available: bool = False + self._attr_available: bool = False self.states: deque[float | bool] = deque(maxlen=self._samples_max_buffer_size) self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size) self.attributes: dict[str, StateType] = {} - self._state_characteristic_fn: Callable[[], StateType | datetime] = ( + self._state_characteristic_fn: Callable[[], float | int | datetime | None] = ( self._callable_characteristic_fn(self._state_characteristic) ) self._update_listener: CALLBACK_TYPE | None = None self._preview_callback: Callable[[str, Mapping[str, Any]], None] | None = None - @callback - def async_start_preview( + async def async_start_preview( self, preview_callback: Callable[[str, Mapping[str, Any]], None], ) -> CALLBACK_TYPE: @@ -385,23 +384,22 @@ def async_start_preview( if not self._source_entity_id or ( self._samples_max_buffer_size is None and self._samples_max_age is None ): - self._available = False + self._attr_available = False calculated_state = self._async_calculate_state() preview_callback(calculated_state.state, calculated_state.attributes) return self._call_on_remove_callbacks self._preview_callback = preview_callback - self._async_stats_sensor_startup(self.hass) + await self._async_stats_sensor_startup() return 
self._call_on_remove_callbacks - @callback - def _async_stats_sensor_state_listener( + def _async_handle_new_state( self, - event: Event[EventStateChangedData], + reported_state: State | None, ) -> None: """Handle the sensor state changes.""" - if (new_state := event.data["new_state"]) is None: + if (new_state := reported_state) is None: return self._add_state_to_queue(new_state) self._async_purge_update_and_schedule() @@ -414,28 +412,55 @@ def _async_stats_sensor_state_listener( self.async_write_ha_state() @callback - def _async_stats_sensor_startup(self, _: HomeAssistant) -> None: - """Add listener and get recorded state.""" + def _async_stats_sensor_state_change_listener( + self, + event: Event[EventStateChangedData], + ) -> None: + self._async_handle_new_state(event.data["new_state"]) + + @callback + def _async_stats_sensor_state_report_listener( + self, + event: Event[EventStateReportedData], + ) -> None: + self._async_handle_new_state(event.data["new_state"]) + + async def _async_stats_sensor_startup(self) -> None: + """Add listener and get recorded state. + + Historical data needs to be loaded from the database first before we + can start accepting new incoming changes. + This is needed to ensure that the buffer is properly sorted by time. + """ _LOGGER.debug("Startup for %s", self.entity_id) + if "recorder" in self.hass.config.components: + await self._initialize_from_database() self.async_on_remove( async_track_state_change_event( self.hass, [self._source_entity_id], - self._async_stats_sensor_state_listener, + self._async_stats_sensor_state_change_listener, + ) + ) + self.async_on_remove( + async_track_state_report_event( + self.hass, + [self._source_entity_id], + self._async_stats_sensor_state_report_listener, ) ) - if "recorder" in self.hass.config.components: - self.hass.async_create_task(self._initialize_from_database()) async def async_added_to_hass(self) -> None: """Register callbacks.""" - self.async_on_remove( - async_at_start(self.hass, self._async_stats_sensor_startup) - ) + await self._async_stats_sensor_startup() def _add_state_to_queue(self, new_state: State) -> None: """Add the state to the queue.""" - self._available = new_state.state != STATE_UNAVAILABLE + + # Attention: it is not safe to store the new_state object, + # since the "last_reported" value will be updated over time. + # Here we make a copy the current value, which is okay. 
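# Hedged background for the comment above and for the switch to last_reported a
# few lines below: state_changed events fire only when a state's value or
# attributes actually change, while the newer state_reported events also fire
# when an entity re-reports the same value, in which case only
# State.last_reported moves forward. Copying the numeric value and the
# last_reported timestamp out of the event therefore records every report
# without holding a State object whose last_reported keeps mutating.
# Minimal sketch, assuming a numeric source state (new_state as in this method):
value = float(new_state.state)  # snapshot of the reported value
age = new_state.last_reported   # time of this report, not of the last change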
+ self._attr_available = new_state.state != STATE_UNAVAILABLE if new_state.state == STATE_UNAVAILABLE: self.attributes[STAT_SOURCE_VALUE_VALID] = None return @@ -449,7 +474,7 @@ def _add_state_to_queue(self, new_state: State) -> None: self.states.append(new_state.state == "on") else: self.states.append(float(new_state.state)) - self.ages.append(new_state.last_updated) + self.ages.append(new_state.last_reported) self.attributes[STAT_SOURCE_VALUE_VALID] = True except ValueError: self.attributes[STAT_SOURCE_VALUE_VALID] = False @@ -460,11 +485,28 @@ def _add_state_to_queue(self, new_state: State) -> None: ) return - self._unit_of_measurement = self._derive_unit_of_measurement(new_state) + self._calculate_state_attributes(new_state) + + def _calculate_state_attributes(self, new_state: State) -> None: + """Set the entity state attributes.""" + + self._attr_native_unit_of_measurement = self._calculate_unit_of_measurement( + new_state + ) + self._attr_device_class = self._calculate_device_class( + new_state, self._attr_native_unit_of_measurement + ) + self._attr_state_class = self._calculate_state_class(new_state) + + def _calculate_unit_of_measurement(self, new_state: State) -> str | None: + """Return the calculated unit of measurement. + + The unit of measurement is that of the source sensor, adjusted based on the + state characteristics. + """ - def _derive_unit_of_measurement(self, new_state: State) -> str | None: base_unit: str | None = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - unit: str | None + unit: str | None = None if self.is_binary and self._state_characteristic in STATS_BINARY_PERCENTAGE: unit = PERCENTAGE elif not base_unit: @@ -487,53 +529,61 @@ def _derive_unit_of_measurement(self, new_state: State) -> str | None: unit = base_unit + "/sample" elif self._state_characteristic == STAT_CHANGE_SECOND: unit = base_unit + "/s" + return unit - @property - def device_class(self) -> SensorDeviceClass | None: - """Return the class of this device.""" + def _calculate_device_class( + self, new_state: State, unit: str | None + ) -> SensorDeviceClass | None: + """Return the calculated device class. + + The device class is calculated based on the state characteristics, + the source device class and the unit of measurement is + in the device class units list. 
+ """ + + device_class: SensorDeviceClass | None = None if self._state_characteristic in STATS_DATETIME: return SensorDeviceClass.TIMESTAMP if self._state_characteristic in STATS_NUMERIC_RETAIN_UNIT: - source_state = self.hass.states.get(self._source_entity_id) - if source_state is None: + device_class = new_state.attributes.get(ATTR_DEVICE_CLASS) + if device_class is None: return None - source_device_class = source_state.attributes.get(ATTR_DEVICE_CLASS) - if source_device_class is None: + if ( + sensor_device_class := try_parse_enum(SensorDeviceClass, device_class) + ) is None: return None - sensor_device_class = try_parse_enum(SensorDeviceClass, source_device_class) - if sensor_device_class is None: + if ( + sensor_device_class + and ( + sensor_state_classes := DEVICE_CLASS_STATE_CLASSES.get( + sensor_device_class + ) + ) + and sensor_state_classes + and SensorStateClass.MEASUREMENT not in sensor_state_classes + ): return None - sensor_state_classes = DEVICE_CLASS_STATE_CLASSES.get( - sensor_device_class, set() - ) - if SensorStateClass.MEASUREMENT not in sensor_state_classes: + if device_class not in DEVICE_CLASS_UNITS: + return None + if ( + device_class in DEVICE_CLASS_UNITS + and unit not in DEVICE_CLASS_UNITS[device_class] + ): return None - return sensor_device_class - return None - @property - def state_class(self) -> SensorStateClass | None: - """Return the state class of this entity.""" + return device_class + + def _calculate_state_class(self, new_state: State) -> SensorStateClass | None: + """Return the calculated state class. + + Will be None if the characteristics is not numerical, otherwise + SensorStateClass.MEASUREMENT. + """ if self._state_characteristic in STATS_NOT_A_NUMBER: return None return SensorStateClass.MEASUREMENT - @property - def native_value(self) -> StateType | datetime: - """Return the state of the sensor.""" - return self._value - - @property - def native_unit_of_measurement(self) -> str | None: - """Return the unit the value is expressed in.""" - return self._unit_of_measurement - - @property - def available(self) -> bool: - """Return the availability of the sensor linked to the source sensor.""" - return self._available - @property def extra_state_attributes(self) -> dict[str, StateType] | None: """Return the state attributes of the sensor.""" @@ -677,7 +727,7 @@ async def _initialize_from_database(self) -> None: ): for state in reversed(states): self._add_state_to_queue(state) - + self._calculate_state_attributes(state) self._async_purge_update_and_schedule() # only write state to the state machine if we are not in preview mode @@ -712,19 +762,21 @@ def _update_value(self) -> None: """ value = self._state_characteristic_fn() - + _LOGGER.debug( + "Updating value: states: %s, ages: %s => %s", self.states, self.ages, value + ) if self._state_characteristic not in STATS_NOT_A_NUMBER: with contextlib.suppress(TypeError): value = round(cast(float, value), self._precision) if self._precision == 0: value = int(value) - self._value = value + self._attr_native_value = value def _callable_characteristic_fn( self, characteristic: str - ) -> Callable[[], StateType | datetime]: + ) -> Callable[[], float | int | datetime | None]: """Return the function callable of one characteristic function.""" - function: Callable[[], StateType | datetime] = getattr( + function: Callable[[], float | int | datetime | None] = getattr( self, f"_stat_binary_{characteristic}" if self.is_binary @@ -735,6 +787,8 @@ def _callable_characteristic_fn( # Statistics for numeric sensor def 
_stat_average_linear(self) -> StateType: + if len(self.states) == 1: + return self.states[0] if len(self.states) >= 2: area: float = 0 for i in range(1, len(self.states)): @@ -748,6 +802,8 @@ def _stat_average_linear(self) -> StateType: return None def _stat_average_step(self) -> StateType: + if len(self.states) == 1: + return self.states[0] if len(self.states) >= 2: area: float = 0 for i in range(1, len(self.states)): @@ -803,12 +859,12 @@ def _stat_datetime_value_min(self) -> datetime | None: return None def _stat_distance_95_percent_of_values(self) -> StateType: - if len(self.states) >= 2: + if len(self.states) >= 1: return 2 * 1.96 * cast(float, self._stat_standard_deviation()) return None def _stat_distance_99_percent_of_values(self) -> StateType: - if len(self.states) >= 2: + if len(self.states) >= 1: return 2 * 2.58 * cast(float, self._stat_standard_deviation()) return None @@ -835,17 +891,23 @@ def _stat_median(self) -> StateType: return None def _stat_noisiness(self) -> StateType: + if len(self.states) == 1: + return 0.0 if len(self.states) >= 2: return cast(float, self._stat_sum_differences()) / (len(self.states) - 1) return None def _stat_percentile(self) -> StateType: + if len(self.states) == 1: + return self.states[0] if len(self.states) >= 2: percentiles = statistics.quantiles(self.states, n=100, method="exclusive") return percentiles[self._percentile - 1] return None def _stat_standard_deviation(self) -> StateType: + if len(self.states) == 1: + return 0.0 if len(self.states) >= 2: return statistics.stdev(self.states) return None @@ -856,6 +918,8 @@ def _stat_sum(self) -> StateType: return None def _stat_sum_differences(self) -> StateType: + if len(self.states) == 1: + return 0.0 if len(self.states) >= 2: return sum( abs(j - i) @@ -864,6 +928,8 @@ def _stat_sum_differences(self) -> StateType: return None def _stat_sum_differences_nonnegative(self) -> StateType: + if len(self.states) == 1: + return 0.0 if len(self.states) >= 2: return sum( (j - i if j >= i else j - 0) @@ -885,6 +951,8 @@ def _stat_value_min(self) -> StateType: return None def _stat_variance(self) -> StateType: + if len(self.states) == 1: + return 0.0 if len(self.states) >= 2: return statistics.variance(self.states) return None @@ -892,6 +960,8 @@ def _stat_variance(self) -> StateType: # Statistics for binary sensor def _stat_binary_average_step(self) -> StateType: + if len(self.states) == 1: + return 100.0 * int(self.states[0] is True) if len(self.states) >= 2: on_seconds: float = 0 for i in range(1, len(self.states)): diff --git a/homeassistant/components/statistics/strings.json b/homeassistant/components/statistics/strings.json index 5f32b203bfdf45..a060c88da249c5 100644 --- a/homeassistant/components/statistics/strings.json +++ b/homeassistant/components/statistics/strings.json @@ -1,4 +1,5 @@ { + "title": "Statistics", "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" diff --git a/homeassistant/components/steam_online/config_flow.py b/homeassistant/components/steam_online/config_flow.py index 4b99bf7738dd88..69009fca8c48d3 100644 --- a/homeassistant/components/steam_online/config_flow.py +++ b/homeassistant/components/steam_online/config_flow.py @@ -36,15 +36,11 @@ def validate_input(user_input: dict[str, str]) -> dict[str, str | int]: class SteamFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Steam.""" - def __init__(self) -> None: - """Initialize the flow.""" - self.entry: SteamConfigEntry | None = None - @staticmethod 
@callback def async_get_options_flow( config_entry: SteamConfigEntry, - ) -> OptionsFlow: + ) -> SteamOptionsFlowHandler: """Get the options flow for this handler.""" return SteamOptionsFlowHandler(config_entry) @@ -53,8 +49,8 @@ async def async_step_user( ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} - if user_input is None and self.entry: - user_input = {CONF_ACCOUNT: self.entry.data[CONF_ACCOUNT]} + if user_input is None and self.source == SOURCE_REAUTH: + user_input = {CONF_ACCOUNT: self._get_reauth_entry().data[CONF_ACCOUNT]} elif user_input is not None: try: res = await self.hass.async_add_executor_job(validate_input, user_input) @@ -102,8 +98,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a reauthorization flow request.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -129,7 +123,6 @@ class SteamOptionsFlowHandler(OptionsFlow): def __init__(self, entry: SteamConfigEntry) -> None: """Initialize options flow.""" - self.entry = entry self.options = dict(entry.options) async def async_step_init( @@ -137,7 +130,7 @@ async def async_step_init( ) -> ConfigFlowResult: """Manage Steam options.""" if user_input is not None: - await self.hass.config_entries.async_unload(self.entry.entry_id) + await self.hass.config_entries.async_unload(self.config_entry.entry_id) for _id in self.options[CONF_ACCOUNTS]: if _id not in user_input[CONF_ACCOUNTS] and ( entity_id := er.async_get(self.hass).async_get_entity_id( @@ -152,7 +145,7 @@ async def async_step_init( if _id in user_input[CONF_ACCOUNTS] } } - await self.hass.config_entries.async_reload(self.entry.entry_id) + await self.hass.config_entries.async_reload(self.config_entry.entry_id) return self.async_create_entry(title="", data=channel_data) error = None try: @@ -182,7 +175,9 @@ def get_accounts(self) -> list[dict[str, str | int]]: """Get accounts.""" interface = steam.api.interface("ISteamUser") try: - friends = interface.GetFriendList(steamid=self.entry.data[CONF_ACCOUNT]) + friends = interface.GetFriendList( + steamid=self.config_entry.data[CONF_ACCOUNT] + ) _users_str = [user["steamid"] for user in friends["friendslist"]["friends"]] except steam.api.HTTPError: return [] diff --git a/homeassistant/components/stream/const.py b/homeassistant/components/stream/const.py index a2fa065e0192ba..66455ffad1a9eb 100644 --- a/homeassistant/components/stream/const.py +++ b/homeassistant/components/stream/const.py @@ -1,5 +1,9 @@ """Constants for Stream component.""" +from __future__ import annotations + +from typing import Final + DOMAIN = "stream" ATTR_ENDPOINTS = "endpoints" @@ -11,8 +15,8 @@ OUTPUT_FORMATS = [HLS_PROVIDER] -SEGMENT_CONTAINER_FORMAT = "mp4" # format for segments -RECORDER_CONTAINER_FORMAT = "mp4" # format for recorder output +SEGMENT_CONTAINER_FORMAT: Final = "mp4" # format for segments +RECORDER_CONTAINER_FORMAT: Final = "mp4" # format for recorder output AUDIO_CODECS = {"aac", "mp3"} FORMAT_CONTENT_TYPE = {HLS_PROVIDER: "application/vnd.apple.mpegurl"} diff --git a/homeassistant/components/stream/core.py b/homeassistant/components/stream/core.py index 68c08a4f072070..4184b23b9a07e1 100644 --- a/homeassistant/components/stream/core.py +++ b/homeassistant/components/stream/core.py @@ -9,7 +9,7 @@ import datetime from enum import IntEnum import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, 
cast from aiohttp import web import numpy as np @@ -27,7 +27,7 @@ ) if TYPE_CHECKING: - from av import CodecContext, Packet + from av import Packet, VideoCodecContext from homeassistant.components.camera import DynamicStreamSettings @@ -438,17 +438,17 @@ def __init__( """Initialize.""" # Keep import here so that we can import stream integration - # without installingreqs + # without installing reqs # pylint: disable-next=import-outside-toplevel from homeassistant.components.camera.img_util import TurboJPEGSingleton - self._packet: Packet = None + self._packet: Packet | None = None self._event: asyncio.Event = asyncio.Event() self._hass = hass self._image: bytes | None = None self._turbojpeg = TurboJPEGSingleton.instance() self._lock = asyncio.Lock() - self._codec_context: CodecContext | None = None + self._codec_context: VideoCodecContext | None = None self._stream_settings = stream_settings self._dynamic_stream_settings = dynamic_stream_settings @@ -460,7 +460,7 @@ def stash_keyframe_packet(self, packet: Packet) -> None: self._packet = packet self._hass.loop.call_soon_threadsafe(self._event.set) - def create_codec_context(self, codec_context: CodecContext) -> None: + def create_codec_context(self, codec_context: VideoCodecContext) -> None: """Create a codec context to be used for decoding the keyframes. This is run by the worker thread and will only be called once per worker. @@ -474,7 +474,9 @@ def create_codec_context(self, codec_context: CodecContext) -> None: # pylint: disable-next=import-outside-toplevel from av import CodecContext - self._codec_context = CodecContext.create(codec_context.name, "r") + self._codec_context = cast( + "VideoCodecContext", CodecContext.create(codec_context.name, "r") + ) self._codec_context.extradata = codec_context.extradata self._codec_context.skip_frame = "NONKEY" self._codec_context.thread_type = "NONE" @@ -506,9 +508,8 @@ def _generate_image(self, width: int | None, height: int | None) -> None: frames = self._codec_context.decode(None) break except EOFError: - _LOGGER.debug("Codec context needs flushing, attempting to reopen") - self._codec_context.close() - self._codec_context.open() + _LOGGER.debug("Codec context needs flushing") + self._codec_context.flush_buffers() else: _LOGGER.debug("Unable to decode keyframe") return diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json index 00387d97b8350d..fdf81d99e656c5 100644 --- a/homeassistant/components/stream/manifest.json +++ b/homeassistant/components/stream/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.5", "ha-av==10.1.1", "numpy==1.26.4"] + "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.1.3"] } diff --git a/homeassistant/components/stream/recorder.py b/homeassistant/components/stream/recorder.py index 6dfc09891b7445..a24440e6d19c05 100644 --- a/homeassistant/components/stream/recorder.py +++ b/homeassistant/components/stream/recorder.py @@ -9,6 +9,7 @@ from typing import TYPE_CHECKING import av +import av.container from homeassistant.core import HomeAssistant, callback @@ -105,24 +106,23 @@ def write_segment(segment: Segment) -> None: # Create output on first segment if not output: + container_options: dict[str, str] = { + "video_track_timescale": str(int(1 / source_v.time_base)), # type: ignore[operator] + "movflags": "frag_keyframe+empty_moov", + "min_frag_duration": str(self.stream_settings.min_segment_duration), + } 
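# Worked example for the video_track_timescale option assembled above, using an
# assumed but typical source: PyAV exposes stream.time_base as a
# fractions.Fraction, and RTSP/MPEG-TS video commonly runs on a 90 kHz clock,
# so the muxer is given 90000 ticks per second.
from fractions import Fraction

time_base = Fraction(1, 90000)       # e.g. source_v.time_base for a 90 kHz clock
timescale = str(int(1 / time_base))  # -> "90000"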
output = av.open( self.video_path + ".tmp", "w", format=RECORDER_CONTAINER_FORMAT, - container_options={ - "video_track_timescale": str(int(1 / source_v.time_base)), - "movflags": "frag_keyframe+empty_moov", - "min_frag_duration": str( - self.stream_settings.min_segment_duration - ), - }, + container_options=container_options, ) # Add output streams if necessary if not output_v: output_v = output.add_stream(template=source_v) context = output_v.codec_context - context.flags |= "GLOBAL_HEADER" + context.global_header = True if source_a and not output_a: output_a = output.add_stream(template=source_a) @@ -132,21 +132,23 @@ def write_segment(segment: Segment) -> None: last_stream_id = segment.stream_id pts_adjuster["video"] = int( (running_duration - source.start_time) - / (av.time_base * source_v.time_base) + / (av.time_base * source_v.time_base) # type: ignore[operator] ) if source_a: pts_adjuster["audio"] = int( (running_duration - source.start_time) - / (av.time_base * source_a.time_base) + / (av.time_base * source_a.time_base) # type: ignore[operator] ) # Remux video for packet in source.demux(): - if packet.dts is None: + if packet.pts is None: continue - packet.pts += pts_adjuster[packet.stream.type] - packet.dts += pts_adjuster[packet.stream.type] - packet.stream = output_v if packet.stream.type == "video" else output_a + packet.pts += pts_adjuster[packet.stream.type] # type: ignore[operator] + packet.dts += pts_adjuster[packet.stream.type] # type: ignore[operator] + stream = output_v if packet.stream.type == "video" else output_a + assert stream + packet.stream = stream output.mux(packet) running_duration += source.duration - source.start_time @@ -169,7 +171,9 @@ def write_transform_matrix_and_rename(video_path: str) -> None: os.remove(video_path + ".tmp") def finish_writing( - segments: deque[Segment], output: av.OutputContainer, video_path: str + segments: deque[Segment], + output: av.container.OutputContainer | None, + video_path: str, ) -> None: """Finish writing output.""" # Should only have 0 or 1 segments, but loop through just in case diff --git a/homeassistant/components/stream/worker.py b/homeassistant/components/stream/worker.py index 0d72a9b081871c..8c9bb1b8e9e29a 100644 --- a/homeassistant/components/stream/worker.py +++ b/homeassistant/components/stream/worker.py @@ -13,6 +13,9 @@ from typing import Any, Self, cast import av +import av.audio +import av.container +import av.stream from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -47,10 +50,10 @@ class StreamWorkerError(Exception): """An exception thrown while processing a stream.""" -def redact_av_error_string(err: av.AVError) -> str: +def redact_av_error_string(err: av.FFmpegError) -> str: """Return an error string with credentials redacted from the url.""" - parts = [str(err.type), err.strerror] - if err.filename is not None: + parts = [str(err.type), err.strerror] # type: ignore[attr-defined] + if err.filename: parts.append(redact_credentials(err.filename)) return ", ".join(parts) @@ -123,30 +126,31 @@ def diagnostics(self) -> Diagnostics: class StreamMuxer: """StreamMuxer re-packages video/audio packets for output.""" + _segment_start_dts: int + _memory_file: BytesIO + _av_output: av.container.OutputContainer + _output_video_stream: av.VideoStream + _output_audio_stream: av.audio.AudioStream | None + _segment: Segment | None + # the following 2 member variables are used for Part formation + _memory_file_pos: int + _part_start_dts: float + def __init__( self, hass: 
HomeAssistant, - video_stream: av.video.VideoStream, - audio_stream: av.audio.stream.AudioStream | None, - audio_bsf: av.BitStreamFilter | None, + video_stream: av.VideoStream, + audio_stream: av.audio.AudioStream | None, + audio_bsf: str | None, stream_state: StreamState, stream_settings: StreamSettings, ) -> None: """Initialize StreamMuxer.""" self._hass = hass - self._segment_start_dts: int = cast(int, None) - self._memory_file: BytesIO = cast(BytesIO, None) - self._av_output: av.container.OutputContainer = None - self._input_video_stream: av.video.VideoStream = video_stream - self._input_audio_stream: av.audio.stream.AudioStream | None = audio_stream + self._input_video_stream = video_stream + self._input_audio_stream = audio_stream self._audio_bsf = audio_bsf - self._audio_bsf_context: av.BitStreamFilterContext = None - self._output_video_stream: av.video.VideoStream = None - self._output_audio_stream: av.audio.stream.AudioStream | None = None - self._segment: Segment | None = None - # the following 3 member variables are used for Part formation - self._memory_file_pos: int = cast(int, None) - self._part_start_dts: int = cast(int, None) + self._audio_bsf_context: av.BitStreamFilterContext | None = None self._part_has_keyframe = False self._stream_settings = stream_settings self._stream_state = stream_state @@ -156,83 +160,83 @@ def make_new_av( self, memory_file: BytesIO, sequence: int, - input_vstream: av.video.VideoStream, - input_astream: av.audio.stream.AudioStream | None, + input_vstream: av.VideoStream, + input_astream: av.audio.AudioStream | None, ) -> tuple[ av.container.OutputContainer, - av.video.VideoStream, - av.audio.stream.AudioStream | None, + av.VideoStream, + av.audio.AudioStream | None, ]: """Make a new av OutputContainer and add output streams.""" + container_options: dict[str, str] = { + # Removed skip_sidx - see: + # https://github.com/home-assistant/core/pull/39970 + # "cmaf" flag replaces several of the movflags used, + # but too recent to use for now + "movflags": "frag_custom+empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", + # Sometimes the first segment begins with negative timestamps, + # and this setting just + # adjusts the timestamps in the output from that segment to start + # from 0. Helps from having to make some adjustments + # in test_durations + "avoid_negative_ts": "make_non_negative", + "fragment_index": str(sequence + 1), + "video_track_timescale": str(int(1 / input_vstream.time_base)), # type: ignore[operator] + # Only do extra fragmenting if we are using ll_hls + # Let ffmpeg do the work using frag_duration + # Fragment durations may exceed the 15% allowed variance but it seems ok + **( + { + "movflags": "empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", + # Create a fragment every TARGET_PART_DURATION. The data from + # each fragment is stored in a "Part" that can be combined with + # the data from all the other "Part"s, plus an init section, + # to reconstitute the data in a "Segment". + # + # The LL-HLS spec allows for a fragment's duration to be within + # the range [0.85x,1.0x] of the part target duration. We use the + # frag_duration option to tell ffmpeg to try to cut the + # fragments when they reach frag_duration. However, + # the resulting fragments can have variability in their + # durations and can end up being too short or too long. 
With a + # video track with no audio, the discrete nature of frames means + # that the frame at the end of a fragment will sometimes extend + # slightly beyond the desired frag_duration. + # + # If there are two tracks, as in the case of a video feed with + # audio, there is an added wrinkle as the fragment cut seems to + # be done on the first track that crosses the desired threshold, + # and cutting on the audio track may also result in a shorter + # video fragment than desired. + # + # Given this, our approach is to give ffmpeg a frag_duration + # somewhere in the middle of the range, hoping that the parts + # stay pretty well bounded, and we adjust the part durations + # a bit in the hls metadata so that everything "looks" ok. + "frag_duration": str( + int(self._stream_settings.part_target_duration * 9e5) + ), + } + if self._stream_settings.ll_hls + else {} + ), + } container = av.open( memory_file, mode="w", format=SEGMENT_CONTAINER_FORMAT, - container_options={ - # Removed skip_sidx - see: - # https://github.com/home-assistant/core/pull/39970 - # "cmaf" flag replaces several of the movflags used, - # but too recent to use for now - "movflags": "frag_custom+empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", - # Sometimes the first segment begins with negative timestamps, - # and this setting just - # adjusts the timestamps in the output from that segment to start - # from 0. Helps from having to make some adjustments - # in test_durations - "avoid_negative_ts": "make_non_negative", - "fragment_index": str(sequence + 1), - "video_track_timescale": str(int(1 / input_vstream.time_base)), - # Only do extra fragmenting if we are using ll_hls - # Let ffmpeg do the work using frag_duration - # Fragment durations may exceed the 15% allowed variance but it seems ok - **( - { - "movflags": "empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", - # Create a fragment every TARGET_PART_DURATION. The data from - # each fragment is stored in a "Part" that can be combined with - # the data from all the other "Part"s, plus an init section, - # to reconstitute the data in a "Segment". - # - # The LL-HLS spec allows for a fragment's duration to be within - # the range [0.85x,1.0x] of the part target duration. We use the - # frag_duration option to tell ffmpeg to try to cut the - # fragments when they reach frag_duration. However, - # the resulting fragments can have variability in their - # durations and can end up being too short or too long. With a - # video track with no audio, the discrete nature of frames means - # that the frame at the end of a fragment will sometimes extend - # slightly beyond the desired frag_duration. - # - # If there are two tracks, as in the case of a video feed with - # audio, there is an added wrinkle as the fragment cut seems to - # be done on the first track that crosses the desired threshold, - # and cutting on the audio track may also result in a shorter - # video fragment than desired. - # - # Given this, our approach is to give ffmpeg a frag_duration - # somewhere in the middle of the range, hoping that the parts - # stay pretty well bounded, and we adjust the part durations - # a bit in the hls metadata so that everything "looks" ok. 
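# Worked example for the frag_duration value above, with an assumed part target:
# the mp4 muxer option is expressed in microseconds, and the 9e5 factor places
# the cut at 0.9x the part target duration, inside the LL-HLS window of
# [0.85x, 1.0x] described in the comment.
part_target_duration = 1.0                            # seconds, example value
frag_duration = str(int(part_target_duration * 9e5))  # -> "900000" us == 0.9 s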
- "frag_duration": str( - int(self._stream_settings.part_target_duration * 9e5) - ), - } - if self._stream_settings.ll_hls - else {} - ), - }, + container_options=container_options, ) output_vstream = container.add_stream(template=input_vstream) # Check if audio is requested output_astream = None if input_astream: if self._audio_bsf: - self._audio_bsf_context = self._audio_bsf.create() - self._audio_bsf_context.set_input_stream(input_astream) - output_astream = container.add_stream( - template=self._audio_bsf_context or input_astream - ) - return container, output_vstream, output_astream + self._audio_bsf_context = av.BitStreamFilterContext( + self._audio_bsf, input_astream + ) + output_astream = container.add_stream(template=input_astream) + return container, output_vstream, output_astream # type: ignore[return-value] def reset(self, video_dts: int) -> None: """Initialize a new stream segment.""" @@ -251,7 +255,7 @@ def reset(self, video_dts: int) -> None: input_astream=self._input_audio_stream, ) if self._output_video_stream.name == "hevc": - self._output_video_stream.codec_tag = "hvc1" + self._output_video_stream.codec_context.codec_tag = "hvc1" def mux_packet(self, packet: av.Packet) -> None: """Mux a packet to the appropriate output stream.""" @@ -273,11 +277,11 @@ def mux_packet(self, packet: av.Packet) -> None: self._part_has_keyframe |= packet.is_keyframe elif packet.stream == self._input_audio_stream: + assert self._output_audio_stream if self._audio_bsf_context: - self._audio_bsf_context.send(packet) - while packet := self._audio_bsf_context.recv(): - packet.stream = self._output_audio_stream - self._av_output.mux(packet) + for audio_packet in self._audio_bsf_context.filter(packet): + audio_packet.stream = self._output_audio_stream + self._av_output.mux(audio_packet) return packet.stream = self._output_audio_stream self._av_output.mux(packet) @@ -395,7 +399,7 @@ def close(self) -> None: self._memory_file.close() -class PeekIterator(Iterator): +class PeekIterator(Iterator[av.Packet]): """An Iterator that may allow multiple passes. This may be consumed like a normal Iterator, however also supports a @@ -459,7 +463,7 @@ def is_valid(self, packet: av.Packet) -> bool: """Validate the packet timestamp based on ordering within the stream.""" # Discard packets missing DTS. Terminate if too many are missing. if packet.dts is None: - if self._missing_dts >= MAX_MISSING_DTS: + if self._missing_dts >= MAX_MISSING_DTS: # type: ignore[unreachable] raise StreamWorkerError( f"No dts in {MAX_MISSING_DTS+1} consecutive packets" ) @@ -486,7 +490,7 @@ def is_keyframe(packet: av.Packet) -> Any: def get_audio_bitstream_filter( packets: Iterator[av.Packet], audio_stream: Any -) -> av.BitStreamFilterContext | None: +) -> str | None: """Return the aac_adtstoasc bitstream filter if ADTS AAC is detected.""" if not audio_stream: return None @@ -503,7 +507,7 @@ def get_audio_bitstream_filter( _LOGGER.debug( "ADTS AAC detected. 
Adding aac_adtstoaac bitstream filter" ) - return av.BitStreamFilter("aac_adtstoasc") + return "aac_adtstoasc" break return None @@ -524,7 +528,7 @@ def stream_worker( del pyav_options["stimeout"] try: container = av.open(source, options=pyav_options, timeout=SOURCE_TIMEOUT) - except av.AVError as err: + except av.FFmpegError as err: raise StreamWorkerError( f"Error opening stream ({redact_av_error_string(err)})" ) from err @@ -541,7 +545,7 @@ def stream_worker( audio_stream = None # Some audio streams do not have a profile and throw errors when remuxing if audio_stream and audio_stream.profile is None: - audio_stream = None + audio_stream = None # type: ignore[unreachable] # Disable ll-hls for hls inputs if container.format.name == "hls": for field in fields(StreamSettings): @@ -556,8 +560,8 @@ def stream_worker( stream_state.diagnostics.set_value("audio_codec", audio_stream.name) dts_validator = TimestampValidator( - int(1 / video_stream.time_base), - 1 / audio_stream.time_base if audio_stream else 1, + int(1 / video_stream.time_base), # type: ignore[operator] + int(1 / audio_stream.time_base) if audio_stream else 1, # type: ignore[operator] ) container_packets = PeekIterator( filter(dts_validator.is_valid, container.demux((video_stream, audio_stream))) @@ -598,7 +602,7 @@ def is_video(packet: av.Packet) -> Any: except StopIteration as ex: container.close() raise StreamEndedError("Stream ended; no additional packets") from ex - except av.AVError as ex: + except av.FFmpegError as ex: container.close() raise StreamWorkerError( f"Error demuxing stream while finding first packet ({redact_av_error_string(ex)})" @@ -625,7 +629,7 @@ def is_video(packet: av.Packet) -> Any: raise except StopIteration as ex: raise StreamEndedError("Stream ended; no additional packets") from ex - except av.AVError as ex: + except av.FFmpegError as ex: raise StreamWorkerError( f"Error demuxing stream ({redact_av_error_string(ex)})" ) from ex diff --git a/homeassistant/components/subaru/__init__.py b/homeassistant/components/subaru/__init__.py index db2ee7fdbbc660..3762b16e58bccc 100644 --- a/homeassistant/components/subaru/__init__.py +++ b/homeassistant/components/subaru/__init__.py @@ -85,6 +85,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=COORDINATOR_NAME, update_method=async_update_data, update_interval=timedelta(seconds=FETCH_INTERVAL), diff --git a/homeassistant/components/subaru/config_flow.py b/homeassistant/components/subaru/config_flow.py index 3d96a89a14f737..0ef4ed29941f1a 100644 --- a/homeassistant/components/subaru/config_flow.py +++ b/homeassistant/components/subaru/config_flow.py @@ -106,7 +106,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def validate_login_creds(self, data): """Validate the user input allows us to connect. 
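# Hedged note on the options-flow change above: OptionsFlowHandler() is now
# constructed without the entry because the OptionsFlow base class exposes
# self.config_entry itself (the steam_online hunk earlier in this diff relies on
# the same property), so handlers no longer need an __init__ that stores it.
# Minimal sketch under that assumption; the class name and the "scan_interval"
# option key are illustrative only:
import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow that reads its entry from the base class."""

    async def async_step_init(
        self, user_input: dict | None = None
    ) -> ConfigFlowResult:
        """Manage the options for the entry this flow was opened for."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)
        # self.config_entry is populated by the framework, not by __init__
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        "scan_interval",
                        default=self.config_entry.options.get("scan_interval", 30),
                    ): int
                }
            ),
        )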
@@ -218,10 +218,6 @@ async def async_step_pin( class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Subaru.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/suez_water/__init__.py b/homeassistant/components/suez_water/__init__.py index f5b2880e0117bb..06f503b85c26d8 100644 --- a/homeassistant/components/suez_water/__init__.py +++ b/homeassistant/components/suez_water/__init__.py @@ -2,15 +2,12 @@ from __future__ import annotations -from pysuez import SuezClient -from pysuez.client import PySuezError - from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady -from .const import CONF_COUNTER_ID, DOMAIN +from .const import DOMAIN +from .coordinator import SuezWaterCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] @@ -18,23 +15,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Suez Water from a config entry.""" - def get_client() -> SuezClient: - try: - client = SuezClient( - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - entry.data[CONF_COUNTER_ID], - provider=None, - ) - if not client.check_credentials(): - raise ConfigEntryError - except PySuezError as ex: - raise ConfigEntryNotReady from ex - return client - - hass.data.setdefault(DOMAIN, {})[ - entry.entry_id - ] = await hass.async_add_executor_job(get_client) + coordinator = SuezWaterCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/suez_water/config_flow.py b/homeassistant/components/suez_water/config_flow.py index 28b211dc8080ae..ac09cf4a1d3bea 100644 --- a/homeassistant/components/suez_water/config_flow.py +++ b/homeassistant/components/suez_water/config_flow.py @@ -5,8 +5,7 @@ import logging from typing import Any -from pysuez import SuezClient -from pysuez.client import PySuezError +from pysuez import PySuezError, SuezClient import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -21,28 +20,34 @@ { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, - vol.Required(CONF_COUNTER_ID): str, + vol.Optional(CONF_COUNTER_ID): str, } ) -def validate_input(data: dict[str, Any]) -> None: +async def validate_input(data: dict[str, Any]) -> None: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. 
""" try: + counter_id = data.get(CONF_COUNTER_ID) client = SuezClient( data[CONF_USERNAME], data[CONF_PASSWORD], - data[CONF_COUNTER_ID], - provider=None, + counter_id, ) - if not client.check_credentials(): + if not await client.check_credentials(): raise InvalidAuth except PySuezError as ex: raise CannotConnect from ex + if counter_id is None: + try: + data[CONF_COUNTER_ID] = await client.find_counter() + except PySuezError as ex: + raise CounterNotFound from ex + class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Suez Water.""" @@ -58,11 +63,13 @@ async def async_step_user( await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() try: - await self.hass.async_add_executor_job(validate_input, user_input) + await validate_input(user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" + except CounterNotFound: + errors["base"] = "counter_not_found" except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -82,3 +89,7 @@ class CannotConnect(HomeAssistantError): class InvalidAuth(HomeAssistantError): """Error to indicate there is invalid auth.""" + + +class CounterNotFound(HomeAssistantError): + """Error to indicate we cannot automatically found the counter id.""" diff --git a/homeassistant/components/suez_water/const.py b/homeassistant/components/suez_water/const.py index 7afc0d3ce3ea51..cecd779c22c401 100644 --- a/homeassistant/components/suez_water/const.py +++ b/homeassistant/components/suez_water/const.py @@ -1,5 +1,9 @@ """Constants for the Suez Water integration.""" +from datetime import timedelta + DOMAIN = "suez_water" CONF_COUNTER_ID = "counter_id" + +DATA_REFRESH_INTERVAL = timedelta(hours=12) diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py new file mode 100644 index 00000000000000..224929c606e595 --- /dev/null +++ b/homeassistant/components/suez_water/coordinator.py @@ -0,0 +1,88 @@ +"""Suez water update coordinator.""" + +from collections.abc import Mapping +from dataclasses import dataclass +from datetime import date +from typing import Any + +from pysuez import PySuezError, SuezClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import _LOGGER, HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import CONF_COUNTER_ID, DATA_REFRESH_INTERVAL, DOMAIN + + +@dataclass +class SuezWaterAggregatedAttributes: + """Class containing aggregated sensor extra attributes.""" + + this_month_consumption: dict[date, float] + previous_month_consumption: dict[date, float] + last_year_overall: dict[str, float] + this_year_overall: dict[str, float] + history: dict[date, float] + highest_monthly_consumption: float + + +@dataclass +class SuezWaterData: + """Class used to hold all fetch data from suez api.""" + + aggregated_value: float + aggregated_attr: Mapping[str, Any] + price: float + + +class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): + """Suez water coordinator.""" + + _suez_client: SuezClient + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: + """Initialize suez water coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + 
update_interval=DATA_REFRESH_INTERVAL, + always_update=True, + config_entry=config_entry, + ) + + async def _async_setup(self) -> None: + self._suez_client = SuezClient( + username=self.config_entry.data[CONF_USERNAME], + password=self.config_entry.data[CONF_PASSWORD], + counter_id=self.config_entry.data[CONF_COUNTER_ID], + ) + if not await self._suez_client.check_credentials(): + raise ConfigEntryError("Invalid credentials for suez water") + + async def _async_update_data(self) -> SuezWaterData: + """Fetch data from API endpoint.""" + try: + aggregated = await self._suez_client.fetch_aggregated_data() + data = SuezWaterData( + aggregated_value=aggregated.value, + aggregated_attr={ + "this_month_consumption": aggregated.current_month, + "previous_month_consumption": aggregated.previous_month, + "highest_monthly_consumption": aggregated.highest_monthly_consumption, + "last_year_overall": aggregated.previous_year, + "this_year_overall": aggregated.current_year, + "history": aggregated.history, + }, + price=(await self._suez_client.get_price()).price, + ) + except PySuezError as err: + _LOGGER.exception(err) + raise UpdateFailed( + f"Suez coordinator error communicating with API: {err}" + ) from err + _LOGGER.debug("Successfully fetched suez data") + return data diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 4503d7a1119177..5eb05b9acb7c90 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -1,10 +1,10 @@ { "domain": "suez_water", "name": "Suez Water", - "codeowners": ["@ooii"], + "codeowners": ["@ooii", "@jb101010-2"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuez==0.2.0"] + "requirements": ["pysuezV2==1.3.1"] } diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 5b00cbf2dc41d9..2ba699a9af14d3 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -2,24 +2,53 @@ from __future__ import annotations -from datetime import timedelta -import logging +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from typing import Any -from pysuez import SuezClient -from pysuez.client import PySuezError +from pysuez.const import ATTRIBUTION -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfVolume +from homeassistant.const import CURRENCY_EURO, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONF_COUNTER_ID, DOMAIN +from .coordinator import SuezWaterCoordinator, SuezWaterData -_LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(hours=12) +@dataclass(frozen=True, kw_only=True) +class SuezWaterSensorEntityDescription(SensorEntityDescription): + """Describes Suez water sensor entity.""" + + value_fn: Callable[[SuezWaterData], float | str | None] + attr_fn: Callable[[SuezWaterData], Mapping[str, Any] | 
None] = lambda _: None + + +SENSORS: tuple[SuezWaterSensorEntityDescription, ...] = ( + SuezWaterSensorEntityDescription( + key="water_usage_yesterday", + translation_key="water_usage_yesterday", + native_unit_of_measurement=UnitOfVolume.LITERS, + device_class=SensorDeviceClass.WATER, + value_fn=lambda suez_data: suez_data.aggregated_value, + attr_fn=lambda suez_data: suez_data.aggregated_attr, + ), + SuezWaterSensorEntityDescription( + key="water_price", + translation_key="water_price", + native_unit_of_measurement=CURRENCY_EURO, + device_class=SensorDeviceClass.MONETARY, + value_fn=lambda suez_data: suez_data.price, + ), +) async def async_setup_entry( @@ -28,68 +57,43 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Suez Water sensor from a config entry.""" - client = hass.data[DOMAIN][entry.entry_id] - async_add_entities([SuezSensor(client, entry.data[CONF_COUNTER_ID])], True) + coordinator = hass.data[DOMAIN][entry.entry_id] + counter_id = entry.data[CONF_COUNTER_ID] + async_add_entities( + SuezWaterSensor(coordinator, counter_id, description) for description in SENSORS + ) -class SuezSensor(SensorEntity): - """Representation of a Sensor.""" + +class SuezWaterSensor(CoordinatorEntity[SuezWaterCoordinator], SensorEntity): + """Representation of a Suez water sensor.""" _attr_has_entity_name = True - _attr_translation_key = "water_usage_yesterday" - _attr_native_unit_of_measurement = UnitOfVolume.LITERS - _attr_device_class = SensorDeviceClass.WATER - - def __init__(self, client: SuezClient, counter_id: int) -> None: - """Initialize the data object.""" - self.client = client - self._attr_extra_state_attributes = {} - self._attr_unique_id = f"{counter_id}_water_usage_yesterday" + _attr_attribution = ATTRIBUTION + entity_description: SuezWaterSensorEntityDescription + + def __init__( + self, + coordinator: SuezWaterCoordinator, + counter_id: int, + entity_description: SuezWaterSensorEntityDescription, + ) -> None: + """Initialize the suez water sensor entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{counter_id}_{entity_description.key}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, str(counter_id))}, entry_type=DeviceEntryType.SERVICE, manufacturer="Suez", ) + self.entity_description = entity_description + + @property + def native_value(self) -> float | str | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) - def _fetch_data(self) -> None: - """Fetch latest data from Suez.""" - try: - self.client.update() - # _state holds the volume of consumed water during previous day - self._attr_native_value = self.client.state - self._attr_available = True - self._attr_attribution = self.client.attributes["attribution"] - - self._attr_extra_state_attributes["this_month_consumption"] = {} - for item in self.client.attributes["thisMonthConsumption"]: - self._attr_extra_state_attributes["this_month_consumption"][item] = ( - self.client.attributes["thisMonthConsumption"][item] - ) - self._attr_extra_state_attributes["previous_month_consumption"] = {} - for item in self.client.attributes["previousMonthConsumption"]: - self._attr_extra_state_attributes["previous_month_consumption"][ - item - ] = self.client.attributes["previousMonthConsumption"][item] - self._attr_extra_state_attributes["highest_monthly_consumption"] = ( - self.client.attributes["highestMonthlyConsumption"] - ) - self._attr_extra_state_attributes["last_year_overall"] = ( - 
self.client.attributes["lastYearOverAll"] - ) - self._attr_extra_state_attributes["this_year_overall"] = ( - self.client.attributes["thisYearOverAll"] - ) - self._attr_extra_state_attributes["history"] = {} - for item in self.client.attributes["history"]: - self._attr_extra_state_attributes["history"][item] = ( - self.client.attributes["history"][item] - ) - - except PySuezError: - self._attr_available = False - _LOGGER.warning("Unable to fetch data") - - def update(self) -> None: - """Return the latest collected data from Suez.""" - self._fetch_data() - _LOGGER.debug("Suez data state is: %s", self.native_value) + @property + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return extra state of the sensor.""" + return self.entity_description.attr_fn(self.coordinator.data) diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index f9abd70fc19866..6be2affab9779f 100644 --- a/homeassistant/components/suez_water/strings.json +++ b/homeassistant/components/suez_water/strings.json @@ -12,7 +12,8 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "unknown": "[%key:common::config_flow::error::unknown%]", + "counter_not_found": "Could not find counter id automatically" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" @@ -22,6 +23,9 @@ "sensor": { "water_usage_yesterday": { "name": "Water usage yesterday" + }, + "water_price": { + "name": "Water price" } } } diff --git a/homeassistant/components/sunweg/config_flow.py b/homeassistant/components/sunweg/config_flow.py index 2b5e49c2cb975a..24df8c02f55dae 100644 --- a/homeassistant/components/sunweg/config_flow.py +++ b/homeassistant/components/sunweg/config_flow.py @@ -124,12 +124,6 @@ async def async_step_reauth_confirm( if conf_result is not None: return conf_result - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - if entry is not None: - data: Mapping[str, Any] = self.data - self.hass.config_entries.async_update_entry(entry, data=data) - self.hass.async_create_task( - self.hass.config_entries.async_reload(entry.entry_id) - ) - - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self.data + ) diff --git a/homeassistant/components/sunweg/manifest.json b/homeassistant/components/sunweg/manifest.json index 998d3610735b6e..3ebe9ef8cb40e8 100644 --- a/homeassistant/components/sunweg/manifest.json +++ b/homeassistant/components/sunweg/manifest.json @@ -3,7 +3,7 @@ "name": "Sun WEG", "codeowners": ["@rokam"], "config_flow": true, - "documentation": "https://www.home-assistant.io/integrations/sunweg/", + "documentation": "https://www.home-assistant.io/integrations/sunweg", "iot_class": "cloud_polling", "loggers": ["sunweg"], "requirements": ["sunweg==3.0.2"] diff --git a/homeassistant/components/sunweg/strings.json b/homeassistant/components/sunweg/strings.json index 6033bc314bc463..9ab7be053b1e50 100644 --- a/homeassistant/components/sunweg/strings.json +++ b/homeassistant/components/sunweg/strings.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_plants": "No plants have been found on this account", "reauth_successful": 
"[%key:common::config_flow::abort::reauth_successful%]" }, diff --git a/homeassistant/components/surepetcare/config_flow.py b/homeassistant/components/surepetcare/config_flow.py index a993e9a47f17ce..472d7ac10f0be0 100644 --- a/homeassistant/components/surepetcare/config_flow.py +++ b/homeassistant/components/surepetcare/config_flow.py @@ -10,7 +10,7 @@ from surepy.exceptions import SurePetcareAuthenticationError, SurePetcareError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -31,8 +31,6 @@ class SurePetCareConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -72,20 +70,17 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self.reauth_entry errors = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: client = surepy.Surepy( - self.reauth_entry.data[CONF_USERNAME], + reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD], auth_token=None, api_timeout=SURE_API_TIMEOUT, @@ -102,9 +97,8 @@ async def async_step_reauth_confirm( errors["base"] = "unknown" else: return self.async_update_reload_and_abort( - self.reauth_entry, - data={ - **self.reauth_entry.data, + reauth_entry, + data_updates={ CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_TOKEN: token, }, @@ -112,9 +106,7 @@ async def async_step_reauth_confirm( return self.async_show_form( step_id="reauth_confirm", - description_placeholders={ - "username": self.reauth_entry.data[CONF_USERNAME] - }, + description_placeholders={"username": reauth_entry.data[CONF_USERNAME]}, data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), errors=errors, ) diff --git a/homeassistant/components/surepetcare/strings.json b/homeassistant/components/surepetcare/strings.json index c3b7864f36ae2a..58db669732a7f9 100644 --- a/homeassistant/components/surepetcare/strings.json +++ b/homeassistant/components/surepetcare/strings.json @@ -21,7 +21,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "services": { diff --git a/homeassistant/components/swiss_public_transport/__init__.py b/homeassistant/components/swiss_public_transport/__init__.py index dc1d0eb236c7d1..bceac6007a261e 100644 --- a/homeassistant/components/swiss_public_transport/__init__.py +++ b/homeassistant/components/swiss_public_transport/__init__.py @@ -8,8 +8,8 @@ OpendataTransportError, ) -from homeassistant import config_entries, core from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError, 
ConfigEntryNotReady from homeassistant.helpers import ( config_validation as cv, @@ -20,7 +20,10 @@ from homeassistant.helpers.typing import ConfigType from .const import CONF_DESTINATION, CONF_START, CONF_VIA, DOMAIN, PLACEHOLDERS -from .coordinator import SwissPublicTransportDataUpdateCoordinator +from .coordinator import ( + SwissPublicTransportConfigEntry, + SwissPublicTransportDataUpdateCoordinator, +) from .helper import unique_id_from_config from .services import setup_services @@ -32,14 +35,14 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -async def async_setup(hass: core.HomeAssistant, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Swiss public transport component.""" setup_services(hass) return True async def async_setup_entry( - hass: core.HomeAssistant, entry: config_entries.ConfigEntry + hass: HomeAssistant, entry: SwissPublicTransportConfigEntry ) -> bool: """Set up Swiss public transport from a config entry.""" config = entry.data @@ -74,24 +77,21 @@ async def async_setup_entry( coordinator = SwissPublicTransportDataUpdateCoordinator(hass, opendata) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True async def async_unload_entry( - hass: core.HomeAssistant, entry: config_entries.ConfigEntry + hass: HomeAssistant, entry: SwissPublicTransportConfigEntry ) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def async_migrate_entry( - hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry + hass: HomeAssistant, config_entry: SwissPublicTransportConfigEntry ) -> bool: """Migrate config entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) diff --git a/homeassistant/components/swiss_public_transport/coordinator.py b/homeassistant/components/swiss_public_transport/coordinator.py index f91f9a7c768328..e6413e6f772692 100644 --- a/homeassistant/components/swiss_public_transport/coordinator.py +++ b/homeassistant/components/swiss_public_transport/coordinator.py @@ -16,11 +16,16 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util +from homeassistant.util.json import JsonValueType from .const import CONNECTIONS_COUNT, DEFAULT_UPDATE_TIME, DOMAIN _LOGGER = logging.getLogger(__name__) +type SwissPublicTransportConfigEntry = ConfigEntry[ + SwissPublicTransportDataUpdateCoordinator +] + class DataConnection(TypedDict): """A connection data class.""" @@ -50,7 +55,7 @@ class SwissPublicTransportDataUpdateCoordinator( ): """A SwissPublicTransport Data Update Coordinator.""" - config_entry: ConfigEntry + config_entry: SwissPublicTransportConfigEntry def __init__(self, hass: HomeAssistant, opendata: OpendataTransport) -> None: """Initialize the SwissPublicTransport data coordinator.""" @@ -70,13 +75,6 @@ def remaining_time(self, departure) -> timedelta | None: return departure_datetime - dt_util.as_local(dt_util.utcnow()) return None - def nth_departure_time(self, i: int) -> datetime | None: - """Get nth departure time.""" - connections = self._opendata.connections - 
if len(connections) > i and connections[i] is not None: - return dt_util.parse_datetime(connections[i]["departure"]) - return None - async def _async_update_data(self) -> list[DataConnection]: return await self.fetch_connections(limit=CONNECTIONS_COUNT) @@ -96,7 +94,7 @@ async def fetch_connections(self, limit: int) -> list[DataConnection]: connections = self._opendata.connections return [ DataConnection( - departure=self.nth_departure_time(i), + departure=dt_util.parse_datetime(connections[i]["departure"]), train_number=connections[i]["number"], platform=connections[i]["platform"], transfers=connections[i]["transfers"], @@ -110,3 +108,23 @@ async def fetch_connections(self, limit: int) -> list[DataConnection]: for i in range(limit) if len(connections) > i and connections[i] is not None ] + + async def fetch_connections_as_json(self, limit: int) -> list[JsonValueType]: + """Fetch connections using the opendata api.""" + return [ + { + "departure": connection["departure"].isoformat() + if connection["departure"] + else None, + "duration": connection["duration"], + "platform": connection["platform"], + "remaining_time": connection["remaining_time"], + "start": connection["start"], + "destination": connection["destination"], + "train_number": connection["train_number"], + "transfers": connection["transfers"], + "delay": connection["delay"], + "line": connection["line"], + } + for connection in await self.fetch_connections(limit) + ] diff --git a/homeassistant/components/swiss_public_transport/sensor.py b/homeassistant/components/swiss_public_transport/sensor.py index eb73ce030623fb..452ec31972f873 100644 --- a/homeassistant/components/swiss_public_transport/sensor.py +++ b/homeassistant/components/swiss_public_transport/sensor.py @@ -8,20 +8,24 @@ import logging from typing import TYPE_CHECKING -from homeassistant import config_entries, core from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, ) from homeassistant.const import UnitOfTime +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONNECTIONS_COUNT, DOMAIN -from .coordinator import DataConnection, SwissPublicTransportDataUpdateCoordinator +from .coordinator import ( + DataConnection, + SwissPublicTransportConfigEntry, + SwissPublicTransportDataUpdateCoordinator, +) _LOGGER = logging.getLogger(__name__) @@ -80,20 +84,18 @@ class SwissPublicTransportSensorEntityDescription(SensorEntityDescription): async def async_setup_entry( - hass: core.HomeAssistant, - config_entry: config_entries.ConfigEntry, + hass: HomeAssistant, + config_entry: SwissPublicTransportConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the sensor from a config entry created in the integrations UI.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] - unique_id = config_entry.unique_id if TYPE_CHECKING: assert unique_id async_add_entities( - SwissPublicTransportSensor(coordinator, description, unique_id) + SwissPublicTransportSensor(config_entry.runtime_data, description, unique_id) for description in SENSORS ) diff --git a/homeassistant/components/swiss_public_transport/services.py b/homeassistant/components/swiss_public_transport/services.py index e8b7c6bd45888e..3abf1a14b9f4ce 100644 --- 
a/homeassistant/components/swiss_public_transport/services.py +++ b/homeassistant/components/swiss_public_transport/services.py @@ -2,7 +2,6 @@ import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, @@ -26,6 +25,7 @@ DOMAIN, SERVICE_FETCH_CONNECTIONS, ) +from .coordinator import SwissPublicTransportConfigEntry SERVICE_FETCH_CONNECTIONS_SCHEMA = vol.Schema( { @@ -41,7 +41,7 @@ def async_get_entry( hass: HomeAssistant, config_entry_id: str -) -> config_entries.ConfigEntry: +) -> SwissPublicTransportConfigEntry: """Get the Swiss public transport config entry.""" if not (entry := hass.config_entries.async_get_entry(config_entry_id)): raise ServiceValidationError( @@ -66,10 +66,12 @@ async def async_fetch_connections( ) -> ServiceResponse: """Fetch a set of connections.""" config_entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID]) + limit = call.data.get(ATTR_LIMIT) or CONNECTIONS_COUNT - coordinator = hass.data[DOMAIN][config_entry.entry_id] try: - connections = await coordinator.fetch_connections(limit=int(limit)) + connections = await config_entry.runtime_data.fetch_connections_as_json( + limit=int(limit) + ) except UpdateFailed as e: raise HomeAssistantError( translation_domain=DOMAIN, diff --git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py index 75845d3f3cef34..c2b4b2ad736b11 100644 --- a/homeassistant/components/switchbot/__init__.py +++ b/homeassistant/components/switchbot/__init__.py @@ -41,6 +41,7 @@ Platform.SENSOR, ], SupportedModels.HYGROMETER.value: [Platform.SENSOR], + SupportedModels.HYGROMETER_CO2.value: [Platform.SENSOR], SupportedModels.CONTACT.value: [Platform.BINARY_SENSOR, Platform.SENSOR], SupportedModels.MOTION.value: [Platform.BINARY_SENSOR, Platform.SENSOR], SupportedModels.HUMIDIFIER.value: [Platform.HUMIDIFIER, Platform.SENSOR], diff --git a/homeassistant/components/switchbot/config_flow.py b/homeassistant/components/switchbot/config_flow.py index 0468db5618adf7..a0e451697709ea 100644 --- a/homeassistant/components/switchbot/config_flow.py +++ b/homeassistant/components/switchbot/config_flow.py @@ -80,7 +80,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> SwitchbotOptionsFlowHandler: """Get the options flow for this handler.""" - return SwitchbotOptionsFlowHandler(config_entry) + return SwitchbotOptionsFlowHandler() def __init__(self) -> None: """Initialize the config flow.""" @@ -346,10 +346,6 @@ async def async_step_user( class SwitchbotOptionsFlowHandler(OptionsFlow): """Handle Switchbot options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index bd727edfea400c..19b264bd46f2fa 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -20,6 +20,7 @@ class SupportedModels(StrEnum): CEILING_LIGHT = "ceiling_light" CURTAIN = "curtain" HYGROMETER = "hygrometer" + HYGROMETER_CO2 = "hygrometer_co2" LIGHT_STRIP = "light_strip" CONTACT = "contact" PLUG = "plug" @@ -48,6 +49,8 @@ class SupportedModels(StrEnum): NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = { SwitchbotModel.METER: SupportedModels.HYGROMETER, SwitchbotModel.IO_METER: 
SupportedModels.HYGROMETER, + SwitchbotModel.METER_PRO: SupportedModels.HYGROMETER, + SwitchbotModel.METER_PRO_C: SupportedModels.HYGROMETER_CO2, SwitchbotModel.CONTACT_SENSOR: SupportedModels.CONTACT, SwitchbotModel.MOTION_SENSOR: SupportedModels.MOTION, } diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index f97162184c68ce..0e369f8ad2d073 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.48.2"] + "requirements": ["PySwitchbot==0.51.0"] } diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index e696f21e082aa9..fd3de3e31e9bdf 100644 --- a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -10,6 +10,7 @@ SensorStateClass, ) from homeassistant.const import ( + CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -50,6 +51,12 @@ state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), + "co2": SensorEntityDescription( + key="co2", + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CO2, + ), "lightLevel": SensorEntityDescription( key="lightLevel", translation_key="light_level", diff --git a/homeassistant/components/switchbot_cloud/__init__.py b/homeassistant/components/switchbot_cloud/__init__.py index 39a179aaa2115d..625b4698301e38 100644 --- a/homeassistant/components/switchbot_cloud/__init__.py +++ b/homeassistant/components/switchbot_cloud/__init__.py @@ -17,6 +17,7 @@ _LOGGER = getLogger(__name__) PLATFORMS: list[Platform] = [ Platform.CLIMATE, + Platform.LOCK, Platform.SENSOR, Platform.SWITCH, Platform.VACUUM, @@ -31,6 +32,7 @@ class SwitchbotDevices: switches: list[Device | Remote] = field(default_factory=list) sensors: list[Device] = field(default_factory=list) vacuums: list[Device] = field(default_factory=list) + locks: list[Device] = field(default_factory=list) @dataclass @@ -83,6 +85,9 @@ def make_device_data( "Meter", "MeterPlus", "WoIOSensor", + "Hub 2", + "MeterPro", + "MeterPro(CO2)", ]: devices_data.sensors.append( prepare_device(hass, api, device, coordinators_by_id) @@ -97,6 +102,10 @@ def make_device_data( prepare_device(hass, api, device, coordinators_by_id) ) + if isinstance(device, Device) and device.device_type.startswith("Smart Lock"): + devices_data.locks.append( + prepare_device(hass, api, device, coordinators_by_id) + ) return devices_data diff --git a/homeassistant/components/switchbot_cloud/lock.py b/homeassistant/components/switchbot_cloud/lock.py new file mode 100644 index 00000000000000..2fbd551b919c77 --- /dev/null +++ b/homeassistant/components/switchbot_cloud/lock.py @@ -0,0 +1,53 @@ +"""Support for the Switchbot lock.""" + +from typing import Any + +from switchbot_api import LockCommands + +from homeassistant.components.lock import LockEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SwitchbotCloudData +from .const import DOMAIN +from .entity import SwitchBotCloudEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SwitchBot Cloud entry.""" + data: SwitchbotCloudData = hass.data[DOMAIN][config.entry_id] + async_add_entities( + SwitchBotCloudLock(data.api, device, coordinator) + for device, coordinator in data.devices.locks + ) + + +class SwitchBotCloudLock(SwitchBotCloudEntity, LockEntity): + """Representation of a SwitchBot lock.""" + + _attr_name = None + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if coord_data := self.coordinator.data: + self._attr_is_locked = coord_data["lockState"] == "locked" + self.async_write_ha_state() + + async def async_lock(self, **kwargs: Any) -> None: + """Lock the lock.""" + await self.send_api_command(LockCommands.LOCK) + self._attr_is_locked = True + self.async_write_ha_state() + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock the lock.""" + + await self.send_api_command(LockCommands.UNLOCK) + self._attr_is_locked = False + self.async_write_ha_state() diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py index ac612aea1194a3..90135ad96b3497 100644 --- a/homeassistant/components/switchbot_cloud/sensor.py +++ b/homeassistant/components/switchbot_cloud/sensor.py @@ -9,7 +9,11 @@ SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import PERCENTAGE, UnitOfTemperature +from homeassistant.const import ( + CONCENTRATION_PARTS_PER_MILLION, + PERCENTAGE, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -21,6 +25,7 @@ SENSOR_TYPE_TEMPERATURE = "temperature" SENSOR_TYPE_HUMIDITY = "humidity" SENSOR_TYPE_BATTERY = "battery" +SENSOR_TYPE_CO2 = "CO2" METER_PLUS_SENSOR_DESCRIPTIONS = ( SensorEntityDescription( @@ -43,6 +48,16 @@ ), ) +METER_PRO_CO2_SENSOR_DESCRIPTIONS = ( + *METER_PLUS_SENSOR_DESCRIPTIONS, + SensorEntityDescription( + key=SENSOR_TYPE_CO2, + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CO2, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -55,7 +70,11 @@ async def async_setup_entry( async_add_entities( SwitchBotCloudSensor(data.api, device, coordinator, description) for device, coordinator in data.devices.sensors - for description in METER_PLUS_SENSOR_DESCRIPTIONS + for description in ( + METER_PRO_CO2_SENSOR_DESCRIPTIONS + if device.device_type == "MeterPro(CO2)" + else METER_PLUS_SENSOR_DESCRIPTIONS + ) ) diff --git a/homeassistant/components/switcher_kis/config_flow.py b/homeassistant/components/switcher_kis/config_flow.py index e34961ebf6c38e..e6c2e8e8589db3 100644 --- a/homeassistant/components/switcher_kis/config_flow.py +++ b/homeassistant/components/switcher_kis/config_flow.py @@ -10,7 +10,7 @@ from aioswitcher.device.tools import validate_token import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_TOKEN, CONF_USERNAME from .const import DOMAIN @@ -32,7 +32,6 @@ class SwitcherFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None = None 
username: str | None = None token: str | None = None discovered_devices: dict[str, SwitcherBase] = {} @@ -82,7 +81,6 @@ async def async_step_reauth( self, user_input: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -90,7 +88,6 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} - assert self.entry is not None if user_input is not None: token_is_valid = await validate_token( @@ -98,7 +95,7 @@ async def async_step_reauth_confirm( ) if token_is_valid: return self.async_update_reload_and_abort( - self.entry, data={**self.entry.data, **user_input} + self._get_reauth_entry(), data_updates=user_input ) errors["base"] = "invalid_auth" diff --git a/homeassistant/components/switcher_kis/coordinator.py b/homeassistant/components/switcher_kis/coordinator.py index d292e9f8f393a1..118c86b8d78a8a 100644 --- a/homeassistant/components/switcher_kis/coordinator.py +++ b/homeassistant/components/switcher_kis/coordinator.py @@ -23,6 +23,8 @@ class SwitcherDataUpdateCoordinator( ): """Switcher device data update coordinator.""" + config_entry: ConfigEntry + def __init__( self, hass: HomeAssistant, @@ -33,10 +35,10 @@ def __init__( super().__init__( hass, _LOGGER, + config_entry=entry, name=device.name, update_interval=timedelta(seconds=MAX_UPDATE_INTERVAL_SEC), ) - self.entry = entry self.data = device self.token = entry.data.get(CONF_TOKEN) @@ -67,7 +69,7 @@ def async_setup(self) -> None: """Set up the coordinator.""" dev_reg = dr.async_get(self.hass) dev_reg.async_get_or_create( - config_entry_id=self.entry.entry_id, + config_entry_id=self.config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, self.mac_address)}, identifiers={(DOMAIN, self.device_id)}, manufacturer="Switcher", diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index d81611b1629b40..dc3b6d96aed196 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -40,21 +40,31 @@ async def async_setup_entry( @callback def async_add_cover(coordinator: SwitcherDataUpdateCoordinator) -> None: """Add cover from Switcher device.""" + entities: list[CoverEntity] = [] + if coordinator.data.device_type.category in ( DeviceCategory.SHUTTER, DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, + DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, ): - async_add_entities([SwitcherCoverEntity(coordinator, 0)]) + number_of_covers = len(cast(SwitcherShutter, coordinator.data).position) + if number_of_covers == 1: + entities.append(SwitcherSingleCoverEntity(coordinator, 0)) + else: + entities.extend( + SwitcherMultiCoverEntity(coordinator, i) + for i in range(number_of_covers) + ) + async_add_entities(entities) config_entry.async_on_unload( async_dispatcher_connect(hass, SIGNAL_DEVICE_ADD, async_add_cover) ) -class SwitcherCoverEntity(SwitcherEntity, CoverEntity): +class SwitcherBaseCoverEntity(SwitcherEntity, CoverEntity): """Representation of a Switcher cover entity.""" - _attr_name = None _attr_device_class = CoverDeviceClass.SHUTTER _attr_supported_features = ( CoverEntityFeature.OPEN @@ -62,19 +72,7 @@ class SwitcherCoverEntity(SwitcherEntity, CoverEntity): | CoverEntityFeature.SET_POSITION | CoverEntityFeature.STOP ) - - def __init__( - self, - coordinator: 
SwitcherDataUpdateCoordinator, - cover_id: int | None = None, - ) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - self._cover_id = cover_id - - self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" - - self._update_data() + _cover_id: int @callback def _handle_coordinator_update(self) -> None: @@ -85,10 +83,14 @@ def _handle_coordinator_update(self) -> None: def _update_data(self) -> None: """Update data from device.""" data = cast(SwitcherShutter, self.coordinator.data) - self._attr_current_cover_position = data.position - self._attr_is_closed = data.position == 0 - self._attr_is_closing = data.direction == ShutterDirection.SHUTTER_DOWN - self._attr_is_opening = data.direction == ShutterDirection.SHUTTER_UP + self._attr_current_cover_position = data.position[self._cover_id] + self._attr_is_closed = data.position[self._cover_id] == 0 + self._attr_is_closing = ( + data.direction[self._cover_id] == ShutterDirection.SHUTTER_DOWN + ) + self._attr_is_opening = ( + data.direction[self._cover_id] == ShutterDirection.SHUTTER_UP + ) async def _async_call_api(self, api: str, *args: Any) -> None: """Call Switcher API.""" @@ -133,3 +135,44 @@ async def async_set_cover_position(self, **kwargs: Any) -> None: async def async_stop_cover(self, **kwargs: Any) -> None: """Stop the cover.""" await self._async_call_api(API_STOP, self._cover_id) + + +class SwitcherSingleCoverEntity(SwitcherBaseCoverEntity): + """Representation of a Switcher single cover entity.""" + + _attr_name = None + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + cover_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._cover_id = cover_id + + self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" + + self._update_data() + + +class SwitcherMultiCoverEntity(SwitcherBaseCoverEntity): + """Representation of a Switcher multiple cover entity.""" + + _attr_translation_key = "cover" + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + cover_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._cover_id = cover_id + + self._attr_translation_placeholders = {"cover_id": str(cover_id + 1)} + self._attr_unique_id = ( + f"{coordinator.device_id}-{coordinator.mac_address}-{cover_id}" + ) + + self._update_data() diff --git a/homeassistant/components/switcher_kis/light.py b/homeassistant/components/switcher_kis/light.py index d3e8d52bc006a0..bd87176bcf0029 100644 --- a/homeassistant/components/switcher_kis/light.py +++ b/homeassistant/components/switcher_kis/light.py @@ -6,11 +6,7 @@ from typing import Any, cast from aioswitcher.api import SwitcherBaseResponse, SwitcherType2Api -from aioswitcher.device import ( - DeviceCategory, - DeviceState, - SwitcherSingleShutterDualLight, -) +from aioswitcher.device import DeviceCategory, DeviceState, SwitcherLight from homeassistant.components.light import ColorMode, LightEntity from homeassistant.config_entries import ConfigEntry @@ -38,42 +34,35 @@ async def async_setup_entry( @callback def async_add_light(coordinator: SwitcherDataUpdateCoordinator) -> None: """Add light from Switcher device.""" - if ( - coordinator.data.device_type.category - == DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT + entities: list[LightEntity] = [] + + if coordinator.data.device_type.category in ( + DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, + DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, + DeviceCategory.LIGHT, ): - async_add_entities( - [ - 
SwitcherLightEntity(coordinator, 0), - SwitcherLightEntity(coordinator, 1), - ] - ) + number_of_lights = len(cast(SwitcherLight, coordinator.data).light) + if number_of_lights == 1: + entities.append(SwitcherSingleLightEntity(coordinator, 0)) + else: + entities.extend( + SwitcherMultiLightEntity(coordinator, i) + for i in range(number_of_lights) + ) + async_add_entities(entities) config_entry.async_on_unload( async_dispatcher_connect(hass, SIGNAL_DEVICE_ADD, async_add_light) ) -class SwitcherLightEntity(SwitcherEntity, LightEntity): +class SwitcherBaseLightEntity(SwitcherEntity, LightEntity): """Representation of a Switcher light entity.""" _attr_color_mode = ColorMode.ONOFF _attr_supported_color_modes = {ColorMode.ONOFF} - _attr_translation_key = "light" - - def __init__( - self, coordinator: SwitcherDataUpdateCoordinator, light_id: int - ) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - self._light_id = light_id - self.control_result: bool | None = None - - # Entity class attributes - self._attr_translation_placeholders = {"light_id": str(light_id + 1)} - self._attr_unique_id = ( - f"{coordinator.device_id}-{coordinator.mac_address}-{light_id}" - ) + control_result: bool | None = None + _light_id: int @callback def _handle_coordinator_update(self) -> None: @@ -87,8 +76,8 @@ def is_on(self) -> bool: if self.control_result is not None: return self.control_result - data = cast(SwitcherSingleShutterDualLight, self.coordinator.data) - return bool(data.lights[self._light_id] == DeviceState.ON) + data = cast(SwitcherLight, self.coordinator.data) + return bool(data.light[self._light_id] == DeviceState.ON) async def _async_call_api(self, api: str, *args: Any) -> None: """Call Switcher API.""" @@ -127,3 +116,44 @@ async def async_turn_off(self, **kwargs: Any) -> None: await self._async_call_api(API_SET_LIGHT, DeviceState.OFF, self._light_id) self.control_result = False self.async_write_ha_state() + + +class SwitcherSingleLightEntity(SwitcherBaseLightEntity): + """Representation of a Switcher single light entity.""" + + _attr_name = None + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + light_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._light_id = light_id + self.control_result: bool | None = None + + # Entity class attributes + self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" + + +class SwitcherMultiLightEntity(SwitcherBaseLightEntity): + """Representation of a Switcher multiple light entity.""" + + _attr_translation_key = "light" + + def __init__( + self, + coordinator: SwitcherDataUpdateCoordinator, + light_id: int, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._light_id = light_id + self.control_result: bool | None = None + + # Entity class attributes + self._attr_translation_placeholders = {"light_id": str(light_id + 1)} + self._attr_unique_id = ( + f"{coordinator.device_id}-{coordinator.mac_address}-{light_id}" + ) diff --git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 902316f374ecec..4a50d992d6da25 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_push", "loggers": ["aioswitcher"], "quality_scale": "platinum", - "requirements": ["aioswitcher==4.0.3"], + "requirements": ["aioswitcher==4.4.0"], "single_config_entry": true } diff --git 
a/homeassistant/components/switcher_kis/strings.json b/homeassistant/components/switcher_kis/strings.json index 68f9f9d590c359..798a43c981cd4d 100644 --- a/homeassistant/components/switcher_kis/strings.json +++ b/homeassistant/components/switcher_kis/strings.json @@ -43,6 +43,11 @@ "name": "Vertical swing off" } }, + "cover": { + "cover": { + "name": "Cover {cover_id}" + } + }, "light": { "light": { "name": "Light {light_id}" diff --git a/homeassistant/components/syncthru/__init__.py b/homeassistant/components/syncthru/__init__.py index b3d1230fdfe764..2817f4c21ce29f 100644 --- a/homeassistant/components/syncthru/__init__.py +++ b/homeassistant/components/syncthru/__init__.py @@ -52,6 +52,7 @@ async def async_update_data() -> SyncThru: coordinator = DataUpdateCoordinator[SyncThru]( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=timedelta(seconds=30), diff --git a/homeassistant/components/synology_dsm/config_flow.py b/homeassistant/components/synology_dsm/config_flow.py index 70ab13c5c095a1..918a24035f8887 100644 --- a/homeassistant/components/synology_dsm/config_flow.py +++ b/homeassistant/components/synology_dsm/config_flow.py @@ -118,7 +118,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> SynologyDSMOptionsFlowHandler: """Get the options flow for this handler.""" - return SynologyDSMOptionsFlowHandler(config_entry) + return SynologyDSMOptionsFlowHandler() def __init__(self) -> None: """Initialize the synology_dsm config flow.""" @@ -376,10 +376,6 @@ def _async_get_existing_entry(self, discovered_mac: str) -> ConfigEntry | None: class SynologyDSMOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/systemmonitor/coordinator.py b/homeassistant/components/systemmonitor/coordinator.py index d12eddbb14ad27..32a171a11caca8 100644 --- a/homeassistant/components/systemmonitor/coordinator.py +++ b/homeassistant/components/systemmonitor/coordinator.py @@ -37,17 +37,29 @@ class SensorData: def as_dict(self) -> dict[str, Any]: """Return as dict.""" + disk_usage = None + if self.disk_usage: + disk_usage = {k: str(v) for k, v in self.disk_usage.items()} + io_counters = None + if self.io_counters: + io_counters = {k: str(v) for k, v in self.io_counters.items()} + addresses = None + if self.addresses: + addresses = {k: str(v) for k, v in self.addresses.items()} + temperatures = None + if self.temperatures: + temperatures = {k: str(v) for k, v in self.temperatures.items()} return { - "disk_usage": {k: str(v) for k, v in self.disk_usage.items()}, + "disk_usage": disk_usage, "swap": str(self.swap), "memory": str(self.memory), - "io_counters": {k: str(v) for k, v in self.io_counters.items()}, - "addresses": {k: str(v) for k, v in self.addresses.items()}, + "io_counters": io_counters, + "addresses": addresses, "load": str(self.load), "cpu_percent": str(self.cpu_percent), "boot_time": str(self.boot_time), "processes": str(self.processes), - "temperatures": {k: str(v) for k, v in self.temperatures.items()}, + "temperatures": temperatures, } diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index 236f25bb1ed419..4c6ae0653d3079 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ 
b/homeassistant/components/systemmonitor/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.0.0"] + "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.1.0"] } diff --git a/homeassistant/components/tado/config_flow.py b/homeassistant/components/tado/config_flow.py index 9fd2030844ff23..c7bb76849010da 100644 --- a/homeassistant/components/tado/config_flow.py +++ b/homeassistant/components/tado/config_flow.py @@ -73,7 +73,6 @@ class TadoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Tado.""" VERSION = 1 - config_entry: ConfigEntry async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -118,19 +117,13 @@ async def async_step_homekit( async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - self.config_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() if user_input is not None: - user_input[CONF_USERNAME] = self.config_entry.data[CONF_USERNAME] + user_input[CONF_USERNAME] = reconfigure_entry.data[CONF_USERNAME] try: await validate_input(self.hass, user_input) except CannotConnect: @@ -145,13 +138,11 @@ async def async_step_reconfigure_confirm( if not errors: return self.async_update_reload_and_abort( - self.config_entry, - data={**self.config_entry.data, **user_input}, - reason="reconfigure_successful", + reconfigure_entry, data_updates=user_input ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required(CONF_PASSWORD): str, @@ -159,7 +150,7 @@ async def async_step_reconfigure_confirm( ), errors=errors, description_placeholders={ - CONF_USERNAME: self.config_entry.data[CONF_USERNAME] + CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME] }, ) @@ -169,16 +160,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an option flow for Tado.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/tado/manifest.json b/homeassistant/components/tado/manifest.json index b0c00c888b7b6b..652d51f02619b6 100644 --- a/homeassistant/components/tado/manifest.json +++ b/homeassistant/components/tado/manifest.json @@ -14,5 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["PyTado"], - "requirements": ["python-tado==0.17.6"] + "requirements": ["python-tado==0.17.7"] } diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index 39453cb5fe1c62..8124570f9c9c9b 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -12,7 +12,7 @@ }, "title": "Connect to your Tado account" }, - "reconfigure_confirm": { + "reconfigure": { 
"title": "Reconfigure your Tado", "description": "Reconfigure the entry, for your account: `{username}`.", "data": { diff --git a/homeassistant/components/tag/__init__.py b/homeassistant/components/tag/__init__.py index 0462c5bec3488e..95efae3d386b7a 100644 --- a/homeassistant/components/tag/__init__.py +++ b/homeassistant/components/tag/__init__.py @@ -84,7 +84,9 @@ def _create_entry( original_name=f"{DEFAULT_NAME} {tag_id}", suggested_object_id=slugify(name) if name else tag_id, ) - return entity_registry.async_update_entity(entry.entity_id, name=name) + if name: + return entity_registry.async_update_entity(entry.entity_id, name=name) + return entry class TagStore(Store[collection.SerializedStorageCollection]): diff --git a/homeassistant/components/tailscale/config_flow.py b/homeassistant/components/tailscale/config_flow.py index c5888e64f711da..ab57e9eadc6942 100644 --- a/homeassistant/components/tailscale/config_flow.py +++ b/homeassistant/components/tailscale/config_flow.py @@ -8,7 +8,7 @@ from tailscale import Tailscale, TailscaleAuthenticationError, TailscaleError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -34,8 +34,6 @@ class TailscaleFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -86,9 +84,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Tailscale.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -97,11 +92,12 @@ async def async_step_reauth_confirm( """Handle re-authentication with Tailscale.""" errors = {} - if user_input is not None and self.reauth_entry: + if user_input is not None: + reauth_entry = self._get_reauth_entry() try: await validate_input( self.hass, - tailnet=self.reauth_entry.data[CONF_TAILNET], + tailnet=reauth_entry.data[CONF_TAILNET], api_key=user_input[CONF_API_KEY], ) except TailscaleAuthenticationError: @@ -109,17 +105,10 @@ async def async_step_reauth_confirm( except TailscaleError: errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data={ - **self.reauth_entry.data, - CONF_API_KEY: user_input[CONF_API_KEY], - }, - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.reauth_entry.entry_id) + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_API_KEY: user_input[CONF_API_KEY]}, ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/tailwind/config_flow.py b/homeassistant/components/tailwind/config_flow.py index 13682a3e9c40a9..48fe2d23727de4 100644 --- a/homeassistant/components/tailwind/config_flow.py +++ b/homeassistant/components/tailwind/config_flow.py @@ -17,7 +17,7 @@ from homeassistant.components import zeroconf from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from 
homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -41,7 +41,6 @@ class TailwindFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 host: str - reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -148,9 +147,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with a Tailwind device.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -159,10 +155,10 @@ async def async_step_reauth_confirm( """Handle re-authentication with a Tailwind device.""" errors = {} - if user_input is not None and self.reauth_entry: + if user_input is not None: try: return await self._async_step_create_entry( - host=self.reauth_entry.data[CONF_HOST], + host=self._get_reauth_entry().data[CONF_HOST], token=user_input[CONF_TOKEN], ) except TailwindAuthenticationError: @@ -214,9 +210,9 @@ async def _async_step_create_entry( except TailwindUnsupportedFirmwareVersionError: return self.async_abort(reason="unsupported_firmware") - if self.reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self.reauth_entry, + self._get_reauth_entry(), data={ CONF_HOST: host, CONF_TOKEN: token, diff --git a/homeassistant/components/tankerkoenig/config_flow.py b/homeassistant/components/tankerkoenig/config_flow.py index e5a84374a09115..509f293665dcc9 100644 --- a/homeassistant/components/tankerkoenig/config_flow.py +++ b/homeassistant/components/tankerkoenig/config_flow.py @@ -74,7 +74,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -144,9 +144,8 @@ async def async_step_reauth_confirm( if not user_input: return self._show_form_reauth() - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - user_input = {**entry.data, **user_input} + reauth_entry = self._get_reauth_entry() + user_input = {**reauth_entry.data, **user_input} tankerkoenig = Tankerkoenig( api_key=user_input[CONF_API_KEY], @@ -157,9 +156,7 @@ async def async_step_reauth_confirm( except TankerkoenigInvalidKeyError: return self._show_form_reauth(user_input, {CONF_API_KEY: "invalid_auth"}) - self.hass.config_entries.async_update_entry(entry, data=user_input) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) def _show_form_user( self, @@ -239,9 +236,8 @@ def _create_entry( class OptionsFlowHandler(OptionsFlow): """Handle an options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._stations: dict[str, str] = {} async def async_step_init( diff --git a/homeassistant/components/tankerkoenig/strings.json b/homeassistant/components/tankerkoenig/strings.json index 7017c6e5fed267..29f4f439dd585a 100644 --- 
a/homeassistant/components/tankerkoenig/strings.json +++ b/homeassistant/components/tankerkoenig/strings.json @@ -42,6 +42,9 @@ "show_on_map": "Show stations on map" } } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, "entity": { diff --git a/homeassistant/components/tautulli/config_flow.py b/homeassistant/components/tautulli/config_flow.py index a8378786d18f56..369f9ead2f2a2f 100644 --- a/homeassistant/components/tautulli/config_flow.py +++ b/homeassistant/components/tautulli/config_flow.py @@ -60,14 +60,11 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - if user_input is not None and ( - entry := self.hass.config_entries.async_get_entry(self.context["entry_id"]) - ): - _input = {**entry.data, CONF_API_KEY: user_input[CONF_API_KEY]} + if user_input is not None: + reauth_entry = self._get_reauth_entry() + _input = {**reauth_entry.data, CONF_API_KEY: user_input[CONF_API_KEY]} if (error := await self.validate_input(_input)) is None: - self.hass.config_entries.async_update_entry(entry, data=_input) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=_input) errors["base"] = error return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/tedee/__init__.py b/homeassistant/components/tedee/__init__.py index a1b87cf13a456e..528a5052678c4f 100644 --- a/homeassistant/components/tedee/__init__.py +++ b/homeassistant/components/tedee/__init__.py @@ -7,7 +7,7 @@ from aiohttp.hdrs import METH_POST from aiohttp.web import Request, Response -from pytedee_async.exception import TedeeDataUpdateException, TedeeWebhookException +from aiotedee.exception import TedeeDataUpdateException, TedeeWebhookException from homeassistant.components.http import HomeAssistantView from homeassistant.components.webhook import ( @@ -23,7 +23,7 @@ from homeassistant.helpers.network import get_url from .const import DOMAIN, NAME -from .coordinator import TedeeApiCoordinator +from .coordinator import TedeeApiCoordinator, TedeeConfigEntry PLATFORMS = [ Platform.BINARY_SENSOR, @@ -33,13 +33,11 @@ _LOGGER = logging.getLogger(__name__) -type TedeeConfigEntry = ConfigEntry[TedeeApiCoordinator] - async def async_setup_entry(hass: HomeAssistant, entry: TedeeConfigEntry) -> bool: """Integration setup.""" - coordinator = TedeeApiCoordinator(hass) + coordinator = TedeeApiCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/tedee/binary_sensor.py b/homeassistant/components/tedee/binary_sensor.py index 3a7d1a12f2eaa4..b586db7c2a77c3 100644 --- a/homeassistant/components/tedee/binary_sensor.py +++ b/homeassistant/components/tedee/binary_sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from pytedee_async import TedeeLock -from pytedee_async.lock import TedeeLockState +from aiotedee import TedeeLock +from aiotedee.lock import TedeeLockState from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import TedeeConfigEntry +from .coordinator import TedeeConfigEntry from .entity import TedeeDescriptionEntity diff --git a/homeassistant/components/tedee/config_flow.py b/homeassistant/components/tedee/config_flow.py index 6d399901c9a8ac..422d818d1b5225 100644 --- a/homeassistant/components/tedee/config_flow.py +++ b/homeassistant/components/tedee/config_flow.py @@ -4,7 +4,7 @@ import logging from typing import Any -from pytedee_async import ( +from aiotedee import ( TedeeAuthException, TedeeClient, TedeeClientException, @@ -17,7 +17,6 @@ from homeassistant.config_entries import ( SOURCE_REAUTH, SOURCE_RECONFIGURE, - ConfigEntry, ConfigFlow, ConfigFlowResult, ) @@ -35,9 +34,6 @@ class TedeeConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 2 - reauth_entry: ConfigEntry - reconfigure_entry: ConfigEntry - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -46,7 +42,7 @@ async def async_step_user( if user_input is not None: if self.source == SOURCE_REAUTH: - host = self.reauth_entry.data[CONF_HOST] + host = self._get_reauth_entry().data[CONF_HOST] else: host = user_input[CONF_HOST] local_access_token = user_input[CONF_LOCAL_ACCESS_TOKEN] @@ -65,19 +61,17 @@ async def async_step_user( _LOGGER.error("Error during local bridge discovery: %s", exc) errors["base"] = "cannot_connect" else: + await self.async_set_unique_id(local_bridge.serial) if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() return self.async_update_reload_and_abort( - self.reauth_entry, - data={**self.reauth_entry.data, **user_input}, - reason="reauth_successful", + self._get_reauth_entry(), data_updates=user_input ) if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch() return self.async_update_reload_and_abort( - self.reconfigure_entry, - data={**self.reconfigure_entry.data, **user_input}, - reason="reconfigure_successful", + self._get_reconfigure_entry(), data_updates=user_input ) - await self.async_set_unique_id(local_bridge.serial) self._abort_if_unique_id_configured() return self.async_create_entry( title=NAME, @@ -103,7 +97,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -117,7 +110,9 @@ async def async_step_reauth_confirm( { vol.Required( CONF_LOCAL_ACCESS_TOKEN, - default=self.reauth_entry.data[CONF_LOCAL_ACCESS_TOKEN], + default=self._get_reauth_entry().data[ + CONF_LOCAL_ACCESS_TOKEN + ], ): str, } ), @@ -128,26 +123,18 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Perform a reconfiguration.""" - self.reconfigure_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Add reconfigure step to allow to reconfigure a config entry.""" if not user_input: + reconfigure_entry = self._get_reconfigure_entry() return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=vol.Schema( { vol.Required( - CONF_HOST, default=self.reconfigure_entry.data[CONF_HOST] + CONF_HOST, default=reconfigure_entry.data[CONF_HOST] ): str, vol.Required( CONF_LOCAL_ACCESS_TOKEN, - default=self.reconfigure_entry.data[ - CONF_LOCAL_ACCESS_TOKEN - ], + 
default=reconfigure_entry.data[CONF_LOCAL_ACCESS_TOKEN], ): str, } ), diff --git a/homeassistant/components/tedee/coordinator.py b/homeassistant/components/tedee/coordinator.py index 1dab31b052bb67..445585a1a2c6ec 100644 --- a/homeassistant/components/tedee/coordinator.py +++ b/homeassistant/components/tedee/coordinator.py @@ -1,12 +1,14 @@ """Coordinator for Tedee locks.""" +from __future__ import annotations + from collections.abc import Awaitable, Callable from datetime import timedelta import logging import time from typing import Any -from pytedee_async import ( +from aiotedee import ( TedeeClient, TedeeClientException, TedeeDataUpdateException, @@ -14,7 +16,7 @@ TedeeLock, TedeeWebhookException, ) -from pytedee_async.bridge import TedeeBridge +from aiotedee.bridge import TedeeBridge from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -31,18 +33,21 @@ _LOGGER = logging.getLogger(__name__) +type TedeeConfigEntry = ConfigEntry[TedeeApiCoordinator] + class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]): """Class to handle fetching data from the tedee API centrally.""" - config_entry: ConfigEntry + config_entry: TedeeConfigEntry bridge: TedeeBridge - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, entry: TedeeConfigEntry) -> None: """Initialize coordinator.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, ) diff --git a/homeassistant/components/tedee/entity.py b/homeassistant/components/tedee/entity.py index 59e3354aa1a816..96cc6f2b3f5d65 100644 --- a/homeassistant/components/tedee/entity.py +++ b/homeassistant/components/tedee/entity.py @@ -1,6 +1,6 @@ """Bases for Tedee entities.""" -from pytedee_async.lock import TedeeLock +from aiotedee.lock import TedeeLock from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo @@ -32,6 +32,7 @@ def __init__( name=lock.lock_name, manufacturer="Tedee", model=lock.lock_type, + model_id=lock.lock_type, via_device=(DOMAIN, coordinator.bridge.serial), ) diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index 8f0587de8ae91c..6e89a48f2a066f 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -2,16 +2,15 @@ from typing import Any -from pytedee_async import TedeeClientException, TedeeLock, TedeeLockState +from aiotedee import TedeeClientException, TedeeLock, TedeeLockState from homeassistant.components.lock import LockEntity, LockEntityFeature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import TedeeConfigEntry from .const import DOMAIN -from .coordinator import TedeeApiCoordinator +from .coordinator import TedeeApiCoordinator, TedeeConfigEntry from .entity import TedeeEntity diff --git a/homeassistant/components/tedee/manifest.json b/homeassistant/components/tedee/manifest.json index 4f071267a253a9..bca51f08f935aa 100644 --- a/homeassistant/components/tedee/manifest.json +++ b/homeassistant/components/tedee/manifest.json @@ -6,7 +6,7 @@ "dependencies": ["http", "webhook"], "documentation": "https://www.home-assistant.io/integrations/tedee", "iot_class": "local_push", - "loggers": ["pytedee_async"], + "loggers": ["aiotedee"], "quality_scale": "platinum", - "requirements": ["pytedee-async==0.2.20"] + "requirements": ["aiotedee==0.2.20"] } diff --git a/homeassistant/components/tedee/sensor.py b/homeassistant/components/tedee/sensor.py index c7d14af1f31a77..90f76317fffd3b 100644 --- a/homeassistant/components/tedee/sensor.py +++ b/homeassistant/components/tedee/sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from pytedee_async import TedeeLock +from aiotedee import TedeeLock from homeassistant.components.sensor import ( SensorDeviceClass, @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TedeeConfigEntry +from .coordinator import TedeeConfigEntry from .entity import TedeeDescriptionEntity diff --git a/homeassistant/components/tedee/strings.json b/homeassistant/components/tedee/strings.json index b0b15b76fcd038..b6966fa2933db4 100644 --- a/homeassistant/components/tedee/strings.json +++ b/homeassistant/components/tedee/strings.json @@ -22,7 +22,7 @@ "local_access_token": "[%key:component::tedee::config::step::user::data_description::local_access_token%]" } }, - "reconfigure_confirm": { + "reconfigure": { "title": "Reconfigure Tedee", "description": "Update the settings of this integration.", "data": { @@ -38,7 +38,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "You selected a different bridge than the one this config entry was configured with, this is not allowed." 
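
The Tautulli and Tedee hunks above follow a pattern that recurs throughout the rest of this diff: instead of stashing the config entry on the flow (`self.reauth_entry = ...`) and manually calling `async_update_entry`, `async_reload` and `async_abort`, the flow asks the base class for the entry and lets `async_update_reload_and_abort` do that sequence in one call, with `_abort_if_unique_id_mismatch` guarding against reauthenticating a different account or device. A minimal sketch of the pattern, not part of this diff -- the "example" domain, the schema and `_validate_token()` are made up, while the `ConfigFlow` helpers are the ones the hunks above switch to:

    # Illustrative sketch only, assuming a hypothetical "example" integration.
    from collections.abc import Mapping
    from typing import Any

    import voluptuous as vol

    from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
    from homeassistant.const import CONF_API_KEY


    async def _validate_token(token: str) -> str:
        """Stand-in for an API call that returns the account/device unique ID."""
        return "example-unique-id"


    class ExampleConfigFlow(ConfigFlow, domain="example"):
        """Config flow that reuses one step for setup and reauth."""

        VERSION = 1

        async def async_step_user(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            """Handle initial setup and reauth with the shared helpers."""
            if user_input is not None:
                unique_id = await _validate_token(user_input[CONF_API_KEY])
                await self.async_set_unique_id(unique_id)
                if self.source == SOURCE_REAUTH:
                    # Abort if the user authenticated a different account/device.
                    self._abort_if_unique_id_mismatch()
                    # Merge the new data, reload the entry and abort with
                    # "reauth_successful" in one call.
                    return self.async_update_reload_and_abort(
                        self._get_reauth_entry(), data_updates=user_input
                    )
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title="Example", data=user_input)
            return self.async_show_form(
                step_id="user",
                data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
            )

        async def async_step_reauth(
            self, entry_data: Mapping[str, Any]
        ) -> ConfigFlowResult:
            """Start reauth; no entry bookkeeping is needed on the flow itself."""
            return await self.async_step_user()

The net effect, visible in each of the config-flow hunks above and below, is that per-flow `reauth_entry`/`reconfigure_entry` attributes and the manual update/reload/abort sequence disappear.
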
}, "error": { "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]", diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index 64e2517a40b13e..b9a032d7f28404 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -37,7 +37,6 @@ HTTP_DIGEST_AUTHENTICATION, ) from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_loaded_integration @@ -175,14 +174,14 @@ ) SERVICE_SCHEMA_SEND_MESSAGE = BASE_SERVICE_SCHEMA.extend( - {vol.Required(ATTR_MESSAGE): cv.template, vol.Optional(ATTR_TITLE): cv.template} + {vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_TITLE): cv.string} ) SERVICE_SCHEMA_SEND_FILE = BASE_SERVICE_SCHEMA.extend( { - vol.Optional(ATTR_URL): cv.template, - vol.Optional(ATTR_FILE): cv.template, - vol.Optional(ATTR_CAPTION): cv.template, + vol.Optional(ATTR_URL): cv.string, + vol.Optional(ATTR_FILE): cv.string, + vol.Optional(ATTR_CAPTION): cv.string, vol.Optional(ATTR_USERNAME): cv.string, vol.Optional(ATTR_PASSWORD): cv.string, vol.Optional(ATTR_AUTHENTICATION): cv.string, @@ -196,8 +195,8 @@ SERVICE_SCHEMA_SEND_LOCATION = BASE_SERVICE_SCHEMA.extend( { - vol.Required(ATTR_LONGITUDE): cv.template, - vol.Required(ATTR_LATITUDE): cv.template, + vol.Required(ATTR_LONGITUDE): cv.string, + vol.Required(ATTR_LATITUDE): cv.string, } ) @@ -229,7 +228,7 @@ cv.positive_int, vol.All(cv.string, "last") ), vol.Required(ATTR_CHAT_ID): vol.Coerce(int), - vol.Required(ATTR_CAPTION): cv.template, + vol.Required(ATTR_CAPTION): cv.string, vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list, }, extra=vol.ALLOW_EXTRA, @@ -248,7 +247,7 @@ SERVICE_SCHEMA_ANSWER_CALLBACK_QUERY = vol.Schema( { - vol.Required(ATTR_MESSAGE): cv.template, + vol.Required(ATTR_MESSAGE): cv.string, vol.Required(ATTR_CALLBACK_QUERY_ID): vol.Coerce(int), vol.Optional(ATTR_SHOW_ALERT): cv.boolean, }, @@ -402,38 +401,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_send_telegram_message(service: ServiceCall) -> None: """Handle sending Telegram Bot message service calls.""" - def _render_template_attr(data, attribute): - if attribute_templ := data.get(attribute): - if any( - isinstance(attribute_templ, vtype) for vtype in (float, int, str) - ): - data[attribute] = attribute_templ - else: - try: - data[attribute] = attribute_templ.async_render( - parse_result=False - ) - except TemplateError as exc: - _LOGGER.error( - "TemplateError in %s: %s -> %s", - attribute, - attribute_templ.template, - exc, - ) - data[attribute] = attribute_templ.template - msgtype = service.service kwargs = dict(service.data) - for attribute in ( - ATTR_MESSAGE, - ATTR_TITLE, - ATTR_URL, - ATTR_FILE, - ATTR_CAPTION, - ATTR_LONGITUDE, - ATTR_LATITUDE, - ): - _render_template_attr(kwargs, attribute) _LOGGER.debug("New telegram message %s: %s", msgtype, kwargs) if msgtype == SERVICE_SEND_MESSAGE: diff --git a/homeassistant/components/template/alarm_control_panel.py b/homeassistant/components/template/alarm_control_panel.py index 6c8a70b328e597..aa1f99f04238a3 100644 --- a/homeassistant/components/template/alarm_control_panel.py +++ b/homeassistant/components/template/alarm_control_panel.py @@ -13,6 +13,7 @@ PLATFORM_SCHEMA as 
ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry @@ -22,15 +23,6 @@ CONF_NAME, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -51,15 +43,15 @@ _LOGGER = logging.getLogger(__name__) _VALID_STATES = [ - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMING, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.TRIGGERED, STATE_UNAVAILABLE, ] @@ -233,7 +225,7 @@ def __init__( if (trigger_action := config.get(CONF_TRIGGER_ACTION)) is not None: self._trigger_script = Script(hass, trigger_action, name, DOMAIN) - self._state: str | None = None + self._state: AlarmControlPanelState | None = None self._attr_device_info = async_device_info_to_link_from_device_id( hass, config.get(CONF_DEVICE_ID), @@ -281,10 +273,10 @@ async def async_added_to_hass(self) -> None: # then we should not restore state and self._state is None ): - self._state = last_state.state + self._state = AlarmControlPanelState(last_state.state) @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" return self._state @@ -335,31 +327,39 @@ async def _async_alarm_arm(self, state, script, code): async def async_alarm_arm_away(self, code: str | None = None) -> None: """Arm the panel to Away.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_AWAY, script=self._arm_away_script, code=code + AlarmControlPanelState.ARMED_AWAY, + script=self._arm_away_script, + code=code, ) async def async_alarm_arm_home(self, code: str | None = None) -> None: """Arm the panel to Home.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_HOME, script=self._arm_home_script, code=code + AlarmControlPanelState.ARMED_HOME, + script=self._arm_home_script, + code=code, ) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Arm the panel to Night.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_NIGHT, script=self._arm_night_script, code=code + AlarmControlPanelState.ARMED_NIGHT, + script=self._arm_night_script, + code=code, ) async def async_alarm_arm_vacation(self, code: str | None = None) -> None: """Arm the panel to Vacation.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_VACATION, script=self._arm_vacation_script, code=code + AlarmControlPanelState.ARMED_VACATION, + script=self._arm_vacation_script, + code=code, ) async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: """Arm the panel to Custom Bypass.""" await self._async_alarm_arm( - STATE_ALARM_ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, script=self._arm_custom_bypass_script, code=code, ) @@ -367,11 +367,13 @@ async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None: async def 
async_alarm_disarm(self, code: str | None = None) -> None: """Disarm the panel.""" await self._async_alarm_arm( - STATE_ALARM_DISARMED, script=self._disarm_script, code=code + AlarmControlPanelState.DISARMED, script=self._disarm_script, code=code ) async def async_alarm_trigger(self, code: str | None = None) -> None: """Trigger the panel.""" await self._async_alarm_arm( - STATE_ALARM_TRIGGERED, script=self._trigger_script, code=code + AlarmControlPanelState.TRIGGERED, + script=self._trigger_script, + code=code, ) diff --git a/homeassistant/components/template/binary_sensor.py b/homeassistant/components/template/binary_sensor.py index 187c7079f5902e..922f1d88ffbbf9 100644 --- a/homeassistant/components/template/binary_sensor.py +++ b/homeassistant/components/template/binary_sensor.py @@ -250,7 +250,6 @@ def __init__( self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._template = config[CONF_STATE] - self._state: bool | None = None self._delay_cancel = None self._delay_on = None self._delay_on_raw = config.get(CONF_DELAY_ON) @@ -268,7 +267,7 @@ async def async_added_to_hass(self) -> None: and (last_state := await self.async_get_last_state()) is not None and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) ): - self._state = last_state.state == STATE_ON + self._attr_is_on = last_state.state == STATE_ON await super().async_added_to_hass() @callback @@ -308,7 +307,7 @@ def _update_state(self, result): else template.result_as_boolean(result) ) - if state == self._state: + if state == self._attr_is_on: return # state without delay @@ -317,24 +316,19 @@ def _update_state(self, result): or (state and not self._delay_on) or (not state and not self._delay_off) ): - self._state = state + self._attr_is_on = state return @callback def _set_state(_): """Set state of template binary sensor.""" - self._state = state + self._attr_is_on = state self.async_write_ha_state() delay = (self._delay_on if state else self._delay_off).total_seconds() # state with delay. Cancelled if template result changes. 
self._delay_cancel = async_call_later(self.hass, delay, _set_state) - @property - def is_on(self) -> bool | None: - """Return true if sensor is on.""" - return self._state - class TriggerBinarySensorEntity(TriggerEntity, BinarySensorEntity, RestoreEntity): """Sensor entity based on trigger data.""" @@ -359,7 +353,6 @@ def __init__( self._delay_cancel: CALLBACK_TYPE | None = None self._auto_off_cancel: CALLBACK_TYPE | None = None self._auto_off_time: datetime | None = None - self._state: bool | None = None async def async_added_to_hass(self) -> None: """Restore last state.""" @@ -371,9 +364,9 @@ async def async_added_to_hass(self) -> None: and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) # The trigger might have fired already while we waited for stored data, # then we should not restore state - and self._state is None + and self._attr_is_on is None ): - self._state = last_state.state == STATE_ON + self._attr_is_on = last_state.state == STATE_ON self.restore_attributes(last_state) if CONF_AUTO_OFF not in self._config: @@ -383,16 +376,11 @@ async def async_added_to_hass(self) -> None: auto_off_time := extra_data.auto_off_time ) is not None and auto_off_time <= dt_util.utcnow(): # It's already past the saved auto off time - self._state = False + self._attr_is_on = False - if self._state and auto_off_time is not None: + if self._attr_is_on and auto_off_time is not None: self._set_auto_off(auto_off_time) - @property - def is_on(self) -> bool | None: - """Return state of the sensor.""" - return self._state - @callback def _handle_coordinator_update(self) -> None: """Handle update of the data.""" @@ -418,7 +406,7 @@ def _handle_coordinator_update(self) -> None: delay = self._rendered.get(key) or self._config.get(key) # state without delay. None means rendering failed. 
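
The binary-sensor hunks here replace a private `self._state` plus an `is_on` property override with the `_attr_is_on` shorthand that the entity base class already exposes. A minimal before/after sketch, not part of this diff -- the `Example*` classes below are made up, the attribute and property come from `BinarySensorEntity`:

    # Illustrative sketch only: the entity-attribute shorthand used above.
    from homeassistant.components.binary_sensor import BinarySensorEntity


    class ExampleBinarySensorOld(BinarySensorEntity):
        """Old style: private state plus a property override."""

        def __init__(self) -> None:
            self._state: bool | None = None

        @property
        def is_on(self) -> bool | None:
            """Return true if the sensor is on."""
            return self._state


    class ExampleBinarySensorNew(BinarySensorEntity):
        """New style: write the base class attribute directly."""

        _attr_is_on: bool | None = None

        def handle_new_value(self, value: bool) -> None:
            # BinarySensorEntity.is_on already returns _attr_is_on,
            # so no property override is needed.
            self._attr_is_on = value

Restoring state then simply assigns the attribute (`self._attr_is_on = last_state.state == STATE_ON`), as the hunks above do.
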
- if self._state == state or state is None or delay is None: + if self._attr_is_on == state or state is None or delay is None: self._set_state(state) return @@ -439,7 +427,7 @@ def _handle_coordinator_update(self) -> None: @callback def _set_state(self, state, _=None): """Set up auto off.""" - self._state = state + self._attr_is_on = state self.async_set_context(self.coordinator.data["context"]) self.async_write_ha_state() @@ -469,7 +457,7 @@ def _set_auto_off(self, auto_off_time: datetime) -> None: @callback def _auto_off(_): """Reset state of template binary sensor.""" - self._state = False + self._attr_is_on = False self.async_write_ha_state() self._auto_off_time = auto_off_time diff --git a/homeassistant/components/template/coordinator.py b/homeassistant/components/template/coordinator.py index b9bbd3625af4d8..4d8fe78f2b51ac 100644 --- a/homeassistant/components/template/coordinator.py +++ b/homeassistant/components/template/coordinator.py @@ -24,7 +24,9 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): def __init__(self, hass: HomeAssistant, config: dict[str, Any]) -> None: """Instantiate trigger data.""" - super().__init__(hass, _LOGGER, name="Trigger Update Coordinator") + super().__init__( + hass, _LOGGER, config_entry=None, name="Trigger Update Coordinator" + ) self.config = config self._cond_func: Callable[[Mapping[str, Any] | None], bool] | None = None self._unsub_start: Callable[[], None] | None = None diff --git a/homeassistant/components/template/cover.py b/homeassistant/components/template/cover.py index 2c84387ed64fa0..2642ede9c3a733 100644 --- a/homeassistant/components/template/cover.py +++ b/homeassistant/components/template/cover.py @@ -24,10 +24,6 @@ CONF_OPTIMISTIC, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError @@ -45,11 +41,17 @@ ) _LOGGER = logging.getLogger(__name__) + +OPEN_STATE = "open" +OPENING_STATE = "opening" +CLOSED_STATE = "closed" +CLOSING_STATE = "closing" + _VALID_STATES = [ - STATE_OPEN, - STATE_OPENING, - STATE_CLOSED, - STATE_CLOSING, + OPEN_STATE, + OPENING_STATE, + CLOSED_STATE, + CLOSING_STATE, "true", "false", "none", @@ -227,13 +229,13 @@ def _update_state(self, result): if state in _VALID_STATES: if not self._position_template: - if state in ("true", STATE_OPEN): + if state in ("true", OPEN_STATE): self._position = 100 else: self._position = 0 - self._is_opening = state == STATE_OPENING - self._is_closing = state == STATE_CLOSING + self._is_opening = state == OPENING_STATE + self._is_closing = state == CLOSING_STATE else: _LOGGER.error( "Received invalid cover is_on state: %s for entity %s. 
Expected: %s", diff --git a/homeassistant/components/template/strings.json b/homeassistant/components/template/strings.json index 0b20ab2f3a3db0..66864a027ba21e 100644 --- a/homeassistant/components/template/strings.json +++ b/homeassistant/components/template/strings.json @@ -106,7 +106,7 @@ "alarm_control_panel": "Template an alarm control panel", "binary_sensor": "Template a binary sensor", "button": "Template a button", - "image": "Template a image", + "image": "Template an image", "number": "Template a number", "select": "Template a select", "sensor": "Template a sensor", diff --git a/homeassistant/components/template/template_entity.py b/homeassistant/components/template/template_entity.py index 3e70e1c3546fcf..f5b84b1ad7ad8b 100644 --- a/homeassistant/components/template/template_entity.py +++ b/homeassistant/components/template/template_entity.py @@ -535,13 +535,15 @@ def _async_setup_templates(self) -> None: ) if self._entity_picture_template is not None: self.add_template_attribute( - "_attr_entity_picture", self._entity_picture_template + "_attr_entity_picture", self._entity_picture_template, cv.string ) if ( self._friendly_name_template is not None and not self._friendly_name_template.is_static ): - self.add_template_attribute("_attr_name", self._friendly_name_template) + self.add_template_attribute( + "_attr_name", self._friendly_name_template, cv.string + ) @callback def async_start_preview( diff --git a/homeassistant/components/template/trigger_entity.py b/homeassistant/components/template/trigger_entity.py index df84ce057c3e12..5130f332d5bb54 100644 --- a/homeassistant/components/template/trigger_entity.py +++ b/homeassistant/components/template/trigger_entity.py @@ -3,6 +3,7 @@ from __future__ import annotations from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.template import TemplateStateFromEntityId from homeassistant.helpers.trigger_template_entity import TriggerBaseEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -41,11 +42,11 @@ def _set_unique_id(self, unique_id: str | None) -> None: def _process_data(self) -> None: """Process new data.""" - this = None - if state := self.hass.states.get(self.entity_id): - this = state.as_dict() run_variables = self.coordinator.data["run_variables"] - variables = {"this": this, **(run_variables or {})} + variables = { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **(run_variables or {}), + } self._render_templates(variables) diff --git a/homeassistant/components/tensorflow/manifest.json b/homeassistant/components/tensorflow/manifest.json index 4f2b6f192859f7..86fd83ad0881e4 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -9,7 +9,7 @@ "tensorflow==2.5.0", "tf-models-official==2.5.0", "pycocotools==2.0.6", - "numpy==1.26.4", - "Pillow==10.4.0" + "numpy==2.1.3", + "Pillow==11.0.0" ] } diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 4cd8c5c7142eef..e7030b568b3d18 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -5,7 +5,12 @@ from aiohttp.client_exceptions import ClientResponseError import jwt -from tesla_fleet_api import EnergySpecific, TeslaFleetApi, VehicleSpecific +from tesla_fleet_api import ( + EnergySpecific, + TeslaFleetApi, + VehicleSigned, + VehicleSpecific, +) from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions 
import ( InvalidRegion, @@ -126,7 +131,13 @@ async def _refresh_token() -> str: # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] - api = VehicleSpecific(tesla.vehicle, vin) + signing = product["command_signing"] == "required" + if signing: + if not tesla.private_key: + await tesla.get_private_key(hass.config.path("tesla_fleet.key")) + api = VehicleSigned(tesla.vehicle, vin) + else: + api = VehicleSpecific(tesla.vehicle, vin) coordinator = TeslaFleetVehicleDataCoordinator(hass, api, product) await coordinator.async_config_entry_first_refresh() @@ -145,7 +156,7 @@ async def _refresh_token() -> str: coordinator=coordinator, vin=vin, device=device, - signing=product["command_signing"] == "required", + signing=signing, ) ) elif "energy_site_id" in product and hasattr(tesla, "energy"): diff --git a/homeassistant/components/tesla_fleet/button.py b/homeassistant/components/tesla_fleet/button.py index 87cd95576d2b06..aea0f91a97c5d1 100644 --- a/homeassistant/components/tesla_fleet/button.py +++ b/homeassistant/components/tesla_fleet/button.py @@ -70,8 +70,6 @@ async def async_setup_entry( for vehicle in entry.runtime_data.vehicles for description in DESCRIPTIONS if Scope.VEHICLE_CMDS in entry.runtime_data.scopes - and (not vehicle.signing or description.key == "wake") - # Wake doesn't need signing ) diff --git a/homeassistant/components/tesla_fleet/climate.py b/homeassistant/components/tesla_fleet/climate.py index 6199ee112b5dcd..9a1533a688f853 100644 --- a/homeassistant/components/tesla_fleet/climate.py +++ b/homeassistant/components/tesla_fleet/climate.py @@ -84,7 +84,7 @@ def __init__( ) -> None: """Initialize the climate.""" - self.read_only = Scope.VEHICLE_CMDS not in scopes or data.signing + self.read_only = Scope.VEHICLE_CMDS not in scopes if self.read_only: self._attr_supported_features = ClimateEntityFeature(0) @@ -231,7 +231,7 @@ def __init__( """Initialize the cabin overheat climate entity.""" # Scopes - self.read_only = Scope.VEHICLE_CMDS not in scopes or data.signing + self.read_only = Scope.VEHICLE_CMDS not in scopes # Supported Features if self.read_only: diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index 64b88792387de2..ca36c6f511b2f0 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ b/homeassistant/components/tesla_fleet/config_flow.py @@ -8,7 +8,7 @@ import jwt -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, LOGGER @@ -21,7 +21,6 @@ class OAuth2FlowHandler( """Config flow to handle Tesla Fleet API OAuth2 authentication.""" DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -50,32 +49,19 @@ async def async_oauth_create_entry( ) uid = token["sub"] - if not self.reauth_entry: - await self.async_set_unique_id(uid) - self._abort_if_unique_id_configured() - - return self.async_create_entry(title=uid, data=data) - - if self.reauth_entry.unique_id == uid: - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data=data, + await self.async_set_unique_id(uid) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data ) - await 
self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( - reason="reauth_account_mismatch", - description_placeholders={"title": self.reauth_entry.title}, - ) + self._abort_if_unique_id_configured() + return self.async_create_entry(title=uid, data=data) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/tesla_fleet/cover.py b/homeassistant/components/tesla_fleet/cover.py index 4e49e24b6898a8..f270734424f4c4 100644 --- a/homeassistant/components/tesla_fleet/cover.py +++ b/homeassistant/components/tesla_fleet/cover.py @@ -57,7 +57,7 @@ def __init__(self, data: TeslaFleetVehicleData, scopes: list[Scope]) -> None: self._attr_supported_features = ( CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: @@ -111,7 +111,7 @@ def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: self._attr_supported_features = ( CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: @@ -144,7 +144,7 @@ def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: self.scoped = Scope.VEHICLE_CMDS in scopes self._attr_supported_features = CoverEntityFeature.OPEN - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: @@ -172,18 +172,12 @@ def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: self._attr_supported_features = ( CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: """Update the entity attributes.""" - value = self._value - if value == CLOSED: - self._attr_is_closed = True - elif value == OPEN: - self._attr_is_closed = False - else: - self._attr_is_closed = None + self._attr_is_closed = self._value == CLOSED async def async_open_cover(self, **kwargs: Any) -> None: """Open rear trunk.""" @@ -216,7 +210,7 @@ def __init__(self, vehicle: TeslaFleetVehicleData, scopes: list[Scope]) -> None: super().__init__(vehicle, "vehicle_state_sun_roof_state") self.scoped = Scope.VEHICLE_CMDS in scopes - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/tesla_fleet/device_tracker.py b/homeassistant/components/tesla_fleet/device_tracker.py index 62c084c9fe516b..d6dcef895a68ef 100644 --- a/homeassistant/components/tesla_fleet/device_tracker.py +++ b/homeassistant/components/tesla_fleet/device_tracker.py @@ -4,6 +4,7 @@ from homeassistant.components.device_tracker.config_entry import TrackerEntity from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_HOME from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity @@ -84,4 +85,7 @@ def _async_update_attrs(self) -> None: @property def location_name(self) -> str | None: """Return a location name for the current location of the device.""" - return self.get("drive_state_active_route_destination") + location = self.get("drive_state_active_route_destination") + if location == "Home": + return STATE_HOME + return location diff --git a/homeassistant/components/tesla_fleet/entity.py b/homeassistant/components/tesla_fleet/entity.py index 60230cd881d3f8..0ee41b5e32271e 100644 --- a/homeassistant/components/tesla_fleet/entity.py +++ b/homeassistant/components/tesla_fleet/entity.py @@ -123,14 +123,6 @@ async def wake_up_if_asleep(self) -> None: """Wake up the vehicle if its asleep.""" await wake_up_vehicle(self.vehicle) - def raise_for_read_only(self, scope: Scope) -> None: - """Raise an error if no command signing or a scope is not available.""" - if self.vehicle.signing: - raise ServiceValidationError( - translation_domain=DOMAIN, translation_key="command_signing" - ) - super().raise_for_read_only(scope) - class TeslaFleetEnergyLiveEntity(TeslaFleetEntity): """Parent class for TeslaFleet Energy Site Live entities.""" diff --git a/homeassistant/components/tesla_fleet/icons.json b/homeassistant/components/tesla_fleet/icons.json index 3e842c0997a30b..449dda93c62a8d 100644 --- a/homeassistant/components/tesla_fleet/icons.json +++ b/homeassistant/components/tesla_fleet/icons.json @@ -222,6 +222,16 @@ }, "wall_connector_state": { "default": "mdi:ev-station" + }, + "island_status": { + "default": "mdi:help-circle", + "state": { + "on_grid": "mdi:transmission-tower", + "off_grid": "mdi:transmission-tower-off", + "off_grid_unintentional": "mdi:transmission-tower-off", + "island_status_unknown": "mdi:help-circle", + "off_grid_intentional": "mdi:account-cancel" + } } }, "switch": { diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index f83f4f93e3cb06..8d6e5f110683f4 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], "quality_scale": "gold", - "requirements": ["tesla-fleet-api==0.7.8"] + "requirements": ["tesla-fleet-api==0.8.4"] } diff --git a/homeassistant/components/tesla_fleet/media_player.py b/homeassistant/components/tesla_fleet/media_player.py index 0a1d18c340712f..455c990077d2f9 100644 --- a/homeassistant/components/tesla_fleet/media_player.py +++ b/homeassistant/components/tesla_fleet/media_player.py @@ -64,7 +64,7 @@ def __init__( """Initialize the media player entity.""" super().__init__(data, "media") self.scoped = scoped - if not scoped and data.signing: + if not scoped: self._attr_supported_features = MediaPlayerEntityFeature(0) def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/tesla_fleet/sensor.py b/homeassistant/components/tesla_fleet/sensor.py index 4d30a509e1aff1..b4e7b51faba781 100644 --- a/homeassistant/components/tesla_fleet/sensor.py +++ b/homeassistant/components/tesla_fleet/sensor.py @@ -378,6 +378,17 @@ class TeslaFleetTimeEntityDescription(SensorEntityDescription): device_class=SensorDeviceClass.POWER, entity_registry_enabled_default=False, ), + SensorEntityDescription( + key="island_status", + options=[ + "island_status_unknown", + "on_grid", + "off_grid", + 
"off_grid_unintentional", + "off_grid_intentional", + ], + device_class=SensorDeviceClass.ENUM, + ), ) WALL_CONNECTOR_DESCRIPTIONS: tuple[SensorEntityDescription, ...] = ( @@ -475,7 +486,7 @@ def __init__( async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" await super().async_added_to_hass() - if self.coordinator.data.get("state") == TeslaFleetState.OFFLINE: + if self.coordinator.data.get("state") != TeslaFleetState.ONLINE: if (sensor_data := await self.async_get_last_sensor_data()) is not None: self._attr_native_value = sensor_data.native_value @@ -513,7 +524,7 @@ def _async_update_attrs(self) -> None: self._attr_native_value = self._get_timestamp(self._value) -class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, RestoreSensor): +class TeslaFleetEnergyLiveSensorEntity(TeslaFleetEnergyLiveEntity, SensorEntity): """Base class for Tesla Fleet energy site metric sensors.""" entity_description: SensorEntityDescription @@ -527,20 +538,13 @@ def __init__( self.entity_description = description super().__init__(data, description.key) - async def async_added_to_hass(self) -> None: - """Handle entity which will be added.""" - await super().async_added_to_hass() - if not self.coordinator.updated_once: - if (sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = sensor_data.native_value - def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" self._attr_available = not self.is_none self._attr_native_value = self._value -class TeslaFleetWallConnectorSensorEntity(TeslaFleetWallConnectorEntity, RestoreSensor): +class TeslaFleetWallConnectorSensorEntity(TeslaFleetWallConnectorEntity, SensorEntity): """Base class for Tesla Fleet energy site metric sensors.""" entity_description: SensorEntityDescription @@ -559,20 +563,13 @@ def __init__( description.key, ) - async def async_added_to_hass(self) -> None: - """Handle entity which will be added.""" - await super().async_added_to_hass() - if not self.coordinator.updated_once: - if (sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = sensor_data.native_value - def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" self._attr_available = not self.is_none self._attr_native_value = self._value -class TeslaFleetEnergyInfoSensorEntity(TeslaFleetEnergyInfoEntity, RestoreSensor): +class TeslaFleetEnergyInfoSensorEntity(TeslaFleetEnergyInfoEntity, SensorEntity): """Base class for Tesla Fleet energy site metric sensors.""" entity_description: SensorEntityDescription @@ -586,13 +583,6 @@ def __init__( self.entity_description = description super().__init__(data, description.key) - async def async_added_to_hass(self) -> None: - """Handle entity which will be added.""" - await super().async_added_to_hass() - if not self.coordinator.updated_once: - if (sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = sensor_data.native_value - def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" self._attr_available = not self.is_none diff --git a/homeassistant/components/tesla_fleet/strings.json b/homeassistant/components/tesla_fleet/strings.json index 09040de13b044f..fe5cd06c1ef476 100644 --- a/homeassistant/components/tesla_fleet/strings.json +++ b/homeassistant/components/tesla_fleet/strings.json @@ -8,7 +8,9 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": 
"[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reauth_account_mismatch": "The reauthentication account does not match the original account" }, "error": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" @@ -412,6 +414,16 @@ "vehicle_state_odometer": { "name": "Odometer" }, + "island_status": { + "name": "Grid Status", + "state": { + "island_status_unknown": "Unknown", + "on_grid": "Connected", + "off_grid": "Disconnected", + "off_grid_unintentional": "Disconnected unintentionally", + "off_grid_intentional": "Disconnected intentionally" + } + }, "vehicle_state_tpms_pressure_fl": { "name": "Tire pressure front left" }, @@ -492,9 +504,6 @@ "command_no_reason": { "message": "Command was unsuccessful but did not return a reason why." }, - "command_signing": { - "message": "Vehicle requires command signing. Please see documentation for more details." - }, "invalid_cop_temp": { "message": "Cabin overheat protection does not support that temperature." }, diff --git a/homeassistant/components/tesla_wall_connector/__init__.py b/homeassistant/components/tesla_wall_connector/__init__.py index f4d04ca8cc6999..01c657fbcaa985 100644 --- a/homeassistant/components/tesla_wall_connector/__init__.py +++ b/homeassistant/components/tesla_wall_connector/__init__.py @@ -71,6 +71,7 @@ async def async_update_data(): coordinator: DataUpdateCoordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="tesla-wallconnector", update_interval=get_poll_interval(entry), update_method=async_update_data, diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index ab2e4c047349cc..aa1d2b426603bb 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -1,6 +1,7 @@ """Teslemetry integration.""" import asyncio +from collections.abc import Callable from typing import Final from tesla_fleet_api import EnergySpecific, Teslemetry, VehicleSpecific @@ -10,6 +11,7 @@ SubscriptionRequired, TeslaFleetError, ) +from teslemetry_stream import TeslemetryStream from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, Platform @@ -28,6 +30,7 @@ TeslemetryEnergySiteLiveCoordinator, TeslemetryVehicleDataCoordinator, ) +from .helpers import flatten from .models import TeslemetryData, TeslemetryEnergyData, TeslemetryVehicleData from .services import async_register_services @@ -69,8 +72,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - access_token=access_token, ) try: - scopes = (await teslemetry.metadata())["scopes"] - products = (await teslemetry.products())["response"] + calls = await asyncio.gather( + teslemetry.metadata(), + teslemetry.products(), + ) except InvalidToken as e: raise ConfigEntryAuthFailed from e except SubscriptionRequired as e: @@ -78,11 +83,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - except TeslaFleetError as e: raise ConfigEntryNotReady from e + scopes = calls[0]["scopes"] + region = calls[0]["region"] + products = calls[1]["response"] + device_registry = dr.async_get(hass) # Create array of classes vehicles: 
list[TeslemetryVehicleData] = [] energysites: list[TeslemetryEnergyData] = [] + + # Create the stream + stream = TeslemetryStream( + session, + access_token, + server=f"{region.lower()}.teslemetry.com", + parse_timestamp=True, + ) + for product in products: if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: # Remove the protobuff 'cached_data' that we do not use to save memory @@ -99,22 +117,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - serial_number=vin, ) + remove_listener = stream.async_add_listener( + create_handle_vehicle_stream(vin, coordinator), + {"vin": vin}, + ) + vehicles.append( TeslemetryVehicleData( api=api, coordinator=coordinator, + stream=stream, vin=vin, device=device, + remove_listener=remove_listener, ) ) elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] - if not ( - product["components"]["battery"] - or product["components"]["solar"] - or "wall_connectors" in product["components"] - ): + powerwall = ( + product["components"]["battery"] or product["components"]["solar"] + ) + wall_connector = "wall_connectors" in product["components"] + if not powerwall and not wall_connector: LOGGER.debug( "Skipping Energy Site %s as it has no components", site_id, @@ -137,7 +162,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - info_coordinator=TeslemetryEnergySiteInfoCoordinator( hass, api, product ), - history_coordinator=TeslemetryEnergyHistoryCoordinator(hass, api), + history_coordinator=( + TeslemetryEnergyHistoryCoordinator(hass, api) + if powerwall + else None + ), id=site_id, device=device, ) @@ -160,6 +189,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - *( energysite.history_coordinator.async_config_entry_first_refresh() for energysite in energysites + if energysite.history_coordinator ), ) @@ -214,3 +244,20 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> config_entry, unique_id=metadata["uid"], version=1, minor_version=2 ) return True + + +def create_handle_vehicle_stream(vin: str, coordinator) -> Callable[[dict], None]: + """Create a handle vehicle stream function.""" + + def handle_vehicle_stream(data: dict) -> None: + """Handle vehicle data from the stream.""" + if "vehicle_data" in data: + LOGGER.debug("Streaming received vehicle data from %s", vin) + coordinator.updated_once = True + coordinator.async_set_updated_data(flatten(data["vehicle_data"])) + elif "state" in data: + LOGGER.debug("Streaming received state from %s", vin) + coordinator.data["state"] = data["state"] + coordinator.async_set_updated_data(coordinator.data) + + return handle_vehicle_stream diff --git a/homeassistant/components/teslemetry/config_flow.py b/homeassistant/components/teslemetry/config_flow.py index 73921986f44f02..d8cf2bd7945224 100644 --- a/homeassistant/components/teslemetry/config_flow.py +++ b/homeassistant/components/teslemetry/config_flow.py @@ -14,7 +14,7 @@ ) import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -22,6 +22,7 @@ TESLEMETRY_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}) DESCRIPTION_PLACEHOLDERS = { + "name": "Teslemetry", "short_url": "teslemetry.com/console", "url": 
"[teslemetry.com/console](https://teslemetry.com/console)", } @@ -32,7 +33,6 @@ class TeslemetryConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 2 - _entry: ConfigEntry | None = None async def async_auth(self, user_input: Mapping[str, Any]) -> dict[str, str]: """Reusable Auth Helper.""" @@ -78,7 +78,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth on failure.""" - self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -86,12 +85,11 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Handle users reauth credentials.""" - assert self._entry errors: dict[str, str] = {} if user_input and not (errors := await self.async_auth(user_input)): return self.async_update_reload_and_abort( - self._entry, + self._get_reauth_entry(), data=user_input, ) diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index 4612408e14d967..f37d0613de9964 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -18,6 +18,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ENERGY_HISTORY_FIELDS, LOGGER, TeslemetryState +from .helpers import flatten VEHICLE_INTERVAL = timedelta(seconds=30) VEHICLE_WAIT = timedelta(minutes=15) @@ -35,19 +36,6 @@ ] -def flatten(data: dict[str, Any], parent: str | None = None) -> dict[str, Any]: - """Flatten the data structure.""" - result = {} - for key, value in data.items(): - if parent: - key = f"{parent}_{key}" - if isinstance(value, dict): - result.update(flatten(value, key)) - else: - result[key] = value - return result - - class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Class to manage fetching data from the Teslemetry API.""" diff --git a/homeassistant/components/teslemetry/cover.py b/homeassistant/components/teslemetry/cover.py index 190f729d99f348..8775da931d598e 100644 --- a/homeassistant/components/teslemetry/cover.py +++ b/homeassistant/components/teslemetry/cover.py @@ -182,13 +182,7 @@ def __init__(self, vehicle: TeslemetryVehicleData, scopes: list[Scope]) -> None: def _async_update_attrs(self) -> None: """Update the entity attributes.""" - value = self._value - if value == CLOSED: - self._attr_is_closed = True - elif value == OPEN: - self._attr_is_closed = False - else: - self._attr_is_closed = None + self._attr_is_closed = self._value == CLOSED async def async_open_cover(self, **kwargs: Any) -> None: """Open rear trunk.""" diff --git a/homeassistant/components/teslemetry/device_tracker.py b/homeassistant/components/teslemetry/device_tracker.py index 6577bcf88d6413..2b0ffd88cc6da1 100644 --- a/homeassistant/components/teslemetry/device_tracker.py +++ b/homeassistant/components/teslemetry/device_tracker.py @@ -3,6 +3,7 @@ from __future__ import annotations from homeassistant.components.device_tracker.config_entry import TrackerEntity +from homeassistant.const import STATE_HOME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -80,4 +81,7 @@ class TeslemetryDeviceTrackerRouteEntity(TeslemetryDeviceTrackerEntity): @property def location_name(self) -> str | None: """Return a location name for the current location of the device.""" - return self.get("drive_state_active_route_destination") + 
location = self.get("drive_state_active_route_destination") + if location == "Home": + return STATE_HOME + return location diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index ca40d4d00ce4e2..d14f3a42734c5b 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -175,6 +175,8 @@ def __init__( ) -> None: """Initialize common aspects of a Teslemetry Energy Site Info entity.""" + assert data.history_coordinator + self.api = data.api self._attr_unique_id = f"{data.id}-{key}" self._attr_device_info = data.device diff --git a/homeassistant/components/teslemetry/helpers.py b/homeassistant/components/teslemetry/helpers.py index 4e0860083339f9..30601feccbcc97 100644 --- a/homeassistant/components/teslemetry/helpers.py +++ b/homeassistant/components/teslemetry/helpers.py @@ -10,6 +10,19 @@ from .const import DOMAIN, LOGGER, TeslemetryState +def flatten(data: dict[str, Any], parent: str | None = None) -> dict[str, Any]: + """Flatten the data structure.""" + result = {} + for key, value in data.items(): + if parent: + key = f"{parent}_{key}" + if isinstance(value, dict): + result.update(flatten(value, key)) + else: + result[key] = value + return result + + async def wake_up_vehicle(vehicle) -> None: """Wake up a vehicle.""" async with vehicle.wakelock: diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index 715c6cd215918f..6b667094d6290e 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], "quality_scale": "platinum", - "requirements": ["tesla-fleet-api==0.7.8"] + "requirements": ["tesla-fleet-api==0.8.4", "teslemetry-stream==0.4.2"] } diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index a6d549b8937fc5..d3969b30a7caae 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -3,10 +3,12 @@ from __future__ import annotations import asyncio +from collections.abc import Callable from dataclasses import dataclass from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from teslemetry_stream import TeslemetryStream from homeassistant.helpers.device_registry import DeviceInfo @@ -33,9 +35,11 @@ class TeslemetryVehicleData: api: VehicleSpecific coordinator: TeslemetryVehicleDataCoordinator + stream: TeslemetryStream vin: str wakelock = asyncio.Lock() device: DeviceInfo + remove_listener: Callable @dataclass @@ -45,6 +49,6 @@ class TeslemetryEnergyData: api: EnergySpecific live_coordinator: TeslemetryEnergySiteLiveCoordinator info_coordinator: TeslemetryEnergySiteInfoCoordinator - history_coordinator: TeslemetryEnergyHistoryCoordinator + history_coordinator: TeslemetryEnergyHistoryCoordinator | None id: int device: DeviceInfo diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index ba7d930fcd0558..95876cc2cf9db5 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -482,8 +482,7 @@ async def async_setup_entry( TeslemetryEnergyHistorySensorEntity(energysite, description) for energysite in entry.runtime_data.energysites for description in ENERGY_HISTORY_DESCRIPTIONS - if 
energysite.info_coordinator.data.get("components_battery") - or energysite.info_coordinator.data.get("components_solar") + if energysite.history_coordinator ), ) ) diff --git a/homeassistant/components/teslemetry/strings.json b/homeassistant/components/teslemetry/strings.json index 253c19632ea194..4f4bc2ae60c58d 100644 --- a/homeassistant/components/teslemetry/strings.json +++ b/homeassistant/components/teslemetry/strings.json @@ -1,7 +1,9 @@ { "config": { "abort": { - "already_configured": "Account is already configured" + "already_configured": "Account is already configured", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reauth_account_mismatch": "The reauthentication account does not match the original account" }, "error": { "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]", @@ -15,6 +17,13 @@ "access_token": "[%key:common::config_flow::data::access_token%]" }, "description": "Enter an access token from {url}." + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The {name} integration needs to re-authenticate your account, please enter an access token from {url}", + "data": { + "access_token": "[%key:common::config_flow::data::access_token%]" + } } } }, diff --git a/homeassistant/components/teslemetry/update.py b/homeassistant/components/teslemetry/update.py index 1884689ae64513..670cd0e0edaf0e 100644 --- a/homeassistant/components/teslemetry/update.py +++ b/homeassistant/components/teslemetry/update.py @@ -92,12 +92,12 @@ def _async_update_attrs(self) -> None: SCHEDULED, INSTALLING, ): - self._attr_in_progress = ( - cast(int, self.get("vehicle_state_software_update_install_perc")) - or True - ) + self._attr_in_progress = True + if install_perc := self.get("vehicle_state_software_update_install_perc"): + self._attr_update_percentage = cast(int, install_perc) else: self._attr_in_progress = False + self._attr_update_percentage = None async def async_install( self, version: str | None, backup: bool, **kwargs: Any @@ -107,4 +107,5 @@ async def async_install( await self.wake_up_if_asleep() await handle_vehicle_command(self.api.schedule_software_update(offset_sec=60)) self._attr_in_progress = True + self._attr_update_percentage = None self.async_write_ha_state() diff --git a/homeassistant/components/tessie/config_flow.py b/homeassistant/components/tessie/config_flow.py index bee518ce95fda3..14c6b93fdfd690 100644 --- a/homeassistant/components/tessie/config_flow.py +++ b/homeassistant/components/tessie/config_flow.py @@ -14,12 +14,12 @@ from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession -from . 
import TessieConfigEntry from .const import DOMAIN TESSIE_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}) DESCRIPTION_PLACEHOLDERS = { - "url": "[my.tessie.com/settings/api](https://my.tessie.com/settings/api)" + "name": "Tessie", + "url": "[my.tessie.com/settings/api](https://my.tessie.com/settings/api)", } @@ -28,10 +28,6 @@ class TessieConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize.""" - self._reauth_entry: TessieConfigEntry | None = None - async def async_step_user( self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: @@ -69,9 +65,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -79,7 +72,7 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Get update API Key from the user.""" errors: dict[str, str] = {} - assert self._reauth_entry + if user_input: try: await get_state_of_all_vehicles( @@ -95,7 +88,7 @@ async def async_step_reauth_confirm( errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( - self._reauth_entry, data=user_input + self._get_reauth_entry(), data=user_input ) return self.async_show_form( diff --git a/homeassistant/components/tessie/lock.py b/homeassistant/components/tessie/lock.py index 4f6ce3800e3e5e..76d58a9070cb72 100644 --- a/homeassistant/components/tessie/lock.py +++ b/homeassistant/components/tessie/lock.py @@ -4,21 +4,11 @@ from typing import Any -from tessie_api import ( - disable_speed_limit, - enable_speed_limit, - lock, - open_unlock_charge_port, - unlock, -) - -from homeassistant.components.automation import automations_with_entity -from homeassistant.components.lock import ATTR_CODE, LockEntity -from homeassistant.components.script import scripts_with_entity -from homeassistant.const import Platform +from tessie_api import lock, open_unlock_charge_port, unlock + +from homeassistant.components.lock import LockEntity from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import TessieConfigEntry @@ -37,46 +27,11 @@ async def async_setup_entry( """Set up the Tessie sensor platform from a config entry.""" data = entry.runtime_data - entities: list[TessieEntity] = [ + async_add_entities( klass(vehicle) for klass in (TessieLockEntity, TessieCableLockEntity) for vehicle in data.vehicles - ] - - ent_reg = er.async_get(hass) - - for vehicle in data.vehicles: - entity_id = ent_reg.async_get_entity_id( - Platform.LOCK, - DOMAIN, - f"{vehicle.vin}-vehicle_state_speed_limit_mode_active", - ) - if entity_id: - entity_entry = ent_reg.async_get(entity_id) - assert entity_entry - if entity_entry.disabled: - ent_reg.async_remove(entity_id) - else: - entities.append(TessieSpeedLimitEntity(vehicle)) - - entity_automations = automations_with_entity(hass, entity_id) - entity_scripts = scripts_with_entity(hass, entity_id) - for item in entity_automations + entity_scripts: - ir.async_create_issue( - hass, - DOMAIN, - f"deprecated_speed_limit_{entity_id}_{item}", - breaks_in_ha_version="2024.11.0", - is_fixable=True, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_speed_limit_entity", - translation_placeholders={ - "entity": entity_id, - "info": item, - }, - ) - async_add_entities(entities) + ) class TessieLockEntity(TessieEntity, LockEntity): @@ -105,58 +60,6 @@ async def async_unlock(self, **kwargs: Any) -> None: self.set((self.key, False)) -class TessieSpeedLimitEntity(TessieEntity, LockEntity): - """Speed Limit with PIN entity for Tessie.""" - - _attr_code_format = r"^\d\d\d\d$" - - def __init__( - self, - vehicle: TessieVehicleData, - ) -> None: - """Initialize the sensor.""" - super().__init__(vehicle, "vehicle_state_speed_limit_mode_active") - - @property - def is_locked(self) -> bool | None: - """Return the state of the Lock.""" - return self._value - - async def async_lock(self, **kwargs: Any) -> None: - """Enable speed limit with pin.""" - ir.async_create_issue( - self.coordinator.hass, - DOMAIN, - "deprecated_speed_limit_locked", - breaks_in_ha_version="2024.11.0", - is_fixable=True, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_speed_limit_locked", - ) - code: str | None = kwargs.get(ATTR_CODE) - if code: - await self.run(enable_speed_limit, pin=code) - self.set((self.key, True)) - - async def async_unlock(self, **kwargs: Any) -> None: - """Disable speed limit with pin.""" - ir.async_create_issue( - self.coordinator.hass, - DOMAIN, - "deprecated_speed_limit_unlocked", - breaks_in_ha_version="2024.11.0", - is_fixable=True, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_speed_limit_unlocked", - ) - code: str | None = kwargs.get(ATTR_CODE) - if code: - await self.run(disable_speed_limit, pin=code) - self.set((self.key, False)) - - class TessieCableLockEntity(TessieEntity, LockEntity): """Cable Lock entity for Tessie.""" diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index d9f2cea9618b2e..92aa289ca470be 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], "quality_scale": "platinum", - "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.7.8"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.8.4"] } diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 
52c03c8700b362..5b677594b426f1 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -1,7 +1,8 @@ { "config": { "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]", @@ -62,9 +63,6 @@ }, "charge_state_charge_port_latch": { "name": "Charge cable lock" - }, - "vehicle_state_speed_limit_mode_active": { - "name": "Speed limit" } }, "media_player": { @@ -531,40 +529,5 @@ "command_failed": { "message": "Command failed, {message}" } - }, - "issues": { - "deprecated_speed_limit_entity": { - "title": "Detected Tessie speed limit lock entity usage", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::tessie::issues::deprecated_speed_limit_entity::title%]", - "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\nHome Assistant detected that entity `{entity}` is being used in `{info}`\n\nYou should remove the speed limit lock entity from `{info}` then select **Submit** to fix this issue." - } - } - } - }, - "deprecated_speed_limit_locked": { - "title": "Detected Tessie speed limit lock entity locked", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::tessie::issues::deprecated_speed_limit_locked::title%]", - "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then select **Submit** to fix this issue." - } - } - } - }, - "deprecated_speed_limit_unlocked": { - "title": "Detected Tessie speed limit lock entity unlocked", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::tessie::issues::deprecated_speed_limit_unlocked::title%]", - "description": "The Tessie integration's speed limit lock entity has been deprecated and will be remove in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then select **Submit** to fix this issue." 
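A note on the Tessie config-flow hunk earlier in this diff and the reauth_successful abort string added just above: the flow no longer caches a _reauth_entry in __init__ and instead resolves the entry with the built-in _get_reauth_entry() helper when the reauth form is submitted. A minimal sketch of that pattern, with the domain, schema and validation helper as placeholders:

from collections.abc import Mapping
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN


async def _validate_token(token: str) -> bool:
    """Hypothetical stand-in for the real API check."""
    return bool(token)


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Reauth without a cached _reauth_entry attribute."""

    VERSION = 1

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Start reauth; the entry is looked up later via _get_reauth_entry()."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Ask for a fresh token and update the existing entry in place."""
        errors: dict[str, str] = {}
        if user_input is not None:
            if await _validate_token(user_input[CONF_ACCESS_TOKEN]):
                # Updates the entry data, reloads the entry and aborts with
                # reason "reauth_successful" in a single call.
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(), data_updates=user_input
                )
            errors["base"] = "invalid_access_token"
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}),
            errors=errors,
        )

The other config flows touched in this diff (The Things Network, tplink, tplink_omada, trafikverket_camera) converge on the same helpers.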
- } - } - } - } } } diff --git a/homeassistant/components/tessie/update.py b/homeassistant/components/tessie/update.py index 959a713047fea8..f6198fa6c0314e 100644 --- a/homeassistant/components/tessie/update.py +++ b/homeassistant/components/tessie/update.py @@ -71,14 +71,22 @@ def latest_version(self) -> str | None: return self.installed_version @property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool: + """Update installation progress.""" + return ( + self.get("vehicle_state_software_update_status") + == TessieUpdateStatus.INSTALLING + ) + + @property + def update_percentage(self) -> int | None: """Update installation progress.""" if ( self.get("vehicle_state_software_update_status") == TessieUpdateStatus.INSTALLING ): return self.get("vehicle_state_software_update_install_perc") - return False + return None async def async_install( self, version: str | None, backup: bool, **kwargs: Any diff --git a/homeassistant/components/thethingsnetwork/__init__.py b/homeassistant/components/thethingsnetwork/__init__.py index 253ce7a052e07e..d3c6c8356cb713 100644 --- a/homeassistant/components/thethingsnetwork/__init__.py +++ b/homeassistant/components/thethingsnetwork/__init__.py @@ -2,55 +2,15 @@ import logging -import voluptuous as vol - from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType -from .const import CONF_APP_ID, DOMAIN, PLATFORMS, TTN_API_HOST +from .const import DOMAIN, PLATFORMS, TTN_API_HOST from .coordinator import TTNCoordinator _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = vol.Schema( - { - # Configuration via yaml not longer supported - keeping to warn about migration - DOMAIN: vol.Schema( - { - vol.Required(CONF_APP_ID): cv.string, - vol.Required("access_key"): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Initialize of The Things Network component.""" - - if DOMAIN in config: - ir.async_create_issue( - hass, - DOMAIN, - "manual_migration", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - severity=ir.IssueSeverity.ERROR, - translation_key="manual_migration", - translation_placeholders={ - "domain": DOMAIN, - "v2_v3_migration_url": "https://www.thethingsnetwork.org/forum/c/v2-to-v3-upgrade/102", - "v2_deprecation_url": "https://www.thethingsnetwork.org/forum/t/the-things-network-v2-is-permanently-shutting-down-completed/50710", - }, - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Establish connection with The Things Network.""" diff --git a/homeassistant/components/thethingsnetwork/config_flow.py b/homeassistant/components/thethingsnetwork/config_flow.py index 7480e4cb1d922c..412c5da4ef9f6c 100644 --- a/homeassistant/components/thethingsnetwork/config_flow.py +++ b/homeassistant/components/thethingsnetwork/config_flow.py @@ -7,7 +7,7 @@ from ttn_client import TTNAuthError, TTNClient import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.helpers.selector import ( TextSelector, @@ -25,8 +25,6 @@ class 
TTNFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - _reauth_entry: ConfigEntry | None = None - async def async_step_user( self, user_input: Mapping[str, Any] | None = None ) -> ConfigFlowResult: @@ -51,11 +49,9 @@ async def async_step_user( if not errors: # Create entry - if self._reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self._reauth_entry, - data=user_input, - reason="reauth_successful", + self._get_reauth_entry(), data=user_input ) await self.async_set_unique_id(user_input[CONF_APP_ID]) self._abort_if_unique_id_configured() @@ -67,8 +63,8 @@ async def async_step_user( # Show form for user to provide settings if not user_input: - if self._reauth_entry: - user_input = self._reauth_entry.data + if self.source == SOURCE_REAUTH: + user_input = self._get_reauth_entry().data else: user_input = {CONF_HOST: TTN_API_HOST} @@ -92,11 +88,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" - - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/thethingsnetwork/sensor.py b/homeassistant/components/thethingsnetwork/sensor.py index 82dd169a52d0fe..25dd2f1e1eb5d0 100644 --- a/homeassistant/components/thethingsnetwork/sensor.py +++ b/homeassistant/components/thethingsnetwork/sensor.py @@ -4,10 +4,11 @@ from ttn_client import TTNSensorValue -from homeassistant.components.sensor import SensorEntity, StateType +from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from .const import CONF_APP_ID, DOMAIN from .entity import TTNEntity diff --git a/homeassistant/components/thethingsnetwork/strings.json b/homeassistant/components/thethingsnetwork/strings.json index 98572cb318c7f2..f5a4fcef8fda0e 100644 --- a/homeassistant/components/thethingsnetwork/strings.json +++ b/homeassistant/components/thethingsnetwork/strings.json @@ -22,11 +22,5 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]" } - }, - "issues": { - "manual_migration": { - "description": "Configuring {domain} using YAML was removed as part of migrating to [The Things Network v3]({v2_v3_migration_url}). 
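A note on the tessie/update.py hunk above: in_progress is reduced to a plain boolean and the numeric progress moves to the newer update_percentage property. A minimal sketch of the same split on a generic UpdateEntity, with the status fields faked in __init__:

from homeassistant.components.update import UpdateEntity, UpdateEntityFeature


class ExampleUpdateEntity(UpdateEntity):
    """Report progress via update_percentage instead of overloading in_progress."""

    # Progress reporting still requires the PROGRESS feature flag.
    _attr_supported_features = (
        UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
    )

    def __init__(self) -> None:
        """Fake the data a coordinator or API client would normally supply."""
        self._status = "installing"  # placeholder status value
        self._install_percentage: int | None = 42

    @property
    def in_progress(self) -> bool:
        """Only answer whether an install is currently running."""
        return self._status == "installing"

    @property
    def update_percentage(self) -> int | None:
        """Return the numeric progress, or None while idle."""
        if self._status == "installing":
            return self._install_percentage
        return None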
[The Things Network v2 has shutted down]({v2_deprecation_url}).\n\nPlease remove the {domain} entry from the configuration.yaml and add re-add the integration using the config_flow", - "title": "The {domain} YAML configuration is not supported" - } } } diff --git a/homeassistant/components/threshold/binary_sensor.py b/homeassistant/components/threshold/binary_sensor.py index 9440e2515867e6..3d52d2225be178 100644 --- a/homeassistant/components/threshold/binary_sensor.py +++ b/homeassistant/components/threshold/binary_sensor.py @@ -61,15 +61,29 @@ DEFAULT_NAME: Final = "Threshold" -PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_ENTITY_ID): cv.entity_id, - vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, - vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce(float), - vol.Optional(CONF_LOWER): vol.Coerce(float), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_UPPER): vol.Coerce(float), - } + +def no_missing_threshold(value: dict) -> dict: + """Validate data point list is greater than polynomial degrees.""" + if value.get(CONF_LOWER) is None and value.get(CONF_UPPER) is None: + raise vol.Invalid("Lower or Upper thresholds are not provided") + + return value + + +PLATFORM_SCHEMA = vol.All( + BINARY_SENSOR_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_ENTITY_ID): cv.entity_id, + vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, + vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce( + float + ), + vol.Optional(CONF_LOWER): vol.Coerce(float), + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_UPPER): vol.Coerce(float), + } + ), + no_missing_threshold, ) @@ -126,9 +140,6 @@ async def async_setup_platform( hysteresis: float = config[CONF_HYSTERESIS] device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) - if lower is None and upper is None: - raise ValueError("Lower or Upper thresholds not provided") - async_add_entities( [ ThresholdSensor( @@ -151,6 +162,9 @@ class ThresholdSensor(BinarySensorEntity): """Representation of a Threshold sensor.""" _attr_should_poll = False + _unrecorded_attributes = frozenset( + {ATTR_ENTITY_ID, ATTR_HYSTERESIS, ATTR_LOWER, ATTR_TYPE, ATTR_UPPER} + ) def __init__( self, @@ -177,7 +191,6 @@ def __init__( self._hysteresis: float = hysteresis self._attr_device_class = device_class self._state_position = POSITION_UNKNOWN - self._state: bool | None = None self.sensor_value: float | None = None async def async_added_to_hass(self) -> None: @@ -229,11 +242,6 @@ def async_threshold_sensor_state_listener( ) _update_sensor_state() - @property - def is_on(self) -> bool | None: - """Return true if sensor is on.""" - return self._state - @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes of the sensor.""" @@ -261,53 +269,53 @@ def above(sensor_value: float, threshold: float) -> bool: if self.sensor_value is None: self._state_position = POSITION_UNKNOWN - self._state = None + self._attr_is_on = None return if self.threshold_type == TYPE_LOWER: - if self._state is None: - self._state = False + if self._attr_is_on is None: + self._attr_is_on = False self._state_position = POSITION_ABOVE if below(self.sensor_value, self._threshold_lower): self._state_position = POSITION_BELOW - self._state = True + self._attr_is_on = True elif above(self.sensor_value, self._threshold_lower): self._state_position = POSITION_ABOVE - self._state = False + self._attr_is_on = False return if 
self.threshold_type == TYPE_UPPER: assert self._threshold_upper is not None - if self._state is None: - self._state = False + if self._attr_is_on is None: + self._attr_is_on = False self._state_position = POSITION_BELOW if above(self.sensor_value, self._threshold_upper): self._state_position = POSITION_ABOVE - self._state = True + self._attr_is_on = True elif below(self.sensor_value, self._threshold_upper): self._state_position = POSITION_BELOW - self._state = False + self._attr_is_on = False return if self.threshold_type == TYPE_RANGE: - if self._state is None: - self._state = True + if self._attr_is_on is None: + self._attr_is_on = True self._state_position = POSITION_IN_RANGE if below(self.sensor_value, self._threshold_lower): self._state_position = POSITION_BELOW - self._state = False + self._attr_is_on = False if above(self.sensor_value, self._threshold_upper): self._state_position = POSITION_ABOVE - self._state = False + self._attr_is_on = False elif above(self.sensor_value, self._threshold_lower) and below( self.sensor_value, self._threshold_upper ): self._state_position = POSITION_IN_RANGE - self._state = True + self._attr_is_on = True return @callback diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py index ce05b8070f6c8d..9b5c7ee11689f9 100644 --- a/homeassistant/components/tibber/__init__.py +++ b/homeassistant/components/tibber/__init__.py @@ -6,15 +6,9 @@ import tibber from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_ACCESS_TOKEN, - CONF_NAME, - EVENT_HOMEASSISTANT_STOP, - Platform, -) +from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType @@ -73,19 +67,6 @@ async def _close(event: Event) -> None: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - # Use discovery to load platform legacy notify platform - # The use of the legacy notify service was deprecated with HA Core 2024.6 - # Support will be removed with HA Core 2024.12 - hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - {CONF_NAME: DOMAIN}, - hass.data[DATA_HASS_CONFIG], - ) - ) - return True diff --git a/homeassistant/components/tibber/manifest.json b/homeassistant/components/tibber/manifest.json index eb59d2456fb918..bc9304ab59d37c 100644 --- a/homeassistant/components/tibber/manifest.json +++ b/homeassistant/components/tibber/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["tibber"], "quality_scale": "silver", - "requirements": ["pyTibber==0.30.2"] + "requirements": ["pyTibber==0.30.8"] } diff --git a/homeassistant/components/tibber/notify.py b/homeassistant/components/tibber/notify.py index 1c9f86ed50261c..fdeeeba68ef932 100644 --- a/homeassistant/components/tibber/notify.py +++ b/homeassistant/components/tibber/notify.py @@ -2,38 +2,21 @@ from __future__ import annotations -from collections.abc import Callable -from typing import Any - from tibber import Tibber from homeassistant.components.notify import ( - ATTR_TITLE, ATTR_TITLE_DEFAULT, - BaseNotificationService, NotifyEntity, NotifyEntityFeature, - migrate_notify_issue, ) from homeassistant.config_entries import 
ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import DOMAIN as TIBBER_DOMAIN -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> TibberNotificationService: - """Get the Tibber notification service.""" - tibber_connection: Tibber = hass.data[TIBBER_DOMAIN] - return TibberNotificationService(tibber_connection.send_notification) - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: @@ -41,31 +24,6 @@ async def async_setup_entry( async_add_entities([TibberNotificationEntity(entry.entry_id)]) -class TibberNotificationService(BaseNotificationService): - """Implement the notification service for Tibber.""" - - def __init__(self, notify: Callable) -> None: - """Initialize the service.""" - self._notify = notify - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to Tibber devices.""" - migrate_notify_issue( - self.hass, - TIBBER_DOMAIN, - "Tibber", - "2024.12.0", - service_name=self._service_name, - ) - title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) - try: - await self._notify(title=title, message=message) - except TimeoutError as exc: - raise HomeAssistantError( - translation_domain=TIBBER_DOMAIN, translation_key="send_message_timeout" - ) from exc - - class TibberNotificationEntity(NotifyEntity): """Implement the notification entity service for Tibber.""" diff --git a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index adac836aca6b3c..125dc8eae6f1b3 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -50,7 +50,7 @@ SCAN_INTERVAL = timedelta(minutes=1) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5) PARALLEL_UPDATES = 0 - +TWENTY_MINUTES = 20 * 60 RT_SENSORS_UNIQUE_ID_MIGRATION = { "accumulated_consumption_last_hour": "accumulated consumption current hour", @@ -369,7 +369,7 @@ def __init__(self, tibber_home: tibber.TibberHome) -> None: """Initialize the sensor.""" super().__init__(tibber_home=tibber_home) self._last_updated: datetime.datetime | None = None - self._spread_load_constant = randrange(5000) + self._spread_load_constant = randrange(TWENTY_MINUTES) self._attr_available = False self._attr_extra_state_attributes = { @@ -397,7 +397,7 @@ async def async_update(self) -> None: if ( not self._tibber_home.last_data_timestamp or (self._tibber_home.last_data_timestamp - now).total_seconds() - < 5 * 3600 + self._spread_load_constant + < 11 * 3600 + self._spread_load_constant or not self.available ): _LOGGER.debug("Asking for new data") diff --git a/homeassistant/components/tibber/services.py b/homeassistant/components/tibber/services.py index 35facbcd54595a..72943a0215a091 100644 --- a/homeassistant/components/tibber/services.py +++ b/homeassistant/components/tibber/services.py @@ -47,21 +47,19 @@ async def __get_prices(call: ServiceCall, *, hass: HomeAssistant) -> ServiceResp for tibber_home in tibber_connection.get_homes(only_active=True): home_nickname = tibber_home.name - price_info = tibber_home.info["viewer"]["home"]["currentSubscription"][ - "priceInfo" - ] price_data = [ { - "start_time": dt.datetime.fromisoformat(price["startsAt"]), - "price": price["total"], - "level": 
price["level"], + "start_time": starts_at, + "price": price, + "level": tibber_home.price_level.get(starts_at), } - for key in ("today", "tomorrow") - for price in price_info[key] + for starts_at, price in tibber_home.price_total.items() ] selected_data = [ - price for price in price_data if start <= price["start_time"] < end + price + for price in price_data + if start <= dt.datetime.fromisoformat(price["start_time"]) < end ] tibber_prices[home_nickname] = selected_data diff --git a/homeassistant/components/tile/__init__.py b/homeassistant/components/tile/__init__.py index 7fd5afcea7d4d5..594c4e7bdcb2f9 100644 --- a/homeassistant/components/tile/__init__.py +++ b/homeassistant/components/tile/__init__.py @@ -101,6 +101,7 @@ async def async_update_tile(tile: Tile) -> None: coordinator = coordinators[tile_uuid] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=tile.name, update_interval=DEFAULT_UPDATE_INTERVAL, update_method=partial(async_update_tile, tile), diff --git a/homeassistant/components/tile/strings.json b/homeassistant/components/tile/strings.json index 504823c4d16841..2d34d13c436183 100644 --- a/homeassistant/components/tile/strings.json +++ b/homeassistant/components/tile/strings.json @@ -16,7 +16,8 @@ } }, "error": { - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", diff --git a/homeassistant/components/timer/strings.json b/homeassistant/components/timer/strings.json index 1ebf0c6f50a435..064ec81df1d80d 100644 --- a/homeassistant/components/timer/strings.json +++ b/homeassistant/components/timer/strings.json @@ -1,4 +1,5 @@ { + "title": "Timer", "entity_component": { "_": { "name": "Timer", diff --git a/homeassistant/components/tod/binary_sensor.py b/homeassistant/components/tod/binary_sensor.py index 907df849ea1167..3ac90b5578ccd8 100644 --- a/homeassistant/components/tod/binary_sensor.py +++ b/homeassistant/components/tod/binary_sensor.py @@ -5,7 +5,7 @@ from collections.abc import Callable from datetime import datetime, time, timedelta import logging -from typing import TYPE_CHECKING, Any, Literal, TypeGuard +from typing import Any, Literal, TypeGuard import voluptuous as vol @@ -109,6 +109,9 @@ class TodSensor(BinarySensorEntity): """Time of the Day Sensor.""" _attr_should_poll = False + _time_before: datetime + _time_after: datetime + _next_update: datetime def __init__( self, @@ -122,9 +125,6 @@ def __init__( """Init the ToD Sensor...""" self._attr_unique_id = unique_id self._attr_name = name - self._time_before: datetime | None = None - self._time_after: datetime | None = None - self._next_update: datetime | None = None self._after_offset = after_offset self._before_offset = before_offset self._before = before @@ -134,9 +134,6 @@ def __init__( @property def is_on(self) -> bool: """Return True is sensor is on.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None if self._time_after < self._time_before: return self._time_after <= dt_util.utcnow() < self._time_before return False @@ -144,10 +141,6 @@ def is_on(self) -> bool: @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the sensor.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None - assert self._next_update is 
not None if time_zone := dt_util.get_default_time_zone(): return { ATTR_AFTER: self._time_after.astimezone(time_zone).isoformat(), @@ -244,9 +237,6 @@ def _add_one_dst_aware_day(self, a_date: datetime, target_time: time) -> datetim def _turn_to_next_day(self) -> None: """Turn to to the next day.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None if _is_sun_event(self._after): self._time_after = get_astral_event_next( self.hass, self._after, self._time_after - self._after_offset @@ -282,17 +272,12 @@ def _clean_up_listener() -> None: self.async_on_remove(_clean_up_listener) - if TYPE_CHECKING: - assert self._next_update is not None self._unsub_update = event.async_track_point_in_utc_time( self.hass, self._point_in_time_listener, self._next_update ) def _calculate_next_update(self) -> None: """Datetime when the next update to the state.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None now = dt_util.utcnow() if now < self._time_after: self._next_update = self._time_after @@ -309,9 +294,6 @@ def _point_in_time_listener(self, now: datetime) -> None: self._calculate_next_update() self.async_write_ha_state() - if TYPE_CHECKING: - assert self._next_update is not None - self._unsub_update = event.async_track_point_in_utc_time( self.hass, self._point_in_time_listener, self._next_update ) diff --git a/homeassistant/components/todo/intent.py b/homeassistant/components/todo/intent.py index 6233ea6029e093..c678408a576e77 100644 --- a/homeassistant/components/todo/intent.py +++ b/homeassistant/components/todo/intent.py @@ -34,7 +34,7 @@ async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse hass = intent_obj.hass slots = self.async_validate_slots(intent_obj.slots) - item = slots["item"]["value"] + item = slots["item"]["value"].strip() list_name = slots["name"]["value"] target_list: TodoListEntity | None = None @@ -62,4 +62,13 @@ async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse response = intent_obj.create_response() response.response_type = intent.IntentResponseType.ACTION_DONE + response.async_set_results( + [ + intent.IntentResponseTarget( + type=intent.IntentResponseTargetType.ENTITY, + name=list_name, + id=match_result.states[0].entity_id, + ) + ] + ) return response diff --git a/homeassistant/components/todoist/__init__.py b/homeassistant/components/todoist/__init__.py index 60c40b1c03c072..2e30856d0dff9d 100644 --- a/homeassistant/components/todoist/__init__.py +++ b/homeassistant/components/todoist/__init__.py @@ -25,7 +25,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: token = entry.data[CONF_TOKEN] api = TodoistAPIAsync(token) - coordinator = TodoistCoordinator(hass, _LOGGER, SCAN_INTERVAL, api, token) + coordinator = TodoistCoordinator(hass, _LOGGER, entry, SCAN_INTERVAL, api, token) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {}) diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py index 31470633cc67ee..62f9fafc02afef 100644 --- a/homeassistant/components/todoist/calendar.py +++ b/homeassistant/components/todoist/calendar.py @@ -142,7 +142,7 @@ async def async_setup_platform( project_id_lookup = {} api = TodoistAPIAsync(token) - coordinator = TodoistCoordinator(hass, _LOGGER, SCAN_INTERVAL, api, token) + coordinator = TodoistCoordinator(hass, _LOGGER, None, SCAN_INTERVAL, api, token) await 
coordinator.async_refresh() async def _shutdown_coordinator(_: Event) -> None: diff --git a/homeassistant/components/todoist/coordinator.py b/homeassistant/components/todoist/coordinator.py index b55680907ac585..2f35741c5ab428 100644 --- a/homeassistant/components/todoist/coordinator.py +++ b/homeassistant/components/todoist/coordinator.py @@ -6,6 +6,7 @@ from todoist_api_python.api_async import TodoistAPIAsync from todoist_api_python.models import Label, Project, Section, Task +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -17,12 +18,19 @@ def __init__( self, hass: HomeAssistant, logger: logging.Logger, + entry: ConfigEntry | None, update_interval: timedelta, api: TodoistAPIAsync, token: str, ) -> None: """Initialize the Todoist coordinator.""" - super().__init__(hass, logger, name="Todoist", update_interval=update_interval) + super().__init__( + hass, + logger, + config_entry=entry, + name="Todoist", + update_interval=update_interval, + ) self.api = api self._projects: list[Project] | None = None self._labels: list[Label] | None = None diff --git a/homeassistant/components/todoist/strings.json b/homeassistant/components/todoist/strings.json index 5b083ac58bfe80..721b491bbf5c04 100644 --- a/homeassistant/components/todoist/strings.json +++ b/homeassistant/components/todoist/strings.json @@ -78,7 +78,7 @@ "description": "When should user be reminded of this task, in natural language." }, "reminder_date_lang": { - "name": "Reminder data language", + "name": "Reminder date language", "description": "The language of reminder_date_string." }, "reminder_date": { diff --git a/homeassistant/components/tomorrowio/config_flow.py b/homeassistant/components/tomorrowio/config_flow.py index 90bb488a7c2dfa..cce41b17498846 100644 --- a/homeassistant/components/tomorrowio/config_flow.py +++ b/homeassistant/components/tomorrowio/config_flow.py @@ -91,10 +91,6 @@ def _get_unique_id(hass: HomeAssistant, input_dict: dict[str, Any]): class TomorrowioOptionsConfigFlow(OptionsFlow): """Handle Tomorrow.io options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Tomorrow.io options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -105,7 +101,7 @@ async def async_step_init( options_schema = { vol.Required( CONF_TIMESTEP, - default=self._config_entry.options[CONF_TIMESTEP], + default=self.config_entry.options[CONF_TIMESTEP], ): vol.In([1, 5, 15, 30, 60]), } @@ -125,7 +121,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> TomorrowioOptionsConfigFlow: """Get the options flow for this handler.""" - return TomorrowioOptionsConfigFlow(config_entry) + return TomorrowioOptionsConfigFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index fb13c630e3e330..bc33129a74123b 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -9,19 +9,10 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - 
STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_platform @@ -103,7 +94,7 @@ def __init__( self._attr_code_format = CodeFormat.NUMBER @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" # State attributes can be removed in 2025.3 attr = { @@ -121,29 +112,29 @@ def state(self) -> str | None: else: attr["location_name"] = f"{self.device.name} partition {self._partition_id}" - state: str | None = None + state: AlarmControlPanelState | None = None if self._partition.arming_state.is_disarmed(): - state = STATE_ALARM_DISARMED + state = AlarmControlPanelState.DISARMED elif self._partition.arming_state.is_armed_night(): - state = STATE_ALARM_ARMED_NIGHT + state = AlarmControlPanelState.ARMED_NIGHT elif self._partition.arming_state.is_armed_home(): - state = STATE_ALARM_ARMED_HOME + state = AlarmControlPanelState.ARMED_HOME elif self._partition.arming_state.is_armed_away(): - state = STATE_ALARM_ARMED_AWAY + state = AlarmControlPanelState.ARMED_AWAY elif self._partition.arming_state.is_armed_custom_bypass(): - state = STATE_ALARM_ARMED_CUSTOM_BYPASS + state = AlarmControlPanelState.ARMED_CUSTOM_BYPASS elif self._partition.arming_state.is_arming(): - state = STATE_ALARM_ARMING + state = AlarmControlPanelState.ARMING elif self._partition.arming_state.is_disarming(): - state = STATE_ALARM_DISARMING + state = AlarmControlPanelState.DISARMING elif self._partition.arming_state.is_triggered_police(): - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED attr["triggered_source"] = "Police/Medical" elif self._partition.arming_state.is_triggered_fire(): - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED attr["triggered_source"] = "Fire/Smoke" elif self._partition.arming_state.is_triggered_gas(): - state = STATE_ALARM_TRIGGERED + state = AlarmControlPanelState.TRIGGERED attr["triggered_source"] = "Carbon Monoxide" self._attr_extra_state_attributes = attr diff --git a/homeassistant/components/totalconnect/config_flow.py b/homeassistant/components/totalconnect/config_flow.py index c64dd5c612018d..3f5d05fda13c6a 100644 --- a/homeassistant/components/totalconnect/config_flow.py +++ b/homeassistant/components/totalconnect/config_flow.py @@ -193,16 +193,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> TotalConnectOptionsFlowHandler: """Get options flow.""" - return TotalConnectOptionsFlowHandler(config_entry) + return TotalConnectOptionsFlowHandler() class TotalConnectOptionsFlowHandler(OptionsFlow): """TotalConnect options flow handler.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, bool] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/touchline_sl/manifest.json b/homeassistant/components/touchline_sl/manifest.json index 2329cb67e17409..dd591cbf03883a 100644 --- a/homeassistant/components/touchline_sl/manifest.json +++ b/homeassistant/components/touchline_sl/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/touchline_sl", "integration_type": 
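A note on the TotalConnect hunk above: the removed STATE_ALARM_* constants and the state property give way to the AlarmControlPanelState enum returned from the new alarm_state property. A minimal sketch of an entity using it; the backend status string and its mapping are placeholders:

from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelState,
)


class ExampleAlarm(AlarmControlPanelEntity):
    """Report state through alarm_state instead of state."""

    def __init__(self) -> None:
        """Initialise with a hypothetical backend status value."""
        self._backend_status = "armed_home"  # placeholder for the real partition state

    @property
    def alarm_state(self) -> AlarmControlPanelState | None:
        """Map the backend status onto the shared enum."""
        mapping = {
            "disarmed": AlarmControlPanelState.DISARMED,
            "armed_home": AlarmControlPanelState.ARMED_HOME,
            "armed_away": AlarmControlPanelState.ARMED_AWAY,
            "triggered": AlarmControlPanelState.TRIGGERED,
        }
        return mapping.get(self._backend_status)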
"hub", "iot_class": "cloud_polling", - "requirements": ["pytouchlinesl==0.1.7"] + "requirements": ["pytouchlinesl==0.1.8"] } diff --git a/homeassistant/components/tplink/__init__.py b/homeassistant/components/tplink/__init__.py index ceeb1120ed8ac1..ee1d90e70b4d6c 100644 --- a/homeassistant/components/tplink/__init__.py +++ b/homeassistant/components/tplink/__init__.py @@ -31,6 +31,7 @@ CONF_MAC, CONF_MODEL, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant, callback @@ -141,6 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo entry_credentials_hash = entry.data.get(CONF_CREDENTIALS_HASH) entry_use_http = entry.data.get(CONF_USES_HTTP, False) entry_aes_keys = entry.data.get(CONF_AES_KEYS) + port_override = entry.data.get(CONF_PORT) conn_params: Device.ConnectionParameters | None = None if conn_params_dict := entry.data.get(CONF_CONNECTION_PARAMETERS): @@ -157,6 +159,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo timeout=CONNECT_TIMEOUT, http_client=client, aes_keys=entry_aes_keys, + port_override=port_override, ) if conn_params: config.connection_type = conn_params diff --git a/homeassistant/components/tplink/binary_sensor.py b/homeassistant/components/tplink/binary_sensor.py index 0e426161a0cf21..34375bccf4f79f 100644 --- a/homeassistant/components/tplink/binary_sensor.py +++ b/homeassistant/components/tplink/binary_sensor.py @@ -58,6 +58,10 @@ class TPLinkBinarySensorEntityDescription( key="water_alert", device_class=BinarySensorDeviceClass.MOISTURE, ), + TPLinkBinarySensorEntityDescription( + key="motion_detected", + device_class=BinarySensorDeviceClass.MOTION, + ), ) BINARYSENSOR_DESCRIPTIONS_MAP = {desc.key: desc for desc in BINARY_SENSOR_DESCRIPTIONS} diff --git a/homeassistant/components/tplink/button.py b/homeassistant/components/tplink/button.py index fd2d7fb664f546..131325e489d3b6 100644 --- a/homeassistant/components/tplink/button.py +++ b/homeassistant/components/tplink/button.py @@ -9,6 +9,7 @@ from homeassistant.components.button import ( DOMAIN as BUTTON_DOMAIN, + ButtonDeviceClass, ButtonEntity, ButtonEntityDescription, ) @@ -45,6 +46,10 @@ class TPLinkButtonEntityDescription( breaks_in_ha_version="2025.4.0", ), ), + TPLinkButtonEntityDescription( + key="reboot", + device_class=ButtonDeviceClass.RESTART, + ), ] BUTTON_DESCRIPTIONS_MAP = {desc.key: desc for desc in BUTTON_DESCRIPTIONS} diff --git a/homeassistant/components/tplink/climate.py b/homeassistant/components/tplink/climate.py index 3bd6aba5c26d23..f86992ea0cfd65 100644 --- a/homeassistant/components/tplink/climate.py +++ b/homeassistant/components/tplink/climate.py @@ -15,7 +15,7 @@ HVACAction, HVACMode, ) -from homeassistant.const import PRECISION_WHOLE +from homeassistant.const import PRECISION_TENTHS from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -64,7 +64,7 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): | ClimateEntityFeature.TURN_ON ) _attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF] - _attr_precision = PRECISION_WHOLE + _attr_precision = PRECISION_TENTHS # This disables the warning for async_turn_{on,off}, can be removed later. 
_enable_turn_on_off_backwards_compatibility = False diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index ae7543218c7dba..63f1b4e125b4a4 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -32,6 +32,7 @@ CONF_MAC, CONF_MODEL, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import callback @@ -69,7 +70,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): MINOR_VERSION = CONF_CONFIG_ENTRY_MINOR_VERSION host: str | None = None - reauth_entry: ConfigEntry | None = None + port: int | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -163,12 +164,16 @@ async def _async_handle_discovery( return self.async_abort(reason="already_in_progress") credentials = await get_credentials(self.hass) try: + # If integration discovery there will be a device or None for dhcp if device: self._discovered_device = device await self._async_try_connect(device, credentials) else: await self._async_try_discover_and_update( - host, credentials, raise_on_progress=True + host, + credentials, + raise_on_progress=True, + raise_on_timeout=True, ) except AuthenticationError: return await self.async_step_discovery_auth_confirm() @@ -257,6 +262,26 @@ async def async_step_discovery_confirm( step_id="discovery_confirm", description_placeholders=placeholders ) + @staticmethod + def _async_get_host_port(host_str: str) -> tuple[str, int | None]: + """Parse the host string for host and port.""" + if "[" in host_str: + _, _, bracketed = host_str.partition("[") + host, _, port_str = bracketed.partition("]") + _, _, port_str = port_str.partition(":") + else: + host, _, port_str = host_str.partition(":") + + if not port_str: + return host, None + + try: + port = int(port_str) + except ValueError: + return host, None + + return host, port + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -267,12 +292,29 @@ async def async_step_user( if user_input is not None: if not (host := user_input[CONF_HOST]): return await self.async_step_pick_device() - self._async_abort_entries_match({CONF_HOST: host}) + + host, port = self._async_get_host_port(host) + + match_dict = {CONF_HOST: host} + if port: + self.port = port + match_dict[CONF_PORT] = port + self._async_abort_entries_match(match_dict) + self.host = host credentials = await get_credentials(self.hass) try: device = await self._async_try_discover_and_update( - host, credentials, raise_on_progress=False + host, + credentials, + raise_on_progress=False, + raise_on_timeout=False, + port=port, + ) or await self._async_try_connect_all( + host, + credentials=credentials, + raise_on_progress=False, + port=port, ) except AuthenticationError: return await self.async_step_user_auth_confirm() @@ -280,6 +322,8 @@ async def async_step_user( errors["base"] = "cannot_connect" placeholders["error"] = str(ex) else: + if not device: + return await self.async_step_user_auth_confirm() return self._async_create_entry_from_device(device) return self.async_show_form( @@ -299,15 +343,23 @@ async def async_step_user_auth_confirm( assert self.host is not None placeholders: dict[str, str] = {CONF_HOST: self.host} - assert self._discovered_device is not None if user_input: username = user_input[CONF_USERNAME] password = user_input[CONF_PASSWORD] credentials = Credentials(username, password) + device: Device | None try: - device = await self._async_try_connect( - self._discovered_device, credentials - 
) + if self._discovered_device: + device = await self._async_try_connect( + self._discovered_device, credentials + ) + else: + device = await self._async_try_connect_all( + self.host, + credentials=credentials, + raise_on_progress=False, + port=self.port, + ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" placeholders["error"] = str(ex) @@ -315,11 +367,15 @@ async def async_step_user_auth_confirm( errors["base"] = "cannot_connect" placeholders["error"] = str(ex) else: - await set_credentials(self.hass, username, password) - self.hass.async_create_task( - self._async_reload_requires_auth_entries(), eager_start=False - ) - return self._async_create_entry_from_device(device) + if not device: + errors["base"] = "cannot_connect" + placeholders["error"] = "try_connect_all failed" + else: + await set_credentials(self.hass, username, password) + self.hass.async_create_task( + self._async_reload_requires_auth_entries(), eager_start=False + ) + return self._async_create_entry_from_device(device) return self.async_show_form( step_id="user_auth_confirm", @@ -372,13 +428,13 @@ async def _async_reload_requires_auth_entries(self) -> None: """Reload any in progress config flow that now have credentials.""" _config_entries = self.hass.config_entries - if reauth_entry := self.reauth_entry: - await _config_entries.async_reload(reauth_entry.entry_id) + if self.source == SOURCE_REAUTH: + await _config_entries.async_reload(self._get_reauth_entry().entry_id) for flow in _config_entries.flow.async_progress_by_handler( DOMAIN, include_uninitialized=True ): - context: dict[str, Any] = flow["context"] + context = flow["context"] if context.get("source") != SOURCE_REAUTH: continue entry_id: str = context["entry_id"] @@ -404,47 +460,84 @@ def _async_create_entry_from_device(self, device: Device) -> ConfigFlowResult: data[CONF_AES_KEYS] = device.config.aes_keys if device.credentials_hash: data[CONF_CREDENTIALS_HASH] = device.credentials_hash + if port := device.config.port_override: + data[CONF_PORT] = port return self.async_create_entry( title=f"{device.alias} {device.model}", data=data, ) + async def _async_try_connect_all( + self, + host: str, + credentials: Credentials | None, + raise_on_progress: bool, + *, + port: int | None = None, + ) -> Device | None: + """Try to connect to the device speculatively. + + The connection parameters aren't known but discovery has failed so try + to connect with tcp. + """ + if credentials: + device = await Discover.try_connect_all( + host, + credentials=credentials, + http_client=create_async_tplink_clientsession(self.hass), + port=port, + ) + else: + # This will just try the legacy protocol that doesn't require auth + # and doesn't use http + try: + device = await Device.connect( + config=DeviceConfig(host, port_override=port) + ) + except Exception: # noqa: BLE001 + return None + if device: + await self.async_set_unique_id( + dr.format_mac(device.mac), + raise_on_progress=raise_on_progress, + ) + return device + async def _async_try_discover_and_update( self, host: str, credentials: Credentials | None, raise_on_progress: bool, - ) -> Device: + raise_on_timeout: bool, + *, + port: int | None = None, + ) -> Device | None: """Try to discover the device and call update. - Will try to connect to legacy devices if discovery fails. + Will try to connect directly if discovery fails. 
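The connection helpers in this hunk implement a two-step strategy: UDP discovery first (now allowed to time out quietly), then a speculative TCP connection via Discover.try_connect_all, or the legacy auth-less Device.connect when no credentials are stored. A condensed sketch of that control flow outside the flow class; the kasa calls are the ones used above, with the HTTP-client wiring and error reporting trimmed:

from kasa import Credentials, Device, DeviceConfig, Discover


async def find_device(
    host: str, credentials: Credentials | None, port: int | None = None
) -> Device | None:
    """Discover a device, falling back to a speculative direct connection."""
    # 1. UDP discovery supplies the connection parameters when it works.
    try:
        device = await Discover.discover_single(
            host, credentials=credentials, port=port
        )
    except TimeoutError:
        device = None
    if device is not None:
        await device.update()
        return device

    # 2. Discovery failed: try the known authenticated transports over TCP.
    if credentials:
        return await Discover.try_connect_all(
            host, credentials=credentials, port=port
        )

    # 3. No credentials stored: only the legacy auth-less protocol is worth trying.
    try:
        return await Device.connect(config=DeviceConfig(host, port_override=port))
    except Exception:  # noqa: BLE001 - best effort, mirroring the config flow
        return None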
""" + self._discovered_device = None try: self._discovered_device = await Discover.discover_single( - host, credentials=credentials + host, + credentials=credentials, + port=port, ) except TimeoutError as ex: - # Try connect() to legacy devices if discovery fails. This is a - # fallback mechanism for legacy that can handle connections without - # discovery info but if it fails raise the original error which is - # applicable for newer devices. - try: - self._discovered_device = await Device.connect( - config=DeviceConfig(host) - ) - except Exception: # noqa: BLE001 - # Raise the original error instead of the fallback error + if raise_on_timeout: raise ex from ex - else: - if self._discovered_device.config.uses_http: - self._discovered_device.config.http_client = ( - create_async_tplink_clientsession(self.hass) - ) - await self._discovered_device.update() + return None + if TYPE_CHECKING: + assert self._discovered_device await self.async_set_unique_id( dr.format_mac(self._discovered_device.mac), raise_on_progress=raise_on_progress, ) + if self._discovered_device.config.uses_http: + self._discovered_device.config.http_client = ( + create_async_tplink_clientsession(self.hass) + ) + await self._discovered_device.update() return self._discovered_device async def _async_try_connect( @@ -473,9 +566,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Start the reauthentication flow if the device needs updated credentials.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -484,10 +574,10 @@ async def async_step_reauth_confirm( """Dialog that informs the user that reauth is required.""" errors: dict[str, str] = {} placeholders: dict[str, str] = {} - reauth_entry = self.reauth_entry - assert reauth_entry is not None + reauth_entry = self._get_reauth_entry() entry_data = reauth_entry.data host = entry_data[CONF_HOST] + port = entry_data.get(CONF_PORT) if user_input: username = user_input[CONF_USERNAME] @@ -497,7 +587,14 @@ async def async_step_reauth_confirm( device = await self._async_try_discover_and_update( host, credentials=credentials, - raise_on_progress=True, + raise_on_progress=False, + raise_on_timeout=False, + port=port, + ) or await self._async_try_connect_all( + host, + credentials=credentials, + raise_on_progress=False, + port=port, ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" @@ -506,15 +603,23 @@ async def async_step_reauth_confirm( errors["base"] = "cannot_connect" placeholders["error"] = str(ex) else: - await set_credentials(self.hass, username, password) - if updates := self._get_config_updates(reauth_entry, host, device): - self.hass.config_entries.async_update_entry( - reauth_entry, data=updates + if not device: + errors["base"] = "cannot_connect" + placeholders["error"] = "try_connect_all failed" + else: + await self.async_set_unique_id( + dr.format_mac(device.mac), + raise_on_progress=False, ) - self.hass.async_create_task( - self._async_reload_requires_auth_entries(), eager_start=False - ) - return self.async_abort(reason="reauth_successful") + await set_credentials(self.hass, username, password) + if updates := self._get_config_updates(reauth_entry, host, device): + self.hass.config_entries.async_update_entry( + reauth_entry, data=updates + ) + self.hass.async_create_task( + self._async_reload_requires_auth_entries(), eager_start=False + ) + return 
self.async_abort(reason="reauth_successful") # Old config entries will not have these values. alias = entry_data.get(CONF_ALIAS) or "unknown" diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 96ea8f41bb7f0d..0abd68543c5f08 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -68,6 +68,15 @@ "state": { "on": "mdi:sleep" } + }, + "child_lock": { + "default": "mdi:account-lock" + }, + "pir_enabled": { + "default": "mdi:motion-sensor-off", + "state": { + "on": "mdi:motion-sensor" + } } }, "sensor": { @@ -88,6 +97,9 @@ }, "alarm_source": { "default": "mdi:bell" + }, + "water_alert_timestamp": { + "default": "mdi:clock-alert-outline" } }, "number": { diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 81506c41a6d514..cb8a55b3db21a1 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -301,5 +301,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.4"] + "requirements": ["python-kasa[speedups]==0.7.7"] } diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py index 276334dc8a1519..809d900276800b 100644 --- a/homeassistant/components/tplink/sensor.py +++ b/homeassistant/components/tplink/sensor.py @@ -97,6 +97,10 @@ class TPLinkSensorEntityDescription( key="device_time", device_class=SensorDeviceClass.TIMESTAMP, ), + TPLinkSensorEntityDescription( + key="water_alert_timestamp", + device_class=SensorDeviceClass.TIMESTAMP, + ), TPLinkSensorEntityDescription( key="humidity", device_class=SensorDeviceClass.HUMIDITY, @@ -112,6 +116,7 @@ class TPLinkSensorEntityDescription( TPLinkSensorEntityDescription( key="temperature", device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, ), ) diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index fd63a1031d3a27..8e5118c2720877 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -35,10 +35,6 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reauth": { - "title": "[%key:common::config_flow::title::reauth%]", - "description": "[%key:component::tplink::config::step::user_auth_confirm::description%]" - }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", "description": "[%key:component::tplink::config::step::user_auth_confirm::description%]", @@ -55,7 +51,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "entity": { @@ -162,6 +159,9 @@ "device_time": { "name": "Device time" }, + "water_alert_timestamp": { + "name": "Last water leak alert" + }, "auto_off_at": { "name": "Auto off at" }, @@ -190,6 +190,12 @@ }, "fan_sleep_mode": { "name": "Fan sleep mode" + }, + "child_lock": { + "name": "Child lock" + }, + "pir_enabled": { + "name": "Motion sensor" } }, "number": { diff --git a/homeassistant/components/tplink/switch.py b/homeassistant/components/tplink/switch.py index 
6d3e21d88c5141..c9285d86ba60c0 100644 --- a/homeassistant/components/tplink/switch.py +++ b/homeassistant/components/tplink/switch.py @@ -48,6 +48,12 @@ class TPLinkSwitchEntityDescription( TPLinkSwitchEntityDescription( key="fan_sleep_mode", ), + TPLinkSwitchEntityDescription( + key="child_lock", + ), + TPLinkSwitchEntityDescription( + key="pir_enabled", + ), ) SWITCH_DESCRIPTIONS_MAP = {desc.key: desc for desc in SWITCH_DESCRIPTIONS} diff --git a/homeassistant/components/tplink_omada/__init__.py b/homeassistant/components/tplink_omada/__init__.py index 7890d5936fb2af..573df44122c133 100644 --- a/homeassistant/components/tplink_omada/__init__.py +++ b/homeassistant/components/tplink_omada/__init__.py @@ -24,6 +24,7 @@ PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, + Platform.SENSOR, Platform.SWITCH, Platform.UPDATE, ] diff --git a/homeassistant/components/tplink_omada/binary_sensor.py b/homeassistant/components/tplink_omada/binary_sensor.py index da0c1dd9fc9cdf..73d5f54b8b3c56 100644 --- a/homeassistant/components/tplink_omada/binary_sensor.py +++ b/homeassistant/components/tplink_omada/binary_sensor.py @@ -99,7 +99,6 @@ class OmadaGatewayPortBinarySensor( """Binary status of a property on an internet gateway.""" entity_description: GatewayPortBinarySensorEntityDescription - _attr_has_entity_name = True def __init__( self, diff --git a/homeassistant/components/tplink_omada/config_flow.py b/homeassistant/components/tplink_omada/config_flow.py index 5ea56a9ad9fc97..eeeddb624956e9 100644 --- a/homeassistant/components/tplink_omada/config_flow.py +++ b/homeassistant/components/tplink_omada/config_flow.py @@ -179,15 +179,9 @@ async def async_step_reauth_confirm( if info is not None: # Auth successful - update the config entry with the new credentials - entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._omada_opts ) - assert entry is not None - self.hass.config_entries.async_update_entry( - entry, data=self._omada_opts - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/tplink_omada/const.py b/homeassistant/components/tplink_omada/const.py index f63d82c6bb4a0a..bc55c76c931c65 100644 --- a/homeassistant/components/tplink_omada/const.py +++ b/homeassistant/components/tplink_omada/const.py @@ -1,3 +1,17 @@ """Constants for the TP-Link Omada integration.""" +from enum import StrEnum + DOMAIN = "tplink_omada" + + +class OmadaDeviceStatus(StrEnum): + """Possible composite status values for Omada devices.""" + + DISCONNECTED = "disconnected" + CONNECTED = "connected" + PENDING = "pending" + HEARTBEAT_MISSED = "heartbeat_missed" + ISOLATED = "isolated" + ADOPT_FAILED = "adopt_failed" + MANAGED_EXTERNALLY = "managed_externally" diff --git a/homeassistant/components/tplink_omada/coordinator.py b/homeassistant/components/tplink_omada/coordinator.py index e4f15e6567c539..a80bedeb65eca6 100644 --- a/homeassistant/components/tplink_omada/coordinator.py +++ b/homeassistant/components/tplink_omada/coordinator.py @@ -17,7 +17,7 @@ POLL_SWITCH_PORT = 300 POLL_GATEWAY = 300 POLL_CLIENTS = 300 -POLL_DEVICES = 900 +POLL_DEVICES = 300 class OmadaCoordinator[_T](DataUpdateCoordinator[dict[str, _T]]): diff --git a/homeassistant/components/tplink_omada/entity.py b/homeassistant/components/tplink_omada/entity.py index 
213764aaa12305..54021a2ef86fe9 100644 --- a/homeassistant/components/tplink_omada/entity.py +++ b/homeassistant/components/tplink_omada/entity.py @@ -14,6 +14,8 @@ class OmadaDeviceEntity[_T: OmadaCoordinator[Any]](CoordinatorEntity[_T]): """Common base class for all entities associated with Omada SDN Devices.""" + _attr_has_entity_name = True + def __init__(self, coordinator: _T, device: OmadaDevice) -> None: """Initialize the device.""" super().__init__(coordinator) diff --git a/homeassistant/components/tplink_omada/icons.json b/homeassistant/components/tplink_omada/icons.json index d0c407a9326e54..c681b5e1f81a10 100644 --- a/homeassistant/components/tplink_omada/icons.json +++ b/homeassistant/components/tplink_omada/icons.json @@ -18,6 +18,14 @@ "off": "mdi:cloud-cancel" } } + }, + "sensor": { + "cpu_usage": { + "default": "mdi:cpu-32-bit" + }, + "mem_usage": { + "default": "mdi:memory" + } } } } diff --git a/homeassistant/components/tplink_omada/manifest.json b/homeassistant/components/tplink_omada/manifest.json index 6bde656dc302ec..af20b54675b2db 100644 --- a/homeassistant/components/tplink_omada/manifest.json +++ b/homeassistant/components/tplink_omada/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_omada", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["tplink-omada-client==1.4.2"] + "requirements": ["tplink-omada-client==1.4.3"] } diff --git a/homeassistant/components/tplink_omada/sensor.py b/homeassistant/components/tplink_omada/sensor.py new file mode 100644 index 00000000000000..272334d1b52f42 --- /dev/null +++ b/homeassistant/components/tplink_omada/sensor.py @@ -0,0 +1,132 @@ +"""Support for TPLink Omada binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from tplink_omada_client.definitions import DeviceStatus, DeviceStatusCategory +from tplink_omada_client.devices import OmadaListDevice + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import PERCENTAGE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import OmadaConfigEntry +from .const import OmadaDeviceStatus +from .coordinator import OmadaDevicesCoordinator +from .entity import OmadaDeviceEntity + +# Useful low level status categories, mapped to a more descriptive status. +DEVICE_STATUS_MAP = { + DeviceStatus.PROVISIONING: OmadaDeviceStatus.PENDING, + DeviceStatus.CONFIGURING: OmadaDeviceStatus.PENDING, + DeviceStatus.UPGRADING: OmadaDeviceStatus.PENDING, + DeviceStatus.REBOOTING: OmadaDeviceStatus.PENDING, + DeviceStatus.ADOPT_FAILED: OmadaDeviceStatus.ADOPT_FAILED, + DeviceStatus.ADOPT_FAILED_WIRELESS: OmadaDeviceStatus.ADOPT_FAILED, + DeviceStatus.MANAGED_EXTERNALLY: OmadaDeviceStatus.MANAGED_EXTERNALLY, + DeviceStatus.MANAGED_EXTERNALLY_WIRELESS: OmadaDeviceStatus.MANAGED_EXTERNALLY, +} + +# High level status categories, suitable for most device statuses. 
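A note on the new tplink_omada sensor.py: the display status is resolved by checking the device's specific DeviceStatus first and only falling back to the coarser DeviceStatusCategory (the map that follows). A standalone illustration of that lookup order, with plain enums standing in for the tplink_omada_client types:

from enum import StrEnum


class Status(StrEnum):
    """Stand-in for the client library's fine-grained DeviceStatus."""

    UPGRADING = "upgrading"
    ADOPT_FAILED = "adopt_failed"
    CONNECTED_WIRED = "connected_wired"


class StatusCategory(StrEnum):
    """Stand-in for the coarser DeviceStatusCategory."""

    CONNECTED = "connected"
    PENDING = "pending"


class DisplayStatus(StrEnum):
    """Reduced status set exposed by the sensor (mirrors OmadaDeviceStatus)."""

    CONNECTED = "connected"
    PENDING = "pending"
    ADOPT_FAILED = "adopt_failed"


# Specific statuses that deserve a more precise display value.
SPECIFIC_MAP = {
    Status.UPGRADING: DisplayStatus.PENDING,
    Status.ADOPT_FAILED: DisplayStatus.ADOPT_FAILED,
}
# Fallback for everything else, keyed on the broad category.
CATEGORY_MAP = {
    StatusCategory.CONNECTED: DisplayStatus.CONNECTED,
    StatusCategory.PENDING: DisplayStatus.PENDING,
}


def map_status(status: Status, category: StatusCategory) -> str | None:
    """Specific status wins; otherwise fall back to the category."""
    display = SPECIFIC_MAP.get(status) or CATEGORY_MAP.get(category)
    return display.value if display else None


# An upgrading device reads as "pending" even though its category is CONNECTED.
assert map_status(Status.UPGRADING, StatusCategory.CONNECTED) == "pending"
assert map_status(Status.CONNECTED_WIRED, StatusCategory.CONNECTED) == "connected"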
+DEVICE_STATUS_CATEGORY_MAP = { + DeviceStatusCategory.DISCONNECTED: OmadaDeviceStatus.DISCONNECTED, + DeviceStatusCategory.CONNECTED: OmadaDeviceStatus.CONNECTED, + DeviceStatusCategory.PENDING: OmadaDeviceStatus.PENDING, + DeviceStatusCategory.HEARTBEAT_MISSED: OmadaDeviceStatus.HEARTBEAT_MISSED, + DeviceStatusCategory.ISOLATED: OmadaDeviceStatus.ISOLATED, +} + + +def _map_device_status(device: OmadaListDevice) -> str | None: + """Map the API device status to the best available descriptive device status.""" + display_status = DEVICE_STATUS_MAP.get( + device.status + ) or DEVICE_STATUS_CATEGORY_MAP.get(device.status_category) + return display_status.value if display_status else None + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OmadaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors.""" + controller = config_entry.runtime_data + + devices_coordinator = controller.devices_coordinator + + async_add_entities( + OmadaDeviceSensor(devices_coordinator, device, desc) + for device in devices_coordinator.data.values() + for desc in OMADA_DEVICE_SENSORS + if desc.exists_func(device) + ) + + +@dataclass(frozen=True, kw_only=True) +class OmadaDeviceSensorEntityDescription(SensorEntityDescription): + """Entity description for a status derived from an Omada device in the device list.""" + + exists_func: Callable[[OmadaListDevice], bool] = lambda _: True + update_func: Callable[[OmadaListDevice], StateType] + + +OMADA_DEVICE_SENSORS: list[OmadaDeviceSensorEntityDescription] = [ + OmadaDeviceSensorEntityDescription( + key="device_status", + translation_key="device_status", + device_class=SensorDeviceClass.ENUM, + entity_category=EntityCategory.DIAGNOSTIC, + update_func=_map_device_status, + options=[v.value for v in OmadaDeviceStatus], + ), + OmadaDeviceSensorEntityDescription( + key="cpu_usage", + translation_key="cpu_usage", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + update_func=lambda device: device.cpu_usage, + ), + OmadaDeviceSensorEntityDescription( + key="mem_usage", + translation_key="mem_usage", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + update_func=lambda device: device.mem_usage, + ), +] + + +class OmadaDeviceSensor(OmadaDeviceEntity[OmadaDevicesCoordinator], SensorEntity): + """Sensor for property of a generic Omada device.""" + + entity_description: OmadaDeviceSensorEntityDescription + + def __init__( + self, + coordinator: OmadaDevicesCoordinator, + device: OmadaListDevice, + entity_description: OmadaDeviceSensorEntityDescription, + ) -> None: + """Initialize the device sensor.""" + super().__init__(coordinator, device) + self.entity_description = entity_description + self._attr_unique_id = f"{device.mac}_{entity_description.key}" + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.update_func( + self.coordinator.data[self.device.mac] + ) diff --git a/homeassistant/components/tplink_omada/strings.json b/homeassistant/components/tplink_omada/strings.json index 49873b7d0884c8..7fcede3fb12213 100644 --- a/homeassistant/components/tplink_omada/strings.json +++ b/homeassistant/components/tplink_omada/strings.json @@ -65,6 +65,27 @@ "poe_delivery": { "name": "Port {port_name} PoE Delivery" } + }, + "sensor": { + "device_status": { + "name": "Device status", + "state": { + "error": 
"Error", + "disconnected": "[%key:common::state::disconnected%]", + "connected": "[%key:common::state::connected%]", + "pending": "Pending", + "heartbeat_missed": "Heartbeat missed", + "isolated": "Isolated", + "adopt_failed": "Adopt failed", + "managed_externally": "Managed externally" + } + }, + "cpu_usage": { + "name": "CPU usage" + }, + "mem_usage": { + "name": "Memory usage" + } } } } diff --git a/homeassistant/components/tplink_omada/switch.py b/homeassistant/components/tplink_omada/switch.py index 26bedc5a88e5e7..f99d8aaedde1f4 100644 --- a/homeassistant/components/tplink_omada/switch.py +++ b/homeassistant/components/tplink_omada/switch.py @@ -229,7 +229,6 @@ class OmadaDevicePortSwitchEntity( ): """Generic toggle switch entity for a Netork Port of an Omada Device.""" - _attr_has_entity_name = True entity_description: OmadaDevicePortSwitchEntityDescription[ TCoordinator, TDevice, TPort ] diff --git a/homeassistant/components/tplink_omada/update.py b/homeassistant/components/tplink_omada/update.py index d1e0a08b803e42..54b586794bedaf 100644 --- a/homeassistant/components/tplink_omada/update.py +++ b/homeassistant/components/tplink_omada/update.py @@ -119,7 +119,6 @@ class OmadaDeviceUpdate( | UpdateEntityFeature.PROGRESS | UpdateEntityFeature.RELEASE_NOTES ) - _attr_has_entity_name = True _attr_device_class = UpdateDeviceClass.FIRMWARE def __init__( diff --git a/homeassistant/components/trafikverket_camera/config_flow.py b/homeassistant/components/trafikverket_camera/config_flow.py index 19e0adf45e431f..18e210beb16c23 100644 --- a/homeassistant/components/trafikverket_camera/config_flow.py +++ b/homeassistant/components/trafikverket_camera/config_flow.py @@ -10,7 +10,11 @@ from pytrafikverket.trafikverket_camera import TrafikverketCamera import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_LOCATION from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import ( @@ -29,7 +33,6 @@ class TVCameraConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 3 - entry: ConfigEntry cameras: list[CameraInfoModel] api_key: str @@ -58,7 +61,6 @@ async def async_step_reauth( ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - self.entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -70,16 +72,12 @@ async def async_step_reauth_confirm( if user_input: api_key = user_input[CONF_API_KEY] - assert self.entry is not None - errors, _ = await self.validate_input(api_key, self.entry.data[CONF_ID]) + reauth_entry = self._get_reauth_entry() + errors, _ = await self.validate_input(api_key, reauth_entry.data[CONF_ID]) if not errors: return self.async_update_reload_and_abort( - self.entry, - data={ - **self.entry.data, - CONF_API_KEY: api_key, - }, + reauth_entry, data_updates={CONF_API_KEY: api_key} ) return self.async_show_form( @@ -96,15 +94,8 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle re-configuration with Trafikverket.""" - - self.entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Confirm re-configuration with 
Trafikverket.""" errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() if user_input: api_key = user_input[CONF_API_KEY] @@ -120,11 +111,10 @@ async def async_step_reconfigure_confirm( await self.async_set_unique_id(f"{DOMAIN}-{cameras[0].camera_id}") self._abort_if_unique_id_configured() return self.async_update_reload_and_abort( - self.entry, + reconfigure_entry, unique_id=f"{DOMAIN}-{cameras[0].camera_id}", title=cameras[0].camera_name or "Trafikverket Camera", data={CONF_API_KEY: api_key, CONF_ID: cameras[0].camera_id}, - reason="reconfigure_successful", ) schema = self.add_suggested_values_to_schema( @@ -134,11 +124,11 @@ async def async_step_reconfigure_confirm( vol.Required(CONF_LOCATION): TextSelector(), } ), - {**self.entry.data, **(user_input or {})}, + {**reconfigure_entry.data, **(user_input or {})}, ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=schema, errors=errors, ) @@ -189,16 +179,15 @@ async def async_step_multiple_cameras( ) if not errors and cameras: - if hasattr(self, "entry") and self.entry: + if self.source == SOURCE_RECONFIGURE: return self.async_update_reload_and_abort( - self.entry, + self._get_reconfigure_entry(), unique_id=f"{DOMAIN}-{cameras[0].camera_id}", title=cameras[0].camera_name or "Trafikverket Camera", data={ CONF_API_KEY: self.api_key, CONF_ID: cameras[0].camera_id, }, - reason="reconfigure_successful", ) await self.async_set_unique_id(f"{DOMAIN}-{cameras[0].camera_id}") self._abort_if_unique_id_configured() diff --git a/homeassistant/components/trafikverket_camera/strings.json b/homeassistant/components/trafikverket_camera/strings.json index 142dcba5e85429..b6e2209fc57492 100644 --- a/homeassistant/components/trafikverket_camera/strings.json +++ b/homeassistant/components/trafikverket_camera/strings.json @@ -26,6 +26,20 @@ "data": { "id": "Choose camera" } + }, + "reauth_confirm": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + } + }, + "reconfigure": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]", + "location": "[%key:common::config_flow::data::location%]" + }, + "data_description": { + "location": "[%key:component::trafikverket_camera::config::step::user::data_description::location%]" + } } } }, diff --git a/homeassistant/components/trafikverket_ferry/config_flow.py b/homeassistant/components/trafikverket_ferry/config_flow.py index 1f82a535f1666e..002dc421273a02 100644 --- a/homeassistant/components/trafikverket_ferry/config_flow.py +++ b/homeassistant/components/trafikverket_ferry/config_flow.py @@ -9,7 +9,7 @@ from pytrafikverket.exceptions import InvalidAuthentication, NoFerryFound import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_WEEKDAY, WEEKDAYS from homeassistant.helpers import selector from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -49,8 +49,6 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None - async def validate_input( self, api_key: str, ferry_from: str, ferry_to: str ) -> None: @@ -63,8 +61,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await 
self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -76,10 +72,10 @@ async def async_step_reauth_confirm( if user_input: api_key = user_input[CONF_API_KEY] - assert self.entry is not None + reauth_entry = self._get_reauth_entry() try: await self.validate_input( - api_key, self.entry.data[CONF_FROM], self.entry.data[CONF_TO] + api_key, reauth_entry.data[CONF_FROM], reauth_entry.data[CONF_TO] ) except InvalidAuthentication: errors["base"] = "invalid_auth" @@ -88,15 +84,10 @@ async def async_step_reauth_confirm( except Exception: # noqa: BLE001 errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_API_KEY: api_key, - }, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_API_KEY: api_key}, ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/trafikverket_train/config_flow.py b/homeassistant/components/trafikverket_train/config_flow.py index d03eeca8f653d1..f498a7b0d0e469 100644 --- a/homeassistant/components/trafikverket_train/config_flow.py +++ b/homeassistant/components/trafikverket_train/config_flow.py @@ -21,7 +21,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_WEEKDAY, WEEKDAYS from homeassistant.core import HomeAssistant, callback @@ -126,22 +126,18 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry | None - @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, ) -> TVTrainOptionsFlowHandler: """Get the options flow for this handler.""" - return TVTrainOptionsFlowHandler(config_entry) + return TVTrainOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -153,26 +149,21 @@ async def async_step_reauth_confirm( if user_input: api_key = user_input[CONF_API_KEY] - assert self.entry is not None + reauth_entry = self._get_reauth_entry() errors = await validate_input( self.hass, api_key, - self.entry.data[CONF_FROM], - self.entry.data[CONF_TO], - self.entry.data.get(CONF_TIME), - self.entry.data[CONF_WEEKDAY], - self.entry.options.get(CONF_FILTER_PRODUCT), + reauth_entry.data[CONF_FROM], + reauth_entry.data[CONF_TO], + reauth_entry.data.get(CONF_TIME), + reauth_entry.data[CONF_WEEKDAY], + reauth_entry.options.get(CONF_FILTER_PRODUCT), ) if not errors: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_API_KEY: api_key, - }, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_API_KEY: api_key}, ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", @@ -238,7 +229,7 @@ async def async_step_user( ) -class TVTrainOptionsFlowHandler(OptionsFlowWithConfigEntry): +class TVTrainOptionsFlowHandler(OptionsFlow): """Handle Trafikverket Train options.""" async def async_step_init( @@ -256,7 +247,7 @@ async def async_step_init( step_id="init", 
data_schema=self.add_suggested_values_to_schema( vol.Schema(OPTION_SCHEMA), - user_input or self.options, + user_input or self.config_entry.options, ), errors=errors, ) diff --git a/homeassistant/components/trafikverket_weatherstation/config_flow.py b/homeassistant/components/trafikverket_weatherstation/config_flow.py index 3e639a930ada5d..28b9a124fc6a16 100644 --- a/homeassistant/components/trafikverket_weatherstation/config_flow.py +++ b/homeassistant/components/trafikverket_weatherstation/config_flow.py @@ -13,7 +13,7 @@ from pytrafikverket.trafikverket_weather import TrafikverketWeather import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv @@ -31,8 +31,6 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: ConfigEntry - async def validate_input(self, sensor_api: str, station: str) -> None: """Validate input from user input.""" web_session = async_get_clientsession(self.hass) @@ -84,8 +82,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Trafikverket.""" - - self.entry = self._get_reauth_entry() return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -93,12 +89,13 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm re-authentication with Trafikverket.""" errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() if user_input: api_key = user_input[CONF_API_KEY] try: - await self.validate_input(api_key, self.entry.data[CONF_STATION]) + await self.validate_input(api_key, reauth_entry.data[CONF_STATION]) except InvalidAuthentication: errors["base"] = "invalid_auth" except NoWeatherStationFound: @@ -109,7 +106,7 @@ async def async_step_reauth_confirm( errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( - self.entry, data={**self.entry.data, CONF_API_KEY: api_key} + reauth_entry, data_updates={CONF_API_KEY: api_key} ) return self.async_show_form( @@ -122,14 +119,6 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle re-configuration with Trafikverket.""" - - self.entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Confirm re-configuration with Trafikverket.""" errors: dict[str, str] = {} if user_input: @@ -147,10 +136,9 @@ async def async_step_reconfigure_confirm( errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( - self.entry, + self._get_reconfigure_entry(), title=user_input[CONF_STATION], data=user_input, - reason="reconfigure_successful", ) schema = self.add_suggested_values_to_schema( @@ -162,11 +150,11 @@ async def async_step_reconfigure_confirm( vol.Required(CONF_STATION): TextSelector(), } ), - {**self.entry.data, **(user_input or {})}, + {**self._get_reconfigure_entry().data, **(user_input or {})}, ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=schema, errors=errors, ) diff --git a/homeassistant/components/trafikverket_weatherstation/strings.json 
b/homeassistant/components/trafikverket_weatherstation/strings.json index 81d970e18e309c..90a9f9ba7c1535 100644 --- a/homeassistant/components/trafikverket_weatherstation/strings.json +++ b/homeassistant/components/trafikverket_weatherstation/strings.json @@ -23,7 +23,7 @@ "api_key": "[%key:common::config_flow::data::api_key%]" } }, - "reconfigure_confirm": { + "reconfigure": { "data": { "api_key": "[%key:common::config_flow::data::api_key%]", "station": "[%key:component::trafikverket_weatherstation::config::step::user::data::station%]" diff --git a/homeassistant/components/transmission/config_flow.py b/homeassistant/components/transmission/config_flow.py index 2a4fd5aae0be8e..30e9f5a146bde8 100644 --- a/homeassistant/components/transmission/config_flow.py +++ b/homeassistant/components/transmission/config_flow.py @@ -15,6 +15,7 @@ ) from homeassistant.const import ( CONF_HOST, + CONF_NAME, CONF_PASSWORD, CONF_PATH, CONF_PORT, @@ -55,7 +56,6 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 2 - _reauth_entry: ConfigEntry | None @staticmethod @callback @@ -63,7 +63,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> TransmissionOptionsFlowHandler: """Get the options flow for this handler.""" - return TransmissionOptionsFlowHandler(config_entry) + return TransmissionOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -100,9 +100,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -110,9 +107,9 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" errors = {} - assert self._reauth_entry + reauth_entry = self._get_reauth_entry() if user_input is not None: - user_input = {**self._reauth_entry.data, **user_input} + user_input = {**reauth_entry.data, **user_input} try: await get_api(self.hass, user_input) @@ -121,15 +118,12 @@ async def async_step_reauth_confirm( except (CannotConnect, UnknownError): errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input - ) - await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=user_input) return self.async_show_form( description_placeholders={ - CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME] + CONF_USERNAME: reauth_entry.data[CONF_USERNAME], + CONF_NAME: reauth_entry.title, }, step_id="reauth_confirm", data_schema=vol.Schema( @@ -144,10 +138,6 @@ async def async_step_reauth_confirm( class TransmissionOptionsFlowHandler(OptionsFlow): """Handle Transmission client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Transmission options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/trend/binary_sensor.py b/homeassistant/components/trend/binary_sensor.py index 693c080e86eeef..681680f180fb08 100644 --- a/homeassistant/components/trend/binary_sensor.py +++ b/homeassistant/components/trend/binary_sensor.py @@ -199,11 +199,6 @@ def __init__( if sensor_entity_id: self.entity_id = 
sensor_entity_id - @property - def is_on(self) -> bool | None: - """Return true if sensor is on.""" - return self._state - @property def extra_state_attributes(self) -> Mapping[str, Any]: """Return the state attributes of the sensor.""" @@ -247,9 +242,9 @@ def trend_sensor_state_listener( if not (state := await self.async_get_last_state()): return - if state.state == STATE_UNKNOWN: + if state.state in {STATE_UNKNOWN, STATE_UNAVAILABLE}: return - self._state = state.state == STATE_ON + self._attr_is_on = state.state == STATE_ON async def async_update(self) -> None: """Get the latest data and update the states.""" @@ -266,13 +261,13 @@ async def async_update(self) -> None: await self.hass.async_add_executor_job(self._calculate_gradient) # Update state - self._state = ( + self._attr_is_on = ( abs(self._gradient) > abs(self._min_gradient) and math.copysign(self._gradient, self._min_gradient) == self._gradient ) if self._invert: - self._state = not self._state + self._attr_is_on = not self._attr_is_on def _calculate_gradient(self) -> None: """Compute the linear trend gradient of the current samples. diff --git a/homeassistant/components/trend/manifest.json b/homeassistant/components/trend/manifest.json index 56b4b811171bf6..d7981105fd2dad 100644 --- a/homeassistant/components/trend/manifest.json +++ b/homeassistant/components/trend/manifest.json @@ -7,5 +7,5 @@ "integration_type": "helper", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["numpy==1.26.4"] + "requirements": ["numpy==2.1.3"] } diff --git a/homeassistant/components/trend/strings.json b/homeassistant/components/trend/strings.json index 2fe0b35ee3cb21..fb70a6e70320fe 100644 --- a/homeassistant/components/trend/strings.json +++ b/homeassistant/components/trend/strings.json @@ -1,4 +1,5 @@ { + "title": "Trend", "services": { "reload": { "name": "[%key:common::action::reload%]", diff --git a/homeassistant/components/triggercmd/config_flow.py b/homeassistant/components/triggercmd/config_flow.py index f39d3abc9d42d4..fc02dd0b2fc79f 100644 --- a/homeassistant/components/triggercmd/config_flow.py +++ b/homeassistant/components/triggercmd/config_flow.py @@ -56,7 +56,7 @@ async def async_step_user( except InvalidToken: errors[CONF_TOKEN] = "invalid_token" except TRIGGERcmdConnectionError: - errors["base"] = "connection_error" + errors["base"] = "cannot_connect" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" diff --git a/homeassistant/components/triggercmd/manifest.json b/homeassistant/components/triggercmd/manifest.json index b71a5b83a81bf9..a0ee4eaf63eab4 100644 --- a/homeassistant/components/triggercmd/manifest.json +++ b/homeassistant/components/triggercmd/manifest.json @@ -3,7 +3,7 @@ "name": "TRIGGERcmd", "codeowners": ["@rvmey"], "config_flow": true, - "documentation": "https://docs.triggercmd.com", + "documentation": "https://www.home-assistant.io/integrations/triggercmd", "integration_type": "hub", "iot_class": "cloud_polling", "requirements": ["triggercmd==0.0.27"] diff --git a/homeassistant/components/triggercmd/strings.json b/homeassistant/components/triggercmd/strings.json index cbbbbc312be4bb..6725b92f59facd 100644 --- a/homeassistant/components/triggercmd/strings.json +++ b/homeassistant/components/triggercmd/strings.json @@ -13,6 +13,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_token": "Invalid token", 
"unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { diff --git a/homeassistant/components/tuya/__init__.py b/homeassistant/components/tuya/__init__.py index 35abccd9ca9e6e..242f5b5d004b50 100644 --- a/homeassistant/components/tuya/__init__.py +++ b/homeassistant/components/tuya/__init__.py @@ -147,14 +147,21 @@ def __init__( self.hass = hass self.manager = manager - def update_device(self, device: CustomerDevice) -> None: + def update_device( + self, device: CustomerDevice, updated_status_properties: list[str] | None + ) -> None: """Update device status.""" LOGGER.debug( - "Received update for device %s: %s", + "Received update for device %s: %s (updated properties: %s)", device.id, self.manager.device_map[device.id].status, + updated_status_properties, + ) + dispatcher_send( + self.hass, + f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}", + updated_status_properties, ) - dispatcher_send(self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}") def add_device(self, device: CustomerDevice) -> None: """Add device added listener.""" diff --git a/homeassistant/components/tuya/alarm_control_panel.py b/homeassistant/components/tuya/alarm_control_panel.py index fbea8d352a0c79..56bccc73581908 100644 --- a/homeassistant/components/tuya/alarm_control_panel.py +++ b/homeassistant/components/tuya/alarm_control_panel.py @@ -10,12 +10,7 @@ AlarmControlPanelEntity, AlarmControlPanelEntityDescription, AlarmControlPanelEntityFeature, -) -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -35,11 +30,11 @@ class Mode(StrEnum): SOS = "sos" -STATE_MAPPING: dict[str, str] = { - Mode.DISARMED: STATE_ALARM_DISARMED, - Mode.ARM: STATE_ALARM_ARMED_AWAY, - Mode.HOME: STATE_ALARM_ARMED_HOME, - Mode.SOS: STATE_ALARM_TRIGGERED, +STATE_MAPPING: dict[str, AlarmControlPanelState] = { + Mode.DISARMED: AlarmControlPanelState.DISARMED, + Mode.ARM: AlarmControlPanelState.ARMED_AWAY, + Mode.HOME: AlarmControlPanelState.ARMED_HOME, + Mode.SOS: AlarmControlPanelState.TRIGGERED, } @@ -115,7 +110,7 @@ def __init__( self._attr_supported_features |= AlarmControlPanelEntityFeature.TRIGGER @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the device.""" if not (status := self.device.status.get(self.entity_description.key)): return None diff --git a/homeassistant/components/tuya/binary_sensor.py b/homeassistant/components/tuya/binary_sensor.py index a8c9157caa73ee..12661a26fd16b5 100644 --- a/homeassistant/components/tuya/binary_sensor.py +++ b/homeassistant/components/tuya/binary_sensor.py @@ -151,7 +151,7 @@ class TuyaBinarySensorEntityDescription(BinarySensorEntityDescription): TuyaBinarySensorEntityDescription( key=DPCode.PRESENCE_STATE, device_class=BinarySensorDeviceClass.OCCUPANCY, - on_value="presence", + on_value={"presence", "small_move", "large_move", "peaceful"}, ), ), # Formaldehyde Detector diff --git a/homeassistant/components/tuya/config_flow.py b/homeassistant/components/tuya/config_flow.py index 104c3b7c9fa174..30d04eb61e28f0 100644 --- a/homeassistant/components/tuya/config_flow.py +++ b/homeassistant/components/tuya/config_flow.py @@ -8,7 +8,7 @@ from tuya_sharing import LoginControl import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult 
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.helpers import selector from .const import ( @@ -32,7 +32,6 @@ class TuyaConfigFlow(ConfigFlow, domain=DOMAIN): __user_code: str __qr_code: str - __reauth_entry: ConfigEntry | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -135,9 +134,9 @@ async def async_step_scan( CONF_ENDPOINT: info[CONF_ENDPOINT], } - if self.__reauth_entry: + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( - self.__reauth_entry, + self._get_reauth_entry(), data=entry_data, ) @@ -150,14 +149,8 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Tuya.""" - self.__reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - - if self.__reauth_entry and CONF_USER_CODE in self.__reauth_entry.data: - success, _ = await self.__async_get_qr_code( - self.__reauth_entry.data[CONF_USER_CODE] - ) + if CONF_USER_CODE in entry_data: + success, _ = await self.__async_get_qr_code(entry_data[CONF_USER_CODE]) if success: return await self.async_step_scan() diff --git a/homeassistant/components/tuya/entity.py b/homeassistant/components/tuya/entity.py index 98016d96032fee..cc258560067fdc 100644 --- a/homeassistant/components/tuya/entity.py +++ b/homeassistant/components/tuya/entity.py @@ -17,6 +17,17 @@ from .const import DOMAIN, LOGGER, TUYA_HA_SIGNAL_UPDATE_ENTITY, DPCode, DPType from .util import remap_value +_DPTYPE_MAPPING: dict[str, DPType] = { + "Bitmap": DPType.RAW, + "bitmap": DPType.RAW, + "bool": DPType.BOOLEAN, + "enum": DPType.ENUM, + "json": DPType.JSON, + "raw": DPType.RAW, + "string": DPType.STRING, + "value": DPType.INTEGER, +} + @dataclass class IntegerTypeData: @@ -256,38 +267,14 @@ def get_dptype( order = ["function", "status_range"] for key in order: if dpcode in getattr(self.device, key): + current_type = getattr(self.device, key)[dpcode].type try: - return DPType(getattr(self.device, key)[dpcode].type) + return DPType(current_type) except ValueError: - fixed_type = TuyaEntity.determine_dptype( - getattr(self.device, key)[dpcode].type - ) - LOGGER.warning( - f'Device {self.device.name} (id: {self.device.id}) has returned a bad model dpId type for code {dpcode} : "{getattr(self.device, key)[dpcode].type}", it should have been "{fixed_type}". Please contact the Tuya Support about this' - ) - return fixed_type - - return None + # Sometimes, we get ill-formed DPTypes from the cloud, + # this fixes them and maps them to the correct DPType. + return _DPTYPE_MAPPING.get(current_type) - @staticmethod - def determine_dptype(type) -> DPType | None: - """Determine the DPType. 
- - Sometimes, we get ill formed DPTypes from the cloud, - this fixes them and maps them to the correct DPType - """ - if type == "value": - return DPType(DPType.INTEGER) - if type in ("bitmap", "raw"): - return DPType(DPType.RAW) - if type == "enum": - return DPType(DPType.ENUM) - if type == "bool": - return DPType(DPType.BOOLEAN) - if type == "json": - return DPType(DPType.JSON) - if type == "string": - return DPType(DPType.STRING) return None async def async_added_to_hass(self) -> None: @@ -296,10 +283,15 @@ async def async_added_to_hass(self) -> None: async_dispatcher_connect( self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{self.device.id}", - self.async_write_ha_state, + self._handle_state_update, ) ) + async def _handle_state_update( + self, updated_status_properties: list[str] | None + ) -> None: + self.async_write_ha_state() + def _send_command(self, commands: list[dict[str, Any]]) -> None: """Send command to the device.""" LOGGER.debug("Sending commands for device %s: %s", self.device.id, commands) diff --git a/homeassistant/components/tuya/manifest.json b/homeassistant/components/tuya/manifest.json index 305a74160de21d..b53e6fa27d8365 100644 --- a/homeassistant/components/tuya/manifest.json +++ b/homeassistant/components/tuya/manifest.json @@ -43,5 +43,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["tuya_iot"], - "requirements": ["tuya-device-sharing-sdk==0.1.9"] + "requirements": ["tuya-device-sharing-sdk==0.2.1"] } diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index fd8efcac95df98..b9677037b7eddc 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -203,6 +203,17 @@ class TuyaSensorEntityDescription(SensorEntityDescription): device_class=SensorDeviceClass.CO2, state_class=SensorStateClass.MEASUREMENT, ), + TuyaSensorEntityDescription( + key=DPCode.CH2O_VALUE, + translation_key="formaldehyde", + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.VOC_VALUE, + translation_key="voc", + device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, + state_class=SensorStateClass.MEASUREMENT, + ), *BATTERY_SENSORS, ), # Two-way temperature and humidity switch diff --git a/homeassistant/components/twentemilieu/__init__.py b/homeassistant/components/twentemilieu/__init__.py index f447ef6257d921..b6728b96536068 100644 --- a/homeassistant/components/twentemilieu/__init__.py +++ b/homeassistant/components/twentemilieu/__init__.py @@ -42,6 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator: TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, update_method=twentemilieu.update, diff --git a/homeassistant/components/twitch/__init__.py b/homeassistant/components/twitch/__init__.py index 40a744684b9cde..6979a016447da1 100644 --- a/homeassistant/components/twitch/__init__.py +++ b/homeassistant/components/twitch/__init__.py @@ -17,7 +17,8 @@ async_get_config_entry_implementation, ) -from .const import CLIENT, DOMAIN, OAUTH_SCOPES, PLATFORMS, SESSION +from .const import DOMAIN, OAUTH_SCOPES, PLATFORMS +from .coordinator import TwitchCoordinator async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -46,10 +47,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client.auto_refresh_auth = False await client.set_user_authentication(access_token, 
scope=OAUTH_SCOPES) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { - CLIENT: client, - SESSION: session, - } + coordinator = TwitchCoordinator(hass, client, session) + + await coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/twitch/config_flow.py b/homeassistant/components/twitch/config_flow.py index 7f006f194f5ce4..ed196897c113a1 100644 --- a/homeassistant/components/twitch/config_flow.py +++ b/homeassistant/components/twitch/config_flow.py @@ -9,7 +9,7 @@ from twitchAPI.helper import first from twitchAPI.twitch import Twitch -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.config_entry_oauth2_flow import LocalOAuth2Implementation @@ -23,7 +23,6 @@ class OAuth2FlowHandler( """Config flow to handle Twitch OAuth2 authentication.""" DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None def __init__(self) -> None: """Initialize flow.""" @@ -63,8 +62,8 @@ async def async_oauth_create_entry( user_id = user.id - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() channels = [ @@ -76,38 +75,36 @@ async def async_oauth_create_entry( title=user.display_name, data=data, options={CONF_CHANNELS: channels} ) - if self.reauth_entry.unique_id == user_id: - new_channels = self.reauth_entry.options[CONF_CHANNELS] - # Since we could not get all channels at import, we do it at the reauth - # immediately after. - if "imported" in self.reauth_entry.data: - channels = [ - channel.broadcaster_login - async for channel in await client.get_followed_channels(user_id) - ] - options = list(set(channels) - set(new_channels)) - new_channels = [*new_channels, *options] - - self.hass.config_entries.async_update_entry( - self.reauth_entry, - data=data, - options={CONF_CHANNELS: new_channels}, - ) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( + reauth_entry = self._get_reauth_entry() + self._abort_if_unique_id_mismatch( reason="wrong_account", - description_placeholders={"title": self.reauth_entry.title}, + description_placeholders={ + "title": reauth_entry.title, + "username": str(reauth_entry.unique_id), + }, + ) + + new_channels = reauth_entry.options[CONF_CHANNELS] + # Since we could not get all channels at import, we do it at the reauth + # immediately after. 
+ if "imported" in reauth_entry.data: + channels = [ + channel.broadcaster_login + async for channel in await client.get_followed_channels(user_id) + ] + options = list(set(channels) - set(new_channels)) + new_channels = [*new_channels, *options] + + return self.async_update_reload_and_abort( + reauth_entry, + data=data, + options={CONF_CHANNELS: new_channels}, ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/twitch/const.py b/homeassistant/components/twitch/const.py index b46bf8113b48a6..fc7c2f734875e9 100644 --- a/homeassistant/components/twitch/const.py +++ b/homeassistant/components/twitch/const.py @@ -17,7 +17,5 @@ DOMAIN = "twitch" CONF_CHANNELS = "channels" -CLIENT = "client" -SESSION = "session" OAUTH_SCOPES = [AuthScope.USER_READ_SUBSCRIPTIONS, AuthScope.USER_READ_FOLLOWS] diff --git a/homeassistant/components/twitch/coordinator.py b/homeassistant/components/twitch/coordinator.py new file mode 100644 index 00000000000000..c34eeaa5325bf0 --- /dev/null +++ b/homeassistant/components/twitch/coordinator.py @@ -0,0 +1,127 @@ +"""Define a class to manage fetching Twitch data.""" + +from dataclasses import dataclass +from datetime import datetime, timedelta + +from twitchAPI.helper import first +from twitchAPI.object.api import FollowedChannel, Stream, TwitchUser, UserSubscription +from twitchAPI.twitch import Twitch +from twitchAPI.type import TwitchAPIException, TwitchResourceNotFound + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import CONF_CHANNELS, DOMAIN, LOGGER, OAUTH_SCOPES + + +def chunk_list(lst: list, chunk_size: int) -> list[list]: + """Split a list into chunks of chunk_size.""" + return [lst[i : i + chunk_size] for i in range(0, len(lst), chunk_size)] + + +@dataclass +class TwitchUpdate: + """Class for holding Twitch data.""" + + name: str + followers: int + is_streaming: bool + game: str | None + title: str | None + started_at: datetime | None + stream_picture: str | None + picture: str + subscribed: bool | None + subscription_gifted: bool | None + subscription_tier: int | None + follows: bool + following_since: datetime | None + viewers: int | None + + +class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]): + """Class to manage fetching Twitch data.""" + + config_entry: ConfigEntry + users: list[TwitchUser] + current_user: TwitchUser + + def __init__( + self, hass: HomeAssistant, twitch: Twitch, session: OAuth2Session + ) -> None: + """Initialize the coordinator.""" + self.twitch = twitch + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=timedelta(minutes=5), + ) + self.session = session + + async def _async_setup(self) -> None: + channels = self.config_entry.options[CONF_CHANNELS] + self.users = [] + # Split channels into chunks of 100 to avoid hitting the rate limit + for chunk in chunk_list(channels, 100): + self.users.extend( + [channel async for channel in self.twitch.get_users(logins=chunk)] + ) + if not (user := await first(self.twitch.get_users())): + raise UpdateFailed("Logged in user not 
found") + self.current_user = user + + async def _async_update_data(self) -> dict[str, TwitchUpdate]: + await self.session.async_ensure_token_valid() + await self.twitch.set_user_authentication( + self.session.token["access_token"], + OAUTH_SCOPES, + self.session.token["refresh_token"], + False, + ) + data: dict[str, TwitchUpdate] = {} + streams: dict[str, Stream] = { + s.user_id: s + async for s in self.twitch.get_followed_streams( + user_id=self.current_user.id, first=100 + ) + } + follows: dict[str, FollowedChannel] = { + f.broadcaster_id: f + async for f in await self.twitch.get_followed_channels( + user_id=self.current_user.id, first=100 + ) + } + for channel in self.users: + followers = await self.twitch.get_channel_followers(channel.id) + stream = streams.get(channel.id) + follow = follows.get(channel.id) + sub: UserSubscription | None = None + try: + sub = await self.twitch.check_user_subscription( + user_id=self.current_user.id, broadcaster_id=channel.id + ) + except TwitchResourceNotFound: + LOGGER.debug("User is not subscribed to %s", channel.display_name) + except TwitchAPIException as exc: + LOGGER.error("Error response on check_user_subscription: %s", exc) + + data[channel.id] = TwitchUpdate( + channel.display_name, + followers.total, + bool(stream), + stream.game_name if stream else None, + stream.title if stream else None, + stream.started_at if stream else None, + stream.thumbnail_url if stream else None, + channel.profile_image_url, + bool(sub), + sub.is_gift if sub else None, + {"1000": 1, "2000": 2, "3000": 3}.get(sub.tier) if sub else None, + bool(follow), + follow.followed_at if follow else None, + stream.viewer_count if stream else None, + ) + return data diff --git a/homeassistant/components/twitch/sensor.py b/homeassistant/components/twitch/sensor.py index a6e2f4e04afee7..bd5fc509989ede 100644 --- a/homeassistant/components/twitch/sensor.py +++ b/homeassistant/components/twitch/sensor.py @@ -2,32 +2,28 @@ from __future__ import annotations -from twitchAPI.helper import first -from twitchAPI.twitch import ( - AuthType, - Twitch, - TwitchAPIException, - TwitchResourceNotFound, - TwitchUser, -) +from typing import Any from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CLIENT, CONF_CHANNELS, DOMAIN, LOGGER, OAUTH_SCOPES, SESSION +from . 
import TwitchCoordinator +from .const import DOMAIN +from .coordinator import TwitchUpdate ATTR_GAME = "game" ATTR_TITLE = "title" ATTR_SUBSCRIPTION = "subscribed" -ATTR_SUBSCRIPTION_SINCE = "subscribed_since" ATTR_SUBSCRIPTION_GIFTED = "subscription_is_gifted" +ATTR_SUBSCRIPTION_TIER = "subscription_tier" ATTR_FOLLOW = "following" ATTR_FOLLOW_SINCE = "following_since" ATTR_FOLLOWING = "followers" -ATTR_VIEWS = "views" +ATTR_VIEWERS = "viewers" ATTR_STARTED_AT = "started_at" STATE_OFFLINE = "offline" @@ -36,109 +32,71 @@ PARALLEL_UPDATES = 1 -def chunk_list(lst: list, chunk_size: int) -> list[list]: - """Split a list into chunks of chunk_size.""" - return [lst[i : i + chunk_size] for i in range(0, len(lst), chunk_size)] - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Initialize entries.""" - client = hass.data[DOMAIN][entry.entry_id][CLIENT] - session = hass.data[DOMAIN][entry.entry_id][SESSION] - - channels = entry.options[CONF_CHANNELS] - - entities: list[TwitchSensor] = [] - - # Split channels into chunks of 100 to avoid hitting the rate limit - for chunk in chunk_list(channels, 100): - entities.extend( - [ - TwitchSensor(channel, session, client) - async for channel in client.get_users(logins=chunk) - ] - ) + coordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities(entities, True) + async_add_entities( + TwitchSensor(coordinator, channel_id) for channel_id in coordinator.data + ) -class TwitchSensor(SensorEntity): +class TwitchSensor(CoordinatorEntity[TwitchCoordinator], SensorEntity): """Representation of a Twitch channel.""" _attr_translation_key = "channel" - def __init__( - self, channel: TwitchUser, session: OAuth2Session, client: Twitch - ) -> None: + def __init__(self, coordinator: TwitchCoordinator, channel_id: str) -> None: """Initialize the sensor.""" - self._session = session - self._client = client - self._channel = channel - self._enable_user_auth = client.has_required_auth(AuthType.USER, OAUTH_SCOPES) - self._attr_name = channel.display_name - self._attr_unique_id = channel.id - - async def async_update(self) -> None: - """Update device state.""" - await self._session.async_ensure_token_valid() - await self._client.set_user_authentication( - self._session.token["access_token"], - OAUTH_SCOPES, - self._session.token["refresh_token"], - False, - ) - followers = await self._client.get_channel_followers(self._channel.id) - - self._attr_extra_state_attributes = { - ATTR_FOLLOWING: followers.total, - ATTR_VIEWS: self._channel.view_count, + super().__init__(coordinator) + self.channel_id = channel_id + self._attr_unique_id = channel_id + self._attr_name = self.channel.name + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.channel_id in self.coordinator.data + + @property + def channel(self) -> TwitchUpdate: + """Return the channel data.""" + return self.coordinator.data[self.channel_id] + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return STATE_STREAMING if self.channel.is_streaming else STATE_OFFLINE + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the state attributes.""" + channel = self.channel + resp = { + ATTR_FOLLOWING: channel.followers, + ATTR_GAME: channel.game, + ATTR_TITLE: channel.title, + ATTR_STARTED_AT: channel.started_at, + ATTR_VIEWERS: channel.viewers, } - if self._enable_user_auth: - await 
self._async_add_user_attributes() - if stream := ( - await first(self._client.get_streams(user_id=[self._channel.id], first=1)) - ): - self._attr_native_value = STATE_STREAMING - self._attr_extra_state_attributes[ATTR_GAME] = stream.game_name - self._attr_extra_state_attributes[ATTR_TITLE] = stream.title - self._attr_extra_state_attributes[ATTR_STARTED_AT] = stream.started_at - self._attr_entity_picture = stream.thumbnail_url - if self._attr_entity_picture is not None: - self._attr_entity_picture = self._attr_entity_picture.format( - height=24, - width=24, - ) - else: - self._attr_native_value = STATE_OFFLINE - self._attr_extra_state_attributes[ATTR_GAME] = None - self._attr_extra_state_attributes[ATTR_TITLE] = None - self._attr_extra_state_attributes[ATTR_STARTED_AT] = None - self._attr_entity_picture = self._channel.profile_image_url - - async def _async_add_user_attributes(self) -> None: - if not (user := await first(self._client.get_users())): - return - self._attr_extra_state_attributes[ATTR_SUBSCRIPTION] = False - try: - sub = await self._client.check_user_subscription( - user_id=user.id, broadcaster_id=self._channel.id - ) - self._attr_extra_state_attributes[ATTR_SUBSCRIPTION] = True - self._attr_extra_state_attributes[ATTR_SUBSCRIPTION_GIFTED] = sub.is_gift - except TwitchResourceNotFound: - LOGGER.debug("User is not subscribed to %s", self._channel.display_name) - except TwitchAPIException as exc: - LOGGER.error("Error response on check_user_subscription: %s", exc) - - follows = await self._client.get_followed_channels( - user.id, broadcaster_id=self._channel.id - ) - self._attr_extra_state_attributes[ATTR_FOLLOW] = follows.total > 0 - if follows.total: - self._attr_extra_state_attributes[ATTR_FOLLOW_SINCE] = follows.data[ - 0 - ].followed_at + resp[ATTR_SUBSCRIPTION] = False + if channel.subscribed is not None: + resp[ATTR_SUBSCRIPTION] = channel.subscribed + resp[ATTR_SUBSCRIPTION_GIFTED] = channel.subscription_gifted + resp[ATTR_SUBSCRIPTION_TIER] = channel.subscription_tier + resp[ATTR_FOLLOW] = channel.follows + if channel.follows: + resp[ATTR_FOLLOW_SINCE] = channel.following_since + return resp + + @property + def entity_picture(self) -> str | None: + """Return the picture of the sensor.""" + if self.channel.is_streaming: + assert self.channel.stream_picture is not None + return self.channel.stream_picture + return self.channel.picture diff --git a/homeassistant/components/unifi/config_flow.py b/homeassistant/components/unifi/config_flow.py index b5ad1ea2ff0bfb..63c8533aa2ecea 100644 --- a/homeassistant/components/unifi/config_flow.py +++ b/homeassistant/components/unifi/config_flow.py @@ -20,7 +20,7 @@ from homeassistant.components import ssdp from homeassistant.config_entries import ( - ConfigEntry, + SOURCE_REAUTH, ConfigEntryState, ConfigFlow, ConfigFlowResult, @@ -78,7 +78,7 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: UnifiConfigEntry, ) -> UnifiOptionsFlowHandler: """Get the options flow for this handler.""" return UnifiOptionsFlowHandler(config_entry) @@ -86,7 +86,6 @@ def async_get_options_flow( def __init__(self) -> None: """Initialize the UniFi Network flow.""" self.config: dict[str, Any] = {} - self.reauth_config_entry: ConfigEntry | None = None self.reauth_schema: dict[vol.Marker, Any] = {} async def async_step_user( @@ -118,13 +117,14 @@ async def async_step_user( else: if ( - self.reauth_config_entry - and self.reauth_config_entry.unique_id is not 
None - and self.reauth_config_entry.unique_id in self.sites - ): - return await self.async_step_site( - {CONF_SITE_ID: self.reauth_config_entry.unique_id} + self.source == SOURCE_REAUTH + and ( + (reauth_unique_id := self._get_reauth_entry().unique_id) + is not None ) + and reauth_unique_id in self.sites + ): + return await self.async_step_site({CONF_SITE_ID: reauth_unique_id}) return await self.async_step_site() @@ -160,8 +160,8 @@ async def async_step_site( config_entry = await self.async_set_unique_id(unique_id) abort_reason = "configuration_updated" - if self.reauth_config_entry: - config_entry = self.reauth_config_entry + if self.source == SOURCE_REAUTH: + config_entry = self._get_reauth_entry() abort_reason = "reauth_successful" if config_entry: @@ -192,24 +192,20 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Trigger a reauthentication flow.""" - config_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert config_entry - self.reauth_config_entry = config_entry + reauth_entry = self._get_reauth_entry() self.context["title_placeholders"] = { - CONF_HOST: config_entry.data[CONF_HOST], - CONF_SITE_ID: config_entry.title, + CONF_HOST: reauth_entry.data[CONF_HOST], + CONF_SITE_ID: reauth_entry.title, } self.reauth_schema = { - vol.Required(CONF_HOST, default=config_entry.data[CONF_HOST]): str, - vol.Required(CONF_USERNAME, default=config_entry.data[CONF_USERNAME]): str, + vol.Required(CONF_HOST, default=reauth_entry.data[CONF_HOST]): str, + vol.Required(CONF_USERNAME, default=reauth_entry.data[CONF_USERNAME]): str, vol.Required(CONF_PASSWORD): str, - vol.Required(CONF_PORT, default=config_entry.data[CONF_PORT]): int, + vol.Required(CONF_PORT, default=reauth_entry.data[CONF_PORT]): int, vol.Required( - CONF_VERIFY_SSL, default=config_entry.data[CONF_VERIFY_SSL] + CONF_VERIFY_SSL, default=reauth_entry.data[CONF_VERIFY_SSL] ): bool, } @@ -253,7 +249,6 @@ class UnifiOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: UnifiConfigEntry) -> None: """Initialize UniFi Network options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) async def async_step_init( diff --git a/homeassistant/components/unifi/strings.json b/homeassistant/components/unifi/strings.json index ba426c2f08a358..1c7317c42678c8 100644 --- a/homeassistant/components/unifi/strings.json +++ b/homeassistant/components/unifi/strings.json @@ -2,6 +2,11 @@ "config": { "flow_title": "{site} ({host})", "step": { + "site": { + "data": { + "site": "Site ID" + } + }, "user": { "title": "Set up UniFi Network", "data": { diff --git a/homeassistant/components/unifiprotect/camera.py b/homeassistant/components/unifiprotect/camera.py index 62c35d00171be6..a40939be9177f0 100644 --- a/homeassistant/components/unifiprotect/camera.py +++ b/homeassistant/components/unifiprotect/camera.py @@ -156,7 +156,8 @@ def _add_new_device(device: ProtectAdoptableDeviceModel) -> None: async_add_entities(_async_camera_entities(hass, entry, data)) -_EMPTY_CAMERA_FEATURES = CameraEntityFeature(0) +_DISABLE_FEATURE = CameraEntityFeature(0) +_ENABLE_FEATURE = CameraEntityFeature.STREAM class ProtectCamera(ProtectDeviceEntity, Camera): @@ -195,24 +196,22 @@ def __init__( self._attr_name = f"{camera_name} (insecure)" # only the default (first) channel is enabled by default self._attr_entity_registry_enabled_default = is_default and secure + # Set the stream source before finishing the init + # because async_added_to_hass is too late and 
camera + # integration uses async_internal_added_to_hass to access + # the stream source which is called before async_added_to_hass + self._async_set_stream_source() @callback def _async_set_stream_source(self) -> None: - disable_stream = self._disable_stream channel = self.channel - - if not channel.is_rtsp_enabled: - disable_stream = False - - rtsp_url = channel.rtsps_url if self._secure else channel.rtsp_url - - # _async_set_stream_source called by __init__ - # pylint: disable-next=attribute-defined-outside-init - self._stream_source = None if disable_stream else rtsp_url - if self._stream_source: - self._attr_supported_features = CameraEntityFeature.STREAM - else: - self._attr_supported_features = _EMPTY_CAMERA_FEATURES + enable_stream = not self._disable_stream and channel.is_rtsp_enabled + # SRTP disabled because go2rtc does not support it + # https://github.com/AlexxIT/go2rtc/#source-rtsp + rtsp_url = channel.rtsps_no_srtp_url if self._secure else channel.rtsp_url + source = rtsp_url if enable_stream else None + self._attr_supported_features = _ENABLE_FEATURE if source else _DISABLE_FEATURE + self._stream_source = source @callback def _async_update_device_from_protect(self, device: ProtectDeviceType) -> None: diff --git a/homeassistant/components/unifiprotect/config_flow.py b/homeassistant/components/unifiprotect/config_flow.py index 284b7003485841..31950f8f7e44eb 100644 --- a/homeassistant/components/unifiprotect/config_flow.py +++ b/homeassistant/components/unifiprotect/config_flow.py @@ -104,7 +104,6 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Init the config flow.""" super().__init__() - self.entry: ConfigEntry | None = None self._discovered_device: dict[str, str] = {} async def async_step_dhcp( @@ -226,7 +225,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() @callback def _async_create_entry(self, title: str, data: dict[str, Any]) -> ConfigFlowResult: @@ -295,8 +294,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -304,21 +301,21 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth.""" errors: dict[str, str] = {} - assert self.entry is not None # prepopulate fields - form_data = {**self.entry.data} + reauth_entry = self._get_reauth_entry() + form_data = {**reauth_entry.data} if user_input is not None: form_data.update(user_input) # validate login data _, errors = await self._async_get_nvr_data(form_data) if not errors: - return self.async_update_reload_and_abort(self.entry, data=form_data) + return self.async_update_reload_and_abort(reauth_entry, data=form_data) self.context["title_placeholders"] = { - "name": self.entry.title, - "ip_address": self.entry.data[CONF_HOST], + "name": reauth_entry.title, + "ip_address": reauth_entry.data[CONF_HOST], } return self.async_show_form( step_id="reauth_confirm", @@ -379,10 +376,6 @@ async def async_step_user( class OptionsFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = 
None ) -> ConfigFlowResult: diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index ae7b2d94f2190b..85867b5c87cf05 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.3.1", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.4.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json index aaef111a3517bd..9238c825390d43 100644 --- a/homeassistant/components/unifiprotect/strings.json +++ b/homeassistant/components/unifiprotect/strings.json @@ -42,7 +42,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "discovery_started": "Discovery started" + "discovery_started": "Discovery started", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "options": { diff --git a/homeassistant/components/upcloud/config_flow.py b/homeassistant/components/upcloud/config_flow.py index 20860df5553225..bb988726ba57b2 100644 --- a/homeassistant/components/upcloud/config_flow.py +++ b/homeassistant/components/upcloud/config_flow.py @@ -95,16 +95,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> UpCloudOptionsFlow: """Get options flow.""" - return UpCloudOptionsFlow(config_entry) + return UpCloudOptionsFlow() class UpCloudOptionsFlow(OptionsFlow): """UpCloud options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/upcloud/manifest.json b/homeassistant/components/upcloud/manifest.json index cd829f6dd9d3db..38581d31709de4 100644 --- a/homeassistant/components/upcloud/manifest.json +++ b/homeassistant/components/upcloud/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/upcloud", "iot_class": "cloud_polling", - "requirements": ["upcloud-api==2.5.1"] + "requirements": ["upcloud-api==2.6.0"] } diff --git a/homeassistant/components/update/__init__.py b/homeassistant/components/update/__init__.py index 82f2792afa3d7f..6f0b56b14e8c94 100644 --- a/homeassistant/components/update/__init__.py +++ b/homeassistant/components/update/__init__.py @@ -27,6 +27,7 @@ from .const import ( ATTR_AUTO_UPDATE, ATTR_BACKUP, + ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, @@ -34,6 +35,7 @@ ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, ATTR_TITLE, + ATTR_UPDATE_PERCENTAGE, ATTR_VERSION, DOMAIN, SERVICE_INSTALL, @@ -177,6 +179,7 @@ class UpdateEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes update entities.""" device_class: UpdateDeviceClass | None = None + display_precision: int = 0 entity_category: EntityCategory | None = EntityCategory.CONFIG @@ -190,12 +193,14 @@ def _version_is_newer(latest_version: str, installed_version: str) -> bool: "auto_update", "installed_version", "device_class", + "display_precision", "in_progress", "latest_version", "release_summary", "release_url", "supported_features", "title", + "update_percentage", } @@ -207,13 +212,20 @@ 
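A minimal sketch of the options-flow shape the UpCloud and UniFi Protect hunks above switch to: async_get_options_flow returns the handler without arguments, and the handler reads the entry through the config_entry attribute that the OptionsFlow base class now supplies on its own. The example domain, ExampleOptionsFlow name, and the scan-interval option are placeholders, not taken from either integration.

    from typing import Any

    import voluptuous as vol

    from homeassistant.config_entries import (
        ConfigEntry,
        ConfigFlow,
        ConfigFlowResult,
        OptionsFlow,
    )
    from homeassistant.const import CONF_SCAN_INTERVAL
    from homeassistant.core import callback


    class ExampleConfigFlow(ConfigFlow, domain="example"):
        """Config flow that hands out an argument-less options flow."""

        @staticmethod
        @callback
        def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
            """Return the options flow; the entry is no longer passed to __init__."""
            return ExampleOptionsFlow()


    class ExampleOptionsFlow(OptionsFlow):
        """Options flow that relies on the inherited self.config_entry."""

        async def async_step_init(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            """Show the current option as the default and store the submitted value."""
            if user_input is not None:
                return self.async_create_entry(data=user_input)
            return self.async_show_form(
                step_id="init",
                data_schema=vol.Schema(
                    {
                        vol.Optional(
                            CONF_SCAN_INTERVAL,
                            default=self.config_entry.options.get(CONF_SCAN_INTERVAL, 30),
                        ): int,
                    }
                ),
            )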
class UpdateEntity( """Representation of an update entity.""" _entity_component_unrecorded_attributes = frozenset( - {ATTR_ENTITY_PICTURE, ATTR_IN_PROGRESS, ATTR_RELEASE_SUMMARY} + { + ATTR_DISPLAY_PRECISION, + ATTR_ENTITY_PICTURE, + ATTR_IN_PROGRESS, + ATTR_RELEASE_SUMMARY, + ATTR_UPDATE_PERCENTAGE, + } ) entity_description: UpdateEntityDescription _attr_auto_update: bool = False _attr_installed_version: str | None = None _attr_device_class: UpdateDeviceClass | None + _attr_display_precision: int _attr_in_progress: bool | int = False _attr_latest_version: str | None = None _attr_release_summary: str | None = None @@ -221,6 +233,7 @@ class UpdateEntity( _attr_state: None = None _attr_supported_features: UpdateEntityFeature = UpdateEntityFeature(0) _attr_title: str | None = None + _attr_update_percentage: int | float | None = None __skipped_version: str | None = None __in_progress: bool = False @@ -250,6 +263,15 @@ def device_class(self) -> UpdateDeviceClass | None: return self.entity_description.device_class return None + @cached_property + def display_precision(self) -> int: + """Return number of decimal digits for display of update progress.""" + if hasattr(self, "_attr_display_precision"): + return self._attr_display_precision + if hasattr(self, "entity_description"): + return self.entity_description.display_precision + return 0 + @property def entity_category(self) -> EntityCategory | None: """Return the category of the entity, if any.""" @@ -278,8 +300,7 @@ def in_progress(self) -> bool | int | None: Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. - Can either return a boolean (True if in progress, False if not) - or an integer to indicate the progress in from 0 to 100%. + Should return a boolean (True if in progress, False if not). """ return self._attr_in_progress @@ -329,6 +350,16 @@ def supported_features_compat(self) -> UpdateEntityFeature: return new_features return features + @cached_property + def update_percentage(self) -> int | float | None: + """Update installation progress. + + Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. + + Can either return a number to indicate the progress from 0 to 100% or None. + """ + return self._attr_update_percentage + @final async def async_skip(self) -> None: """Skip the current offered version to update.""" @@ -422,8 +453,13 @@ def state_attributes(self) -> dict[str, Any] | None: # Otherwise, we use the internal progress value. 
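The update_percentage and display_precision additions above separate progress reporting from the in_progress flag. A minimal sketch of an entity using them, assuming a made-up DemoUpdate entity and a fake four-step install loop:

    import asyncio
    from typing import Any

    from homeassistant.components.update import UpdateEntity, UpdateEntityFeature


    class DemoUpdate(UpdateEntity):
        """Update entity that reports fractional install progress."""

        _attr_supported_features = (
            UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
        )
        _attr_installed_version = "1.0.0"
        _attr_latest_version = "1.1.0"
        _attr_display_precision = 1  # frontend shows one decimal of progress

        async def async_install(
            self, version: str | None, backup: bool, **kwargs: Any
        ) -> None:
            """Install the update while streaming progress to Home Assistant."""
            self._attr_in_progress = True  # stays a plain boolean
            for step in range(1, 5):
                await asyncio.sleep(1)  # stand-in for real download/flash work
                self._attr_update_percentage = step * 25.0
                self.async_write_ha_state()
            self._attr_installed_version = version or self._attr_latest_version
            self._attr_in_progress = False
            self._attr_update_percentage = None
            self.async_write_ha_state()

Returning an integer from in_progress still works for the moment; the state_attributes shim that follows maps such a value onto update_percentage and coerces in_progress back to a boolean.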
if UpdateEntityFeature.PROGRESS in self.supported_features_compat: in_progress = self.in_progress + update_percentage = self.update_percentage if in_progress else None + if type(in_progress) is not bool and isinstance(in_progress, int): + update_percentage = in_progress + in_progress = True else: in_progress = self.__in_progress + update_percentage = None installed_version = self.installed_version latest_version = self.latest_version @@ -438,6 +474,7 @@ def state_attributes(self) -> dict[str, Any] | None: return { ATTR_AUTO_UPDATE: self.auto_update, + ATTR_DISPLAY_PRECISION: self.display_precision, ATTR_INSTALLED_VERSION: installed_version, ATTR_IN_PROGRESS: in_progress, ATTR_LATEST_VERSION: latest_version, @@ -445,6 +482,7 @@ def state_attributes(self) -> dict[str, Any] | None: ATTR_RELEASE_URL: self.release_url, ATTR_SKIPPED_VERSION: skipped_version, ATTR_TITLE: self.title, + ATTR_UPDATE_PERCENTAGE: update_percentage, } @final diff --git a/homeassistant/components/update/const.py b/homeassistant/components/update/const.py index 0d7da94f656f19..83a74ef6789be1 100644 --- a/homeassistant/components/update/const.py +++ b/homeassistant/components/update/const.py @@ -23,6 +23,7 @@ class UpdateEntityFeature(IntFlag): ATTR_AUTO_UPDATE: Final = "auto_update" ATTR_BACKUP: Final = "backup" +ATTR_DISPLAY_PRECISION: Final = "display_precision" ATTR_INSTALLED_VERSION: Final = "installed_version" ATTR_IN_PROGRESS: Final = "in_progress" ATTR_LATEST_VERSION: Final = "latest_version" @@ -30,4 +31,5 @@ class UpdateEntityFeature(IntFlag): ATTR_RELEASE_URL: Final = "release_url" ATTR_SKIPPED_VERSION: Final = "skipped_version" ATTR_TITLE: Final = "title" +ATTR_UPDATE_PERCENTAGE: Final = "update_percentage" ATTR_VERSION: Final = "version" diff --git a/homeassistant/components/upnp/config_flow.py b/homeassistant/components/upnp/config_flow.py index 1a40d4b34425fe..41e481fa58c0e1 100644 --- a/homeassistant/components/upnp/config_flow.py +++ b/homeassistant/components/upnp/config_flow.py @@ -16,7 +16,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import HomeAssistant, callback @@ -94,9 +93,11 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> UpnpOptionsFlowHandler: """Get the options flow for this handler.""" - return UpnpOptionsFlowHandler(config_entry) + return UpnpOptionsFlowHandler() @property def _discoveries(self) -> dict[str, SsdpServiceInfo]: @@ -299,7 +300,7 @@ async def _async_create_entry_from_discovery( return self.async_create_entry(title=title, data=data, options=options) -class UpnpOptionsFlowHandler(OptionsFlowWithConfigEntry): +class UpnpOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -313,7 +314,7 @@ async def async_step_init( { vol.Optional( CONFIG_ENTRY_FORCE_POLL, - default=self.options.get( + default=self.config_entry.options.get( CONFIG_ENTRY_FORCE_POLL, DEFAULT_CONFIG_ENTRY_FORCE_POLL ), ): bool, diff --git a/homeassistant/components/utility_meter/manifest.json b/homeassistant/components/utility_meter/manifest.json index 25e803e6a2d379..31a2d4e9584064 100644 --- a/homeassistant/components/utility_meter/manifest.json +++ b/homeassistant/components/utility_meter/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["croniter"], "quality_scale": "internal", - "requirements": ["croniter==2.0.2"] + "requirements": 
["cronsim==2.6"] } diff --git a/homeassistant/components/utility_meter/select.py b/homeassistant/components/utility_meter/select.py index d5b1206d046172..5815ce7ec95be8 100644 --- a/homeassistant/components/utility_meter/select.py +++ b/homeassistant/components/utility_meter/select.py @@ -6,7 +6,7 @@ from homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_UNIQUE_ID +from homeassistant.const import CONF_NAME, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.device_registry import DeviceInfo @@ -36,9 +36,9 @@ async def async_setup_entry( ) tariff_select = TariffSelect( - name, - tariffs, - unique_id, + name=name, + tariffs=tariffs, + unique_id=unique_id, device_info=device_info, ) async_add_entities([tariff_select]) @@ -62,13 +62,15 @@ async def async_setup_platform( conf_meter_unique_id: str | None = hass.data[DATA_UTILITY][meter].get( CONF_UNIQUE_ID ) + conf_meter_name = hass.data[DATA_UTILITY][meter].get(CONF_NAME, meter) async_add_entities( [ TariffSelect( - meter, - discovery_info[CONF_TARIFFS], - conf_meter_unique_id, + name=conf_meter_name, + tariffs=discovery_info[CONF_TARIFFS], + yaml_slug=meter, + unique_id=conf_meter_unique_id, ) ] ) @@ -82,12 +84,16 @@ class TariffSelect(SelectEntity, RestoreEntity): def __init__( self, name, - tariffs, - unique_id, + tariffs: list[str], + *, + yaml_slug: str | None = None, + unique_id: str | None = None, device_info: DeviceInfo | None = None, ) -> None: """Initialize a tariff selector.""" self._attr_name = name + if yaml_slug: # Backwards compatibility with YAML configuration entries + self.entity_id = f"select.{yaml_slug}" self._attr_unique_id = unique_id self._attr_device_info = device_info self._current_tariff: str | None = None diff --git a/homeassistant/components/utility_meter/sensor.py b/homeassistant/components/utility_meter/sensor.py index 6b8c07c7ef7b38..19ef3c1f3a88d7 100644 --- a/homeassistant/components/utility_meter/sensor.py +++ b/homeassistant/components/utility_meter/sensor.py @@ -9,7 +9,7 @@ import logging from typing import Any, Self -from croniter import croniter +from cronsim import CronSim import voluptuous as vol from homeassistant.components.sensor import ( @@ -379,14 +379,13 @@ def __init__( self.entity_id = suggested_entity_id self._parent_meter = parent_meter self._sensor_source_id = source_entity - self._state = None self._last_period = Decimal(0) self._last_reset = dt_util.utcnow() self._last_valid_state = None self._collecting = None - self._name = name + self._attr_name = name self._input_device_class = None - self._unit_of_measurement = None + self._attr_native_unit_of_measurement = None self._period = meter_type if meter_type is not None: # For backwards compatibility reasons we convert the period and offset into a cron pattern @@ -405,12 +404,22 @@ def __init__( self._tariff = tariff self._tariff_entity = tariff_entity self._next_reset = None + self.scheduler = ( + CronSim( + self._cron_pattern, + dt_util.now( + dt_util.get_default_time_zone() + ), # we need timezone for DST purposes (see issue #102984) + ) + if self._cron_pattern + else None + ) def start(self, attributes: Mapping[str, Any]) -> None: """Initialize unit and state upon source initial update.""" self._input_device_class = attributes.get(ATTR_DEVICE_CLASS) - self._unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) - self._state = 0 
+ self._attr_native_unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._attr_native_value = 0 self.async_write_ha_state() @staticmethod @@ -485,13 +494,13 @@ def async_reading(self, event: Event[EventStateChangedData]) -> None: ) return - if self._state is None: + if self.native_value is None: # First state update initializes the utility_meter sensors for sensor in self.hass.data[DATA_UTILITY][self._parent_meter][ DATA_TARIFF_SENSORS ]: sensor.start(new_state_attributes) - if self._unit_of_measurement is None: + if self.native_unit_of_measurement is None: _LOGGER.warning( "Source sensor %s has no unit of measurement. Please %s", self._sensor_source_id, @@ -502,10 +511,12 @@ def async_reading(self, event: Event[EventStateChangedData]) -> None: adjustment := self.calculate_adjustment(old_state, new_state) ) is not None and (self._sensor_net_consumption or adjustment >= 0): # If net_consumption is off, the adjustment must be non-negative - self._state += adjustment # type: ignore[operator] # self._state will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line + self._attr_native_value += adjustment # type: ignore[operator] # self._attr_native_value will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line self._input_device_class = new_state_attributes.get(ATTR_DEVICE_CLASS) - self._unit_of_measurement = new_state_attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._attr_native_unit_of_measurement = new_state_attributes.get( + ATTR_UNIT_OF_MEASUREMENT + ) self._last_valid_state = new_state_val self.async_write_ha_state() @@ -534,7 +545,7 @@ def _change_status(self, tariff: str) -> None: _LOGGER.debug( "%s - %s - source <%s>", - self._name, + self.name, COLLECTING if self._collecting is not None else PAUSED, self._sensor_source_id, ) @@ -543,11 +554,10 @@ def _change_status(self, tariff: str) -> None: async def _program_reset(self): """Program the reset of the utility meter.""" - if self._cron_pattern is not None: - tz = dt_util.get_default_time_zone() - self._next_reset = croniter(self._cron_pattern, dt_util.now(tz)).get_next( - datetime - ) # we need timezone for DST purposes (see issue #102984) + if self.scheduler: + self._next_reset = next(self.scheduler) + + _LOGGER.debug("Next reset of %s is %s", self.entity_id, self._next_reset) self.async_on_remove( async_track_point_in_time( self.hass, @@ -575,14 +585,16 @@ async def async_reset_meter(self, entity_id): return _LOGGER.debug("Reset utility meter <%s>", self.entity_id) self._last_reset = dt_util.utcnow() - self._last_period = Decimal(self._state) if self._state else Decimal(0) - self._state = 0 + self._last_period = ( + Decimal(self.native_value) if self.native_value else Decimal(0) + ) + self._attr_native_value = 0 self.async_write_ha_state() async def async_calibrate(self, value): """Calibrate the Utility Meter with a given value.""" - _LOGGER.debug("Calibrate %s = %s type(%s)", self._name, value, type(value)) - self._state = Decimal(str(value)) + _LOGGER.debug("Calibrate %s = %s type(%s)", self.name, value, type(value)) + self._attr_native_value = Decimal(str(value)) self.async_write_ha_state() async def async_added_to_hass(self): @@ -598,10 +610,11 @@ async def async_added_to_hass(self): ) if (last_sensor_data := await self.async_get_last_sensor_data()) is not None: - # new introduced in 2022.04 - self._state = last_sensor_data.native_value + self._attr_native_value = last_sensor_data.native_value 
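The same sensor refactor moves state into the _attr_* shorthand attributes (_attr_name, _attr_native_value, _attr_native_unit_of_measurement) instead of overriding the corresponding properties, and restores it through RestoreSensor. A reduced sketch of that shape, with ExampleMeterSensor as a placeholder name:

    from homeassistant.components.sensor import RestoreSensor


    class ExampleMeterSensor(RestoreSensor):
        """Sensor that keeps its reading across restarts via _attr_* attributes."""

        def __init__(self, name: str) -> None:
            """Start with no value; the source update or the restore fills it in."""
            self._attr_name = name
            self._attr_native_value = None
            self._attr_native_unit_of_measurement = None

        async def async_added_to_hass(self) -> None:
            """Restore the last value and unit when the entity is re-added."""
            await super().async_added_to_hass()
            if (last_data := await self.async_get_last_sensor_data()) is not None:
                self._attr_native_value = last_data.native_value
                self._attr_native_unit_of_measurement = (
                    last_data.native_unit_of_measurement
                )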
self._input_device_class = last_sensor_data.input_device_class - self._unit_of_measurement = last_sensor_data.native_unit_of_measurement + self._attr_native_unit_of_measurement = ( + last_sensor_data.native_unit_of_measurement + ) self._last_period = last_sensor_data.last_period self._last_reset = last_sensor_data.last_reset self._last_valid_state = last_sensor_data.last_valid_state @@ -609,39 +622,6 @@ async def async_added_to_hass(self): # Null lambda to allow cancelling the collection on tariff change self._collecting = lambda: None - elif state := await self.async_get_last_state(): - # legacy to be removed on 2022.10 (we are keeping this to avoid utility_meter counter losses) - try: - self._state = Decimal(state.state) - except InvalidOperation: - _LOGGER.error( - "Could not restore state <%s>. Resetting utility_meter.%s", - state.state, - self.name, - ) - else: - self._unit_of_measurement = state.attributes.get( - ATTR_UNIT_OF_MEASUREMENT - ) - self._last_period = ( - Decimal(state.attributes[ATTR_LAST_PERIOD]) - if state.attributes.get(ATTR_LAST_PERIOD) - and is_number(state.attributes[ATTR_LAST_PERIOD]) - else Decimal(0) - ) - self._last_valid_state = ( - Decimal(state.attributes[ATTR_LAST_VALID_STATE]) - if state.attributes.get(ATTR_LAST_VALID_STATE) - and is_number(state.attributes[ATTR_LAST_VALID_STATE]) - else None - ) - self._last_reset = dt_util.as_utc( - dt_util.parse_datetime(state.attributes.get(ATTR_LAST_RESET)) - ) - if state.attributes.get(ATTR_STATUS) == COLLECTING: - # Null lambda to allow cancelling the collection on tariff change - self._collecting = lambda: None - @callback def async_source_tracking(event): """Wait for source to be ready, then start meter.""" @@ -666,7 +646,7 @@ def async_source_tracking(event): _LOGGER.debug( "<%s> collecting %s from %s", self.name, - self._unit_of_measurement, + self.native_unit_of_measurement, self._sensor_source_id, ) self._collecting = async_track_state_change_event( @@ -681,22 +661,15 @@ async def async_will_remove_from_hass(self) -> None: self._collecting() self._collecting = None - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def native_value(self): - """Return the state of the sensor.""" - return self._state - @property def device_class(self): """Return the device class of the sensor.""" if self._input_device_class is not None: return self._input_device_class - if self._unit_of_measurement in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY]: + if ( + self.native_unit_of_measurement + in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY] + ): return SensorDeviceClass.ENERGY return None @@ -709,11 +682,6 @@ def state_class(self): else SensorStateClass.TOTAL_INCREASING ) - @property - def native_unit_of_measurement(self): - """Return the unit the value is expressed in.""" - return self._unit_of_measurement - @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" diff --git a/homeassistant/components/vallox/config_flow.py b/homeassistant/components/vallox/config_flow.py index 1c291f853a5e1f..30d1d153d9e19a 100644 --- a/homeassistant/components/vallox/config_flow.py +++ b/homeassistant/components/vallox/config_flow.py @@ -8,7 +8,7 @@ from vallox_websocket_api import Vallox, ValloxApiException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core 
import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -40,8 +40,6 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _context_entry: ConfigEntry - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -88,24 +86,18 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration of the Vallox device host address.""" - self._context_entry = self._get_reconfigure_entry() - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reconfiguration of the Vallox device host address.""" + reconfigure_entry = self._get_reconfigure_entry() if not user_input: return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( - CONFIG_SCHEMA, {CONF_HOST: self._context_entry.data.get(CONF_HOST)} + CONFIG_SCHEMA, {CONF_HOST: reconfigure_entry.data.get(CONF_HOST)} ), ) updated_host = user_input[CONF_HOST] - if self._context_entry.data.get(CONF_HOST) != updated_host: + if reconfigure_entry.data.get(CONF_HOST) != updated_host: self._async_abort_entries_match({CONF_HOST: updated_host}) errors: dict[str, str] = {} @@ -121,13 +113,11 @@ async def async_step_reconfigure_confirm( errors[CONF_HOST] = "unknown" else: return self.async_update_reload_and_abort( - self._context_entry, - data={**self._context_entry.data, CONF_HOST: updated_host}, - reason="reconfigure_successful", + reconfigure_entry, data_updates={CONF_HOST: updated_host} ) return self.async_show_form( - step_id="reconfigure_confirm", + step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( CONFIG_SCHEMA, {CONF_HOST: updated_host} ), diff --git a/homeassistant/components/vallox/strings.json b/homeassistant/components/vallox/strings.json index 608a5eb178222f..8a30ed4ad011ad 100644 --- a/homeassistant/components/vallox/strings.json +++ b/homeassistant/components/vallox/strings.json @@ -9,7 +9,7 @@ "host": "Hostname or IP address of your Vallox device." 
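The Vallox change above folds the reconfigure flow into a single reconfigure step that fetches the entry with _get_reconfigure_entry() and finishes with async_update_reload_and_abort(..., data_updates=...). A minimal sketch of that step shape, with ExampleHostFlow and the example domain as placeholders and host validation left out:

    from typing import Any

    import voluptuous as vol

    from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
    from homeassistant.const import CONF_HOST


    class ExampleHostFlow(ConfigFlow, domain="example"):
        """Config flow with a host-only reconfigure step."""

        async def async_step_reconfigure(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            """Let the user change the host of an existing entry."""
            reconfigure_entry = self._get_reconfigure_entry()
            if user_input is not None:
                # A real flow would check connectivity to the new host first.
                return self.async_update_reload_and_abort(
                    reconfigure_entry,
                    data_updates={CONF_HOST: user_input[CONF_HOST]},
                )
            return self.async_show_form(
                step_id="reconfigure",
                data_schema=vol.Schema(
                    {
                        vol.Required(
                            CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
                        ): str,
                    }
                ),
            )

data_updates merges the new values into the stored entry data, reloads the entry, and aborts with reconfigure_successful by default, so the explicit reason argument in the old code is no longer needed.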
} }, - "reconfigure_confirm": { + "reconfigure": { "data": { "host": "[%key:common::config_flow::data::host%]" }, diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index 685f8b49500b17..ca8cfb0f2a7747 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -122,7 +122,7 @@ async def set_memo_text(call: ServiceCall) -> None: await ( hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] .get_module(call.data[CONF_ADDRESS]) - .set_memo_text(memo_text.async_render()) + .set_memo_text(memo_text) ) hass.services.async_register( @@ -135,7 +135,7 @@ async def set_memo_text(call: ServiceCall) -> None: vol.Required(CONF_ADDRESS): vol.All( vol.Coerce(int), vol.Range(min=0, max=255) ), - vol.Optional(CONF_MEMO_TEXT, default=""): cv.template, + vol.Optional(CONF_MEMO_TEXT, default=""): cv.string, } ), ) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index c1cf2951bbd985..5443afeef7728b 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.7.6"], + "requirements": ["velbus-aio==2024.10.0"], "usb": [ { "vid": "10CF", diff --git a/homeassistant/components/venstar/entity.py b/homeassistant/components/venstar/entity.py index 630da05324eb3d..b8a4b971a7f53b 100644 --- a/homeassistant/components/venstar/entity.py +++ b/homeassistant/components/venstar/entity.py @@ -34,11 +34,11 @@ def _handle_coordinator_update(self) -> None: @property def device_info(self) -> DeviceInfo: """Return the device information for this entity.""" - fw_ver_major, fw_ver_minor = self._client.get_firmware_ver() + firmware_version = self._client.get_firmware_ver() return DeviceInfo( identifiers={(DOMAIN, self._config.entry_id)}, name=self._client.name, manufacturer="Venstar", model=f"{self._client.model}-{self._client.get_type()}", - sw_version=f"{fw_ver_major}.{fw_ver_minor}", + sw_version=f"{firmware_version[0]}.{firmware_version[1]}", ) diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index 08e7640773b1db..f2b182cc270463 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -76,10 +76,6 @@ def options_data(user_input: dict[str, str]) -> dict[str, list[int]]: class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, str] | None = None, @@ -104,7 +100,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/verisure/alarm_control_panel.py b/homeassistant/components/verisure/alarm_control_panel.py index fc7e7551145a16..5f34b5871632ff 100644 --- a/homeassistant/components/verisure/alarm_control_panel.py +++ b/homeassistant/components/verisure/alarm_control_panel.py @@ -7,10 +7,10 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from 
homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ALARM_ARMING, STATE_ALARM_DISARMING from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -86,7 +86,7 @@ async def _async_set_arm_state( async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._attr_state = STATE_ALARM_DISARMING + self._attr_alarm_state = AlarmControlPanelState.DISARMING self.async_write_ha_state() await self._async_set_arm_state( "DISARMED", self.coordinator.verisure.disarm(code) @@ -94,7 +94,7 @@ async def async_alarm_disarm(self, code: str | None = None) -> None: async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING self.async_write_ha_state() await self._async_set_arm_state( "ARMED_HOME", self.coordinator.verisure.arm_home(code) @@ -102,7 +102,7 @@ async def async_alarm_arm_home(self, code: str | None = None) -> None: async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING self.async_write_ha_state() await self._async_set_arm_state( "ARMED_AWAY", self.coordinator.verisure.arm_away(code) @@ -111,7 +111,7 @@ async def async_alarm_arm_away(self, code: str | None = None) -> None: @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._attr_state = ALARM_STATE_TO_HA.get( + self._attr_alarm_state = ALARM_STATE_TO_HA.get( self.coordinator.data["alarm"]["statusType"] ) self._attr_changed_by = self.coordinator.data["alarm"].get("name") diff --git a/homeassistant/components/verisure/config_flow.py b/homeassistant/components/verisure/config_flow.py index ccf74cd6791bbf..0f1088ccb80d0c 100644 --- a/homeassistant/components/verisure/config_flow.py +++ b/homeassistant/components/verisure/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, cast +from typing import Any from verisure import ( Error as VerisureError, @@ -38,15 +38,16 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 2 email: str - entry: ConfigEntry password: str verisure: Verisure @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> VerisureOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> VerisureOptionsFlowHandler: """Get the options flow for this handler.""" - return VerisureOptionsFlowHandler(config_entry) + return VerisureOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -179,10 +180,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with Verisure.""" - self.entry = cast( - ConfigEntry, - self.hass.config_entries.async_get_entry(self.context["entry_id"]), - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -230,25 +227,21 @@ async def async_step_reauth_confirm( LOGGER.debug("Unexpected response from Verisure, %s", ex) errors["base"] = "unknown" else: - data = self.entry.data.copy() - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **data, + return 
self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={ CONF_EMAIL: user_input[CONF_EMAIL], CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema( { - vol.Required(CONF_EMAIL, default=self.entry.data[CONF_EMAIL]): str, + vol.Required( + CONF_EMAIL, default=self._get_reauth_entry().data[CONF_EMAIL] + ): str, vol.Required(CONF_PASSWORD): str, } ), @@ -274,18 +267,13 @@ async def async_step_reauth_mfa( LOGGER.debug("Unexpected response from Verisure, %s", ex) errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={ CONF_EMAIL: self.email, CONF_PASSWORD: self.password, }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_mfa", @@ -304,10 +292,6 @@ async def async_step_reauth_mfa( class VerisureOptionsFlowHandler(OptionsFlow): """Handle Verisure options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Verisure options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -324,7 +308,7 @@ async def async_step_init( vol.Optional( CONF_LOCK_CODE_DIGITS, description={ - "suggested_value": self.entry.options.get( + "suggested_value": self.config_entry.options.get( CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS ) }, diff --git a/homeassistant/components/verisure/const.py b/homeassistant/components/verisure/const.py index 5b1aa1a0740b0b..4afb93d957f209 100644 --- a/homeassistant/components/verisure/const.py +++ b/homeassistant/components/verisure/const.py @@ -3,12 +3,7 @@ from datetime import timedelta import logging -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState DOMAIN = "verisure" @@ -43,8 +38,8 @@ } ALARM_STATE_TO_HA = { - "DISARMED": STATE_ALARM_DISARMED, - "ARMED_HOME": STATE_ALARM_ARMED_HOME, - "ARMED_AWAY": STATE_ALARM_ARMED_AWAY, - "PENDING": STATE_ALARM_PENDING, + "DISARMED": AlarmControlPanelState.DISARMED, + "ARMED_HOME": AlarmControlPanelState.ARMED_HOME, + "ARMED_AWAY": AlarmControlPanelState.ARMED_AWAY, + "PENDING": AlarmControlPanelState.PENDING, } diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 50dce95e42a40c..48215819ce543a 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -56,6 +56,7 @@ "LAP-V201S-WEU": "Vital200S", # Alt ID Model Vital200S "LAP-V201S-WUS": "Vital200S", # Alt ID Model Vital200S "LAP-V201-AUSR": "Vital200S", # Alt ID Model Vital200S + "LAP-V201S-AUSR": "Vital200S", # Alt ID Model Vital200S "Vital100S": "Vital100S", "LAP-V102S-WUS": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-AASR": "Vital100S", # Alt ID Model Vital100S diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 58a262e769f173..098a17e90f0fe4 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -94,6 
+94,7 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): | FanEntityFeature.TURN_ON ) _attr_name = None + _attr_translation_key = "vesync" _enable_turn_on_off_backwards_compatibility = False def __init__(self, fan) -> None: diff --git a/homeassistant/components/vesync/icons.json b/homeassistant/components/vesync/icons.json index cfdefb2ed09d49..e4769acc9a514c 100644 --- a/homeassistant/components/vesync/icons.json +++ b/homeassistant/components/vesync/icons.json @@ -1,4 +1,20 @@ { + "entity": { + "fan": { + "vesync": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "mdi:fan-auto", + "sleep": "mdi:sleep", + "pet": "mdi:paw", + "turbo": "mdi:weather-tornado" + } + } + } + } + } + }, "services": { "update_devices": { "service": "mdi:update" diff --git a/homeassistant/components/vesync/strings.json b/homeassistant/components/vesync/strings.json index 5ff0aa58722f71..b6e4e2fd957fe0 100644 --- a/homeassistant/components/vesync/strings.json +++ b/homeassistant/components/vesync/strings.json @@ -42,6 +42,20 @@ "current_voltage": { "name": "Current voltage" } + }, + "fan": { + "vesync": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "Auto", + "sleep": "Sleep", + "pet": "Pet", + "turbo": "Turbo" + } + } + } + } } }, "services": { diff --git a/homeassistant/components/vicare/config_flow.py b/homeassistant/components/vicare/config_flow.py index 67ce4f2c18642e..c711cc060744c6 100644 --- a/homeassistant/components/vicare/config_flow.py +++ b/homeassistant/components/vicare/config_flow.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant.components import dhcp -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import format_mac @@ -50,7 +50,6 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for ViCare.""" VERSION = 1 - entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -81,7 +80,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with ViCare.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -89,11 +87,11 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm re-authentication with ViCare.""" errors: dict[str, str] = {} - assert self.entry is not None + reauth_entry = self._get_reauth_entry() if user_input: data = { - **self.entry.data, + **reauth_entry.data, **user_input, } @@ -102,17 +100,12 @@ async def async_step_reauth_confirm( except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError): errors["base"] = "invalid_auth" else: - self.hass.config_entries.async_update_entry( - self.entry, - data=data, - ) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) return self.async_show_form( step_id="reauth_confirm", data_schema=self.add_suggested_values_to_schema( - REAUTH_SCHEMA, self.entry.data + REAUTH_SCHEMA, reauth_entry.data ), errors=errors, ) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json 
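Several flows in this diff (UniFi Protect, Verisure, ViCare, VLC Telnet, Vodafone Station, Volvo On Call) converge on the same reauth shape: async_step_reauth no longer caches the entry, the confirm step asks the base class for it with _get_reauth_entry(), and a successful check ends in async_update_reload_and_abort(..., data_updates=...). A minimal password-only sketch with placeholder names and the credential check omitted:

    from collections.abc import Mapping
    from typing import Any

    import voluptuous as vol

    from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
    from homeassistant.const import CONF_PASSWORD


    class ExampleAuthFlow(ConfigFlow, domain="example"):
        """Config flow with a password-only reauth step."""

        async def async_step_reauth(
            self, entry_data: Mapping[str, Any]
        ) -> ConfigFlowResult:
            """Start reauth; the entry is resolved later from the flow context."""
            return await self.async_step_reauth_confirm()

        async def async_step_reauth_confirm(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            """Ask for a new password and update the existing entry in place."""
            reauth_entry = self._get_reauth_entry()
            if user_input is not None:
                # A real flow would re-validate the credentials before saving.
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
                )
            return self.async_show_form(
                step_id="reauth_confirm",
                data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}),
            )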
index 869a1ef80d824c..8ce996ab81d207 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.34.0"] + "requirements": ["PyViCare==2.35.0"] } diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index 529caca6a873d1..f9af9636941028 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -265,6 +265,72 @@ class ViCareNumberEntityDescription(NumberEntityDescription, ViCareRequiredKeysM HeatingProgram.COMFORT_HEATING ), ), + ViCareNumberEntityDescription( + key="normal_cooling_temperature", + translation_key="normal_cooling_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDesiredTemperatureForProgram( + HeatingProgram.NORMAL_COOLING + ), + value_setter=lambda api, value: api.setProgramTemperature( + HeatingProgram.NORMAL_COOLING, value + ), + min_value_getter=lambda api: api.getProgramMinTemperature( + HeatingProgram.NORMAL_COOLING + ), + max_value_getter=lambda api: api.getProgramMaxTemperature( + HeatingProgram.NORMAL_COOLING + ), + stepping_getter=lambda api: api.getProgramStepping( + HeatingProgram.NORMAL_COOLING + ), + ), + ViCareNumberEntityDescription( + key="reduced_cooling_temperature", + translation_key="reduced_cooling_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDesiredTemperatureForProgram( + HeatingProgram.REDUCED_COOLING + ), + value_setter=lambda api, value: api.setProgramTemperature( + HeatingProgram.REDUCED_COOLING, value + ), + min_value_getter=lambda api: api.getProgramMinTemperature( + HeatingProgram.REDUCED_COOLING + ), + max_value_getter=lambda api: api.getProgramMaxTemperature( + HeatingProgram.REDUCED_COOLING + ), + stepping_getter=lambda api: api.getProgramStepping( + HeatingProgram.REDUCED_COOLING + ), + ), + ViCareNumberEntityDescription( + key="comfort_cooling_temperature", + translation_key="comfort_cooling_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDesiredTemperatureForProgram( + HeatingProgram.COMFORT_COOLING + ), + value_setter=lambda api, value: api.setProgramTemperature( + HeatingProgram.COMFORT_COOLING, value + ), + min_value_getter=lambda api: api.getProgramMinTemperature( + HeatingProgram.COMFORT_COOLING + ), + max_value_getter=lambda api: api.getProgramMaxTemperature( + HeatingProgram.COMFORT_COOLING + ), + stepping_getter=lambda api: api.getProgramStepping( + HeatingProgram.COMFORT_COOLING + ), + ), ) diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index bedb161edcb4af..57b7c0bec9a628 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -430,6 +430,32 @@ class ViCareSensorEntityDescription(SensorEntityDescription, ViCareRequiredKeysM state_class=SensorStateClass.TOTAL_INCREASING, entity_registry_enabled_default=False, ), + ViCareSensorEntityDescription( + key="energy_consumption_cooling_today", + 
translation_key="energy_consumption_cooling_today", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_getter=lambda api: api.getPowerConsumptionCoolingToday(), + unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), + state_class=SensorStateClass.TOTAL_INCREASING, + ), + ViCareSensorEntityDescription( + key="energy_consumption_cooling_this_month", + translation_key="energy_consumption_cooling_this_month", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_getter=lambda api: api.getPowerConsumptionCoolingThisMonth(), + unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), + state_class=SensorStateClass.TOTAL_INCREASING, + entity_registry_enabled_default=False, + ), + ViCareSensorEntityDescription( + key="energy_consumption_cooling_this_year", + translation_key="energy_consumption_cooling_this_year", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_getter=lambda api: api.getPowerConsumptionCoolingThisYear(), + unit_getter=lambda api: api.getPowerConsumptionCoolingUnit(), + state_class=SensorStateClass.TOTAL_INCREASING, + entity_registry_enabled_default=False, + ), ViCareSensorEntityDescription( key="energy_dhw_summary_consumption_heating_currentday", translation_key="energy_dhw_summary_consumption_heating_currentday", diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 15637a75b83c85..77e570da7790c2 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -97,13 +97,22 @@ "name": "Comfort temperature" }, "normal_heating_temperature": { - "name": "[%key:component::vicare::entity::number::normal_temperature::name%]" + "name": "Normal heating temperature" }, "reduced_heating_temperature": { - "name": "[%key:component::vicare::entity::number::reduced_temperature::name%]" + "name": "Reduced heating temperature" }, "comfort_heating_temperature": { - "name": "[%key:component::vicare::entity::number::comfort_temperature::name%]" + "name": "Comfort heating temperature" + }, + "normal_cooling_temperature": { + "name": "Normal cooling temperature" + }, + "reduced_cooling_temperature": { + "name": "Reduced cooling temperature" + }, + "comfort_cooling_temperature": { + "name": "Comfort cooling temperature" }, "dhw_temperature": { "name": "DHW temperature" @@ -234,28 +243,49 @@ "name": "DHW gas consumption last seven days" }, "energy_summary_consumption_heating_currentday": { - "name": "Heating energy consumption today" + "name": "Heating electricity consumption today" }, "energy_summary_consumption_heating_currentmonth": { - "name": "Heating energy consumption this month" + "name": "Heating electricity consumption this month" }, "energy_summary_consumption_heating_currentyear": { - "name": "Heating energy consumption this year" + "name": "Heating electricity consumption this year" }, "energy_summary_consumption_heating_lastsevendays": { - "name": "Heating energy consumption last seven days" + "name": "Heating electricity consumption last seven days" + }, + "energy_consumption_cooling_today": { + "name": "Cooling electricity consumption today" + }, + "energy_consumption_cooling_this_month": { + "name": "Cooling electricity consumption this month" + }, + "energy_consumption_cooling_this_year": { + "name": "Cooling electricity consumption this year" }, "energy_dhw_summary_consumption_heating_currentday": { - "name": "DHW energy consumption today" + "name": "DHW electricity consumption today" }, 
"energy_dhw_summary_consumption_heating_currentmonth": { - "name": "DHW energy consumption this month" + "name": "DHW electricity consumption this month" }, "energy_dhw_summary_consumption_heating_currentyear": { - "name": "DHW energy consumption this year" + "name": "DHW electricity consumption this year" }, "energy_summary_dhw_consumption_heating_lastsevendays": { - "name": "DHW energy consumption last seven days" + "name": "DHW electricity consumption last seven days" + }, + "power_consumption_today": { + "name": "Electricity consumption today" + }, + "power_consumption_this_week": { + "name": "Electricity consumption this week" + }, + "power_consumption_this_month": { + "name": "Electricity consumption this month" + }, + "power_consumption_this_year": { + "name": "Electricity consumption this year" }, "power_production_current": { "name": "Power production current" @@ -290,18 +320,6 @@ "solar_power_production_this_year": { "name": "Solar energy production this year" }, - "power_consumption_today": { - "name": "Energy consumption today" - }, - "power_consumption_this_week": { - "name": "Power consumption this week" - }, - "power_consumption_this_month": { - "name": "Energy consumption this month" - }, - "power_consumption_this_year": { - "name": "Energy consumption this year" - }, "buffer_top_temperature": { "name": "Buffer top temperature" }, diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 7e1ec7f8beee0a..98d1c0566cecca 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -1,6 +1,7 @@ """Types for the ViCare integration.""" from collections.abc import Callable +from contextlib import suppress from dataclasses import dataclass import enum from typing import Any @@ -24,11 +25,14 @@ class HeatingProgram(enum.StrEnum): COMFORT = "comfort" COMFORT_HEATING = "comfortHeating" + COMFORT_COOLING = "comfortCooling" ECO = "eco" NORMAL = "normal" NORMAL_HEATING = "normalHeating" + NORMAL_COOLING = "normalCooling" REDUCED = "reduced" REDUCED_HEATING = "reducedHeating" + REDUCED_COOLING = "reducedCooling" STANDBY = "standby" @staticmethod @@ -48,8 +52,12 @@ def from_ha_preset( ) -> str | None: """Return the mapped ViCare heating program for the Home Assistant preset.""" for program in supported_heating_programs: - if VICARE_TO_HA_PRESET_HEATING.get(HeatingProgram(program)) == ha_preset: - return program + with suppress(ValueError): + if ( + VICARE_TO_HA_PRESET_HEATING.get(HeatingProgram(program)) + == ha_preset + ): + return program return None diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index c8f1aaa21cb289..49f6a7095651e0 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -108,10 +108,6 @@ def _host_is_same(host1: str, host2: str) -> bool: class VizioOptionsConfigFlow(OptionsFlow): """Handle Vizio options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize vizio options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -184,7 +180,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> VizioOptionsConfigFlow: """Get the options flow for this handler.""" - return VizioOptionsConfigFlow(config_entry) + return VizioOptionsConfigFlow() def __init__(self) -> None: """Initialize config 
flow.""" diff --git a/homeassistant/components/vizio/coordinator.py b/homeassistant/components/vizio/coordinator.py index 1930828b595d9c..a7ca7d7f9ed01b 100644 --- a/homeassistant/components/vizio/coordinator.py +++ b/homeassistant/components/vizio/coordinator.py @@ -34,10 +34,9 @@ def __init__(self, hass: HomeAssistant, store: Store[list[dict[str, Any]]]) -> N self.fail_threshold = 10 self.store = store - async def async_config_entry_first_refresh(self) -> None: + async def _async_setup(self) -> None: """Refresh data for the first time when a config entry is setup.""" self.data = await self.store.async_load() or APPS - await super().async_config_entry_first_refresh() async def _async_update_data(self) -> list[dict[str, Any]]: """Update data via library.""" diff --git a/homeassistant/components/vlc_telnet/config_flow.py b/homeassistant/components/vlc_telnet/config_flow.py index 6ccb92e5b8b25c..085649379592a6 100644 --- a/homeassistant/components/vlc_telnet/config_flow.py +++ b/homeassistant/components/vlc_telnet/config_flow.py @@ -10,11 +10,11 @@ from aiovlc.exceptions import AuthError, ConnectError import voluptuous as vol -from homeassistant.components.hassio import HassioServiceInfo -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DEFAULT_PORT, DOMAIN @@ -70,7 +70,6 @@ class VLCTelnetConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for VLC media player Telnet.""" VERSION = 1 - entry: ConfigEntry | None = None hassio_discovery: dict[str, Any] | None = None async def async_step_user( @@ -108,21 +107,19 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth flow.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert self.entry - self.context["title_placeholders"] = {"host": self.entry.data[CONF_HOST]} + self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]} return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirm.""" - assert self.entry errors = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: try: - await validate_input(self.hass, {**self.entry.data, **user_input}) + await validate_input(self.hass, {**reauth_entry.data, **user_input}) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: @@ -131,21 +128,14 @@ async def async_step_reauth_confirm( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, - CONF_PASSWORD: user_input[CONF_PASSWORD], - }, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_HOST: self.entry.data[CONF_HOST]}, + description_placeholders={CONF_HOST: reauth_entry.data[CONF_HOST]}, 
data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/vodafone_station/config_flow.py b/homeassistant/components/vodafone_station/config_flow.py index 6b6adb6a18dc64..7a80244f8d624e 100644 --- a/homeassistant/components/vodafone_station/config_flow.py +++ b/homeassistant/components/vodafone_station/config_flow.py @@ -17,7 +17,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -60,13 +59,14 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Vodafone Station.""" VERSION = 1 - entry: ConfigEntry | None = None @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> VodafoneStationOptionsFlowHandler: """Get the options flow for this handler.""" - return VodafoneStationOptionsFlowHandler(config_entry) + return VodafoneStationOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -106,21 +106,19 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle reauth flow.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert self.entry - self.context["title_placeholders"] = {"host": self.entry.data[CONF_HOST]} + self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]} return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reauth confirm.""" - assert self.entry errors = {} + reauth_entry = self._get_reauth_entry() if user_input is not None: try: - await validate_input(self.hass, {**self.entry.data, **user_input}) + await validate_input(self.hass, {**reauth_entry.data, **user_input}) except aiovodafone_exceptions.AlreadyLogged: errors["base"] = "already_logged" except aiovodafone_exceptions.CannotConnect: @@ -131,27 +129,22 @@ async def async_step_reauth_confirm( _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - self.hass.config_entries.async_update_entry( - self.entry, - data={ - **self.entry.data, + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={ CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_HOST: self.entry.data[CONF_HOST]}, + description_placeholders={CONF_HOST: reauth_entry.data[CONF_HOST]}, data_schema=STEP_REAUTH_DATA_SCHEMA, errors=errors, ) -class VodafoneStationOptionsFlowHandler(OptionsFlowWithConfigEntry): +class VodafoneStationOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" async def async_step_init( @@ -166,7 +159,7 @@ async def async_step_init( { vol.Optional( CONF_CONSIDER_HOME, - default=self.options.get( + default=self.config_entry.options.get( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() ), ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)) diff --git a/homeassistant/components/vodafone_station/diagnostics.py b/homeassistant/components/vodafone_station/diagnostics.py new file mode 100644 index 00000000000000..e306d6caca2b49 --- /dev/null +++ 
b/homeassistant/components/vodafone_station/diagnostics.py @@ -0,0 +1,47 @@ +"""Diagnostics support for Vodafone Station.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import VodafoneStationRouter + +TO_REDACT = {CONF_USERNAME, CONF_PASSWORD} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + + sensors_data = coordinator.data.sensors + return { + "entry": async_redact_data(entry.as_dict(), TO_REDACT), + "device_info": { + "sys_model_name": sensors_data.get("sys_model_name"), + "sys_firmware_version": sensors_data["sys_firmware_version"], + "sys_hardware_version": sensors_data["sys_hardware_version"], + "sys_cpu_usage": sensors_data["sys_cpu_usage"][:-1], + "sys_memory_usage": sensors_data["sys_memory_usage"][:-1], + "sys_reboot_cause": sensors_data["sys_reboot_cause"], + "last_update success": coordinator.last_update_success, + "last_exception": coordinator.last_exception, + "client_devices": [ + { + "hostname": device_info.device.name, + "connection_type": device_info.device.connection_type, + "connected": device_info.device.connected, + "type": device_info.device.type, + } + for _, device_info in coordinator.data.devices.items() + ], + }, + } diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index 47137fff26c74d..29cb3c070abcc5 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_polling", "loggers": ["aiovodafone"], "quality_scale": "silver", - "requirements": ["aiovodafone==0.6.0"] + "requirements": ["aiovodafone==0.6.1"] } diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index 2a08a9b2ebe3a3..fb76253eb3d1d8 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -5,7 +5,7 @@ from collections.abc import Callable from dataclasses import dataclass from datetime import datetime -from typing import Any, Final +from typing import Final from homeassistant.components.sensor import ( SensorDeviceClass, @@ -16,32 +16,49 @@ from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import _LOGGER, DOMAIN, LINE_TYPES from .coordinator import VodafoneStationRouter NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"] +UPTIME_DEVIATION = 45 @dataclass(frozen=True, kw_only=True) class VodafoneStationEntityDescription(SensorEntityDescription): """Vodafone Station entity description.""" - value: Callable[[Any, Any], Any] = ( - lambda coordinator, key: coordinator.data.sensors[key] - ) + value: Callable[ + [VodafoneStationRouter, str | datetime | float | None, str], + str | datetime | float | None, + ] = lambda coordinator, last_value, key: 
coordinator.data.sensors[key] is_suitable: Callable[[dict], bool] = lambda val: True -def _calculate_uptime(coordinator: VodafoneStationRouter, key: str) -> datetime: +def _calculate_uptime( + coordinator: VodafoneStationRouter, + last_value: str | datetime | float | None, + key: str, +) -> datetime: """Calculate device uptime.""" - return coordinator.api.convert_uptime(coordinator.data.sensors[key]) + delta_uptime = coordinator.api.convert_uptime(coordinator.data.sensors[key]) + + if ( + not isinstance(last_value, datetime) + or abs((delta_uptime - last_value).total_seconds()) > UPTIME_DEVIATION + ): + return delta_uptime + + return last_value -def _line_connection(coordinator: VodafoneStationRouter, key: str) -> str | None: +def _line_connection( + coordinator: VodafoneStationRouter, + last_value: str | datetime | float | None, + key: str, +) -> str | None: """Identify line type.""" value = coordinator.data.sensors @@ -126,14 +143,18 @@ def _line_connection(coordinator: VodafoneStationRouter, key: str) -> str | None translation_key="sys_cpu_usage", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, - value=lambda coordinator, key: float(coordinator.data.sensors[key][:-1]), + value=lambda coordinator, last_value, key: float( + coordinator.data.sensors[key][:-1] + ), ), VodafoneStationEntityDescription( key="sys_memory_usage", translation_key="sys_memory_usage", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, - value=lambda coordinator, key: float(coordinator.data.sensors[key][:-1]), + value=lambda coordinator, last_value, key: float( + coordinator.data.sensors[key][:-1] + ), ), VodafoneStationEntityDescription( key="sys_reboot_cause", @@ -178,10 +199,12 @@ def __init__( self.entity_description = description self._attr_device_info = coordinator.device_info self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + self._old_state: str | datetime | float | None = None @property - def native_value(self) -> StateType: + def native_value(self) -> str | datetime | float | None: """Sensor value.""" - return self.entity_description.value( - self.coordinator, self.entity_description.key + self._old_state = self.entity_description.value( + self.coordinator, self._old_state, self.entity_description.key ) + return self._old_state diff --git a/homeassistant/components/voip/assist_satellite.py b/homeassistant/components/voip/assist_satellite.py index 5e32585775cd03..0100435d6dc8c3 100644 --- a/homeassistant/components/voip/assist_satellite.py +++ b/homeassistant/components/voip/assist_satellite.py @@ -21,7 +21,6 @@ AssistSatelliteEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory from homeassistant.core import Context, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -80,7 +79,6 @@ class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol entity_description = AssistSatelliteEntityDescription(key="assist_satellite") _attr_translation_key = "assist_satellite" - _attr_entity_category = EntityCategory.CONFIG _attr_name = None def __init__( diff --git a/homeassistant/components/voip/config_flow.py b/homeassistant/components/voip/config_flow.py index 821c7f29a1ebbe..63dcb8f86ee542 100644 --- a/homeassistant/components/voip/config_flow.py +++ b/homeassistant/components/voip/config_flow.py @@ -47,16 +47,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlow: """Create 
the options flow.""" - return VoipOptionsFlowHandler(config_entry) + return VoipOptionsFlowHandler() class VoipOptionsFlowHandler(OptionsFlow): """Handle VoIP options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/volvooncall/config_flow.py b/homeassistant/components/volvooncall/config_flow.py index a5e860c9105882..ccb0a7f62e1128 100644 --- a/homeassistant/components/volvooncall/config_flow.py +++ b/homeassistant/components/volvooncall/config_flow.py @@ -9,7 +9,7 @@ import voluptuous as vol from volvooncall import Connection -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_PASSWORD, CONF_REGION, @@ -35,7 +35,6 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): """VolvoOnCall config flow.""" VERSION = 1 - _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,7 +52,7 @@ async def async_step_user( if user_input is not None: await self.async_set_unique_id(user_input[CONF_USERNAME]) - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() try: @@ -64,21 +63,18 @@ async def async_step_user( _LOGGER.exception("Unhandled exception in user step") errors["base"] = "unknown" if not errors: - if self._reauth_entry: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=self._reauth_entry.data | user_input + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=user_input ) - await self.hass.config_entries.async_reload( - self._reauth_entry.entry_id - ) - return self.async_abort(reason="reauth_successful") return self.async_create_entry( title=user_input[CONF_USERNAME], data=user_input ) - elif self._reauth_entry: + elif self.source == SOURCE_REAUTH: + reauth_entry = self._get_reauth_entry() for key in defaults: - defaults[key] = self._reauth_entry.data.get(key) + defaults[key] = reauth_entry.data.get(key) user_schema = vol.Schema( { @@ -110,9 +106,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() async def is_valid(self, user_input): diff --git a/homeassistant/components/wallbox/__init__.py b/homeassistant/components/wallbox/__init__.py index 4ea2cf98be101b..b2f8ac7fd5d57e 100644 --- a/homeassistant/components/wallbox/__init__.py +++ b/homeassistant/components/wallbox/__init__.py @@ -10,7 +10,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from .const import CONF_STATION, DOMAIN, UPDATE_INTERVAL -from .coordinator import InvalidAuth, WallboxCoordinator +from .coordinator import InvalidAuth, WallboxCoordinator, async_validate_input PLATFORMS = [Platform.LOCK, Platform.NUMBER, Platform.SENSOR, Platform.SWITCH] @@ -22,18 +22,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.data[CONF_PASSWORD], jwtTokenDrift=UPDATE_INTERVAL, ) + try: + await async_validate_input(hass, wallbox) + except InvalidAuth as ex: + raise ConfigEntryAuthFailed from ex + 
wallbox_coordinator = WallboxCoordinator( entry.data[CONF_STATION], wallbox, hass, ) - - try: - await wallbox_coordinator.async_validate_input() - - except InvalidAuth as ex: - raise ConfigEntryAuthFailed from ex - await wallbox_coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = wallbox_coordinator diff --git a/homeassistant/components/wallbox/config_flow.py b/homeassistant/components/wallbox/config_flow.py index 44c47149554509..bdc51eef9633de 100644 --- a/homeassistant/components/wallbox/config_flow.py +++ b/homeassistant/components/wallbox/config_flow.py @@ -8,12 +8,12 @@ import voluptuous as vol from wallbox import Wallbox -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from .const import CONF_STATION, DOMAIN -from .coordinator import InvalidAuth, WallboxCoordinator +from .coordinator import InvalidAuth, async_validate_input COMPONENT_DOMAIN = DOMAIN @@ -32,9 +32,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ wallbox = Wallbox(data["username"], data["password"]) - wallbox_coordinator = WallboxCoordinator(data["station"], wallbox, hass) - await wallbox_coordinator.async_validate_input() + await async_validate_input(hass, wallbox) # Return info that you want to store in the config entry. return {"title": "Wallbox Portal"} @@ -43,18 +42,10 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, class WallboxConfigFlow(ConfigFlow, domain=COMPONENT_DOMAIN): """Handle a config flow for Wallbox.""" - def __init__(self) -> None: - """Start the Wallbox config flow.""" - self._reauth_entry: ConfigEntry | None = None - async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - return await self.async_step_user() async def async_step_user( @@ -71,18 +62,13 @@ async def async_step_user( try: await self.async_set_unique_id(user_input["station"]) - if not self._reauth_entry: + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() info = await validate_input(self.hass, user_input) return self.async_create_entry(title=info["title"], data=user_input) - if user_input["station"] == self._reauth_entry.data[CONF_STATION]: - self.hass.config_entries.async_update_entry( - self._reauth_entry, data=user_input, unique_id=user_input["station"] - ) - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) - ) - return self.async_abort(reason="reauth_successful") + reauth_entry = self._get_reauth_entry() + if user_input["station"] == reauth_entry.data[CONF_STATION]: + return self.async_update_reload_and_abort(reauth_entry, data=user_input) errors["base"] = "reauth_invalid" except ConnectionError: errors["base"] = "cannot_connect" diff --git a/homeassistant/components/wallbox/coordinator.py b/homeassistant/components/wallbox/coordinator.py index f3679551bc4734..99c565d9c0c97d 100644 --- a/homeassistant/components/wallbox/coordinator.py +++ b/homeassistant/components/wallbox/coordinator.py @@ -89,6 +89,21 @@ def require_authentication( return require_authentication 
+def _validate(wallbox: Wallbox) -> None: + """Authenticate using Wallbox API.""" + try: + wallbox.authenticate() + except requests.exceptions.HTTPError as wallbox_connection_error: + if wallbox_connection_error.response.status_code == 403: + raise InvalidAuth from wallbox_connection_error + raise ConnectionError from wallbox_connection_error + + +async def async_validate_input(hass: HomeAssistant, wallbox: Wallbox) -> None: + """Get new sensor data for Wallbox component.""" + await hass.async_add_executor_job(_validate, wallbox) + + class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Wallbox Coordinator class.""" @@ -108,19 +123,6 @@ def authenticate(self) -> None: """Authenticate using Wallbox API.""" self._wallbox.authenticate() - def _validate(self) -> None: - """Authenticate using Wallbox API.""" - try: - self._wallbox.authenticate() - except requests.exceptions.HTTPError as wallbox_connection_error: - if wallbox_connection_error.response.status_code == 403: - raise InvalidAuth from wallbox_connection_error - raise ConnectionError from wallbox_connection_error - - async def async_validate_input(self) -> None: - """Get new sensor data for Wallbox component.""" - await self.hass.async_add_executor_job(self._validate) - @_require_authentication def _get_data(self) -> dict[str, Any]: """Get new sensor data for Wallbox component.""" diff --git a/homeassistant/components/watttime/__init__.py b/homeassistant/components/watttime/__init__.py index 6b32cf723a3481..ed2bdd4ebac88c 100644 --- a/homeassistant/components/watttime/__init__.py +++ b/homeassistant/components/watttime/__init__.py @@ -58,6 +58,7 @@ async def async_update_data() -> RealTimeEmissionsResponseType: coordinator = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=entry.title, update_interval=DEFAULT_UPDATE_INTERVAL, update_method=async_update_data, diff --git a/homeassistant/components/watttime/config_flow.py b/homeassistant/components/watttime/config_flow.py index db68738b302dbb..ad676e166c5828 100644 --- a/homeassistant/components/watttime/config_flow.py +++ b/homeassistant/components/watttime/config_flow.py @@ -126,9 +126,11 @@ async def _async_validate_credentials( @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> WattTimeOptionsFlowHandler: """Define the config flow to handle options.""" - return WattTimeOptionsFlowHandler(config_entry) + return WattTimeOptionsFlowHandler() async def async_step_coordinates( self, user_input: dict[str, Any] | None = None @@ -241,10 +243,6 @@ async def async_step_user( class WattTimeOptionsFlowHandler(OptionsFlow): """Handle a WattTime options flow.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -258,7 +256,7 @@ async def async_step_init( { vol.Required( CONF_SHOW_ON_MAP, - default=self.entry.options.get(CONF_SHOW_ON_MAP, True), + default=self.config_entry.options.get(CONF_SHOW_ON_MAP, True), ): bool } ), diff --git a/homeassistant/components/waze_travel_time/config_flow.py b/homeassistant/components/waze_travel_time/config_flow.py index 9738ec4465fe57..6ab6a4b121c8ac 100644 --- a/homeassistant/components/waze_travel_time/config_flow.py +++ b/homeassistant/components/waze_travel_time/config_flow.py @@ -113,10 +113,6 @@ def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]: class 
WazeOptionsFlow(OptionsFlow): """Handle an options flow for Waze Travel Time.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize waze options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: @@ -142,15 +138,13 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 - _entry: ConfigEntry - @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, ) -> WazeOptionsFlow: """Get the options flow for this handler.""" - return WazeOptionsFlow(config_entry) + return WazeOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -169,10 +163,9 @@ async def async_step_user( ): if self.source == SOURCE_RECONFIGURE: return self.async_update_reload_and_abort( - self._entry, + self._get_reconfigure_entry(), title=user_input[CONF_NAME], data=user_input, - reason="reconfigure_successful", ) return self.async_create_entry( title=user_input.get(CONF_NAME, DEFAULT_NAME), @@ -194,9 +187,7 @@ async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle reconfiguration.""" - self._entry = self._get_reconfigure_entry() - - data = self._entry.data.copy() + data = self._get_reconfigure_entry().data.copy() data[CONF_REGION] = data[CONF_REGION].lower() return self.async_show_form( diff --git a/homeassistant/components/weatherflow/strings.json b/homeassistant/components/weatherflow/strings.json index 8fb3a3cdf31654..cf23f02d78166c 100644 --- a/homeassistant/components/weatherflow/strings.json +++ b/homeassistant/components/weatherflow/strings.json @@ -13,11 +13,11 @@ }, "error": { "address_in_use": "Unable to open local UDP port 50222.", - "cannot_connect": "UDP discovery error." 
+ "cannot_connect": "UDP discovery error.", + "no_device_found": "[%key:common::config_flow::abort::no_devices_found%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "entity": { diff --git a/homeassistant/components/weatherflow_cloud/config_flow.py b/homeassistant/components/weatherflow_cloud/config_flow.py index cbb83b6f25bfdc..bdd3003e6b6bb2 100644 --- a/homeassistant/components/weatherflow_cloud/config_flow.py +++ b/homeassistant/components/weatherflow_cloud/config_flow.py @@ -49,15 +49,11 @@ async def async_step_reauth_confirm( errors = await _validate_api_token(api_token) if not errors: # Update the existing entry and abort - if existing_entry := self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ): - return self.async_update_reload_and_abort( - existing_entry, - data={CONF_API_TOKEN: api_token}, - reason="reauth_successful", - reload_even_if_entry_is_unchanged=False, - ) + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data={CONF_API_TOKEN: api_token}, + reload_even_if_entry_is_unchanged=False, + ) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/weatherkit/manifest.json b/homeassistant/components/weatherkit/manifest.json index a6dd40d599338c..f86745f330fc3c 100644 --- a/homeassistant/components/weatherkit/manifest.json +++ b/homeassistant/components/weatherkit/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/weatherkit", "iot_class": "cloud_polling", - "requirements": ["apple_weatherkit==1.1.2"] + "requirements": ["apple_weatherkit==1.1.3"] } diff --git a/homeassistant/components/webmin/config_flow.py b/homeassistant/components/webmin/config_flow.py index 3f55bbd91106e6..64f8c684dfac9d 100644 --- a/homeassistant/components/webmin/config_flow.py +++ b/homeassistant/components/webmin/config_flow.py @@ -26,7 +26,7 @@ SchemaFlowFormStep, ) -from .const import DEFAULT_PORT, DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN +from .const import DEFAULT_PORT, DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, LOGGER from .helpers import get_instance_from_options, get_sorted_mac_addresses @@ -45,9 +45,8 @@ async def validate_user_input( raise SchemaFlowError("invalid_auth") from err raise SchemaFlowError("cannot_connect") from err except Fault as fault: - raise SchemaFlowError( - f"Fault {fault.faultCode}: {fault.faultString}" - ) from fault + LOGGER.exception(f"Fault {fault.faultCode}: {fault.faultString}") + raise SchemaFlowError("unknown") from fault except ClientConnectionError as err: raise SchemaFlowError("cannot_connect") from err except Exception as err: diff --git a/homeassistant/components/webostv/config_flow.py b/homeassistant/components/webostv/config_flow.py index 4bc2c5ca258848..45395bd282a15b 100644 --- a/homeassistant/components/webostv/config_flow.py +++ b/homeassistant/components/webostv/config_flow.py @@ -47,7 +47,6 @@ def __init__(self) -> None: self._host: str = "" self._name: str = "" self._uuid: str | None = None - self._entry: ConfigEntry | None = None @staticmethod @callback @@ -144,15 +143,12 @@ async def async_step_reauth( ) -> ConfigFlowResult: """Perform reauth upon an WebOsTvPairError.""" self._host = entry_data[CONF_HOST] - self._entry = 
self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - assert self._entry is not None - if user_input is not None: try: client = await async_control_connect(self._host, None) @@ -161,8 +157,9 @@ async def async_step_reauth_confirm( except WEBOSTV_EXCEPTIONS: return self.async_abort(reason="reauth_unsuccessful") - update_client_key(self.hass, self._entry, client) - await self.hass.config_entries.async_reload(self._entry.entry_id) + reauth_entry = self._get_reauth_entry() + update_client_key(self.hass, reauth_entry, client) + await self.hass.config_entries.async_reload(reauth_entry.entry_id) return self.async_abort(reason="reauth_successful") return self.async_show_form(step_id="reauth_confirm") @@ -173,8 +170,6 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry - self.options = config_entry.options self.host = config_entry.data[CONF_HOST] self.key = config_entry.data[CONF_CLIENT_SECRET] @@ -191,7 +186,8 @@ async def async_step_init( if not sources_list: errors["base"] = "cannot_retrieve" - sources = [s for s in self.options.get(CONF_SOURCES, []) if s in sources_list] + option_sources = self.config_entry.options.get(CONF_SOURCES, []) + sources = [s for s in option_sources if s in sources_list] if not sources: sources = sources_list diff --git a/homeassistant/components/websocket_api/connection.py b/homeassistant/components/websocket_api/connection.py index 6c0c6f0c5871dd..62f1adc39b97b1 100644 --- a/homeassistant/components/websocket_api/connection.py +++ b/homeassistant/components/websocket_api/connection.py @@ -16,6 +16,12 @@ from homeassistant.util.json import JsonValueType from . 
import const, messages +from .messages import ( + error_message, + event_message, + message_to_json_bytes, + result_message, +) from .util import describe_request if TYPE_CHECKING: @@ -126,12 +132,12 @@ def unsub() -> None: @callback def send_result(self, msg_id: int, result: Any | None = None) -> None: """Send a result message.""" - self.send_message(messages.result_message(msg_id, result)) + self.send_message(message_to_json_bytes(result_message(msg_id, result))) @callback def send_event(self, msg_id: int, event: Any | None = None) -> None: """Send a event message.""" - self.send_message(messages.event_message(msg_id, event)) + self.send_message(message_to_json_bytes(event_message(msg_id, event))) @callback def send_error( @@ -145,13 +151,15 @@ def send_error( ) -> None: """Send an error message.""" self.send_message( - messages.error_message( - msg_id, - code, - message, - translation_key=translation_key, - translation_domain=translation_domain, - translation_placeholders=translation_placeholders, + message_to_json_bytes( + error_message( + msg_id, + code, + message, + translation_key=translation_key, + translation_domain=translation_domain, + translation_placeholders=translation_placeholders, + ) ) ) diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index 29dc6113350654..e7d57aebab611b 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -36,6 +36,8 @@ from .messages import message_to_json_bytes from .util import describe_request +CLOSE_MSG_TYPES = {WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING} + if TYPE_CHECKING: from .connection import ActiveConnection @@ -328,13 +330,7 @@ async def async_handle(self) -> web.WebSocketResponse: if TYPE_CHECKING: assert writer is not None - # aiohttp 3.11.0 changed the method name from _send_frame to send_frame - if hasattr(writer, "send_frame"): - send_frame = writer.send_frame # pragma: no cover - else: - send_frame = writer._send_frame # noqa: SLF001 - - send_bytes_text = partial(send_frame, opcode=WSMsgType.TEXT) + send_bytes_text = partial(writer.send_frame, opcode=WSMsgType.TEXT) auth = AuthPhase( logger, hass, self._send_message, self._cancel, request, send_bytes_text ) @@ -344,7 +340,7 @@ async def async_handle(self) -> web.WebSocketResponse: try: connection = await self._async_handle_auth_phase(auth, send_bytes_text) self._async_increase_writer_limit(writer) - await self._async_websocket_command_phase(connection, send_bytes_text) + await self._async_websocket_command_phase(connection) except asyncio.CancelledError: logger.debug("%s: Connection cancelled", self.description) raise @@ -454,9 +450,7 @@ def _async_increase_writer_limit(self, writer: WebSocketWriter) -> None: writer._limit = 2**20 # noqa: SLF001 async def _async_websocket_command_phase( - self, - connection: ActiveConnection, - send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + self, connection: ActiveConnection ) -> None: """Handle the command phase of the websocket connection.""" wsock = self._wsock @@ -467,24 +461,26 @@ async def _async_websocket_command_phase( # Command phase while not wsock.closed: msg = await wsock.receive() + msg_type = msg.type + msg_data = msg.data - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + if msg_type in CLOSE_MSG_TYPES: break - if msg.type is WSMsgType.BINARY: - if len(msg.data) < 1: + if msg_type is WSMsgType.BINARY: + if len(msg_data) < 1: raise Disconnect("Received invalid binary 
message.") - handler = msg.data[0] - payload = msg.data[1:] + handler = msg_data[0] + payload = msg_data[1:] async_handle_binary(handler, payload) continue - if msg.type is not WSMsgType.TEXT: + if msg_type is not WSMsgType.TEXT: raise Disconnect("Received non-Text message.") try: - command_msg_data = json_loads(msg.data) + command_msg_data = json_loads(msg_data) except ValueError as ex: raise Disconnect("Received invalid JSON.") from ex diff --git a/homeassistant/components/weheat/api.py b/homeassistant/components/weheat/api.py index 1d0828aa41b1be..b1f5c0b3eff21b 100644 --- a/homeassistant/components/weheat/api.py +++ b/homeassistant/components/weheat/api.py @@ -23,7 +23,6 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token[CONF_ACCESS_TOKEN] diff --git a/homeassistant/components/weheat/config_flow.py b/homeassistant/components/weheat/config_flow.py index c1eccaf6ba7c4b..b1a0b5dd4ea6be 100644 --- a/homeassistant/components/weheat/config_flow.py +++ b/homeassistant/components/weheat/config_flow.py @@ -6,7 +6,7 @@ from weheat.abstractions.user import get_user_id_from_token -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler @@ -18,8 +18,6 @@ class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -38,28 +36,21 @@ async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: user_id = await get_user_id_from_token( API_URL, data[CONF_TOKEN][CONF_ACCESS_TOKEN] ) - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry(title=ENTRY_TITLE, data=data) - if self.reauth_entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.reauth_entry, - unique_id=user_id, - data={**self.reauth_entry.data, **data}, - ) - - return self.async_abort(reason="wrong_account") + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=data + ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json index a7579c12ecd842..6fdae84cfffa5d 100644 --- a/homeassistant/components/weheat/icons.json +++ b/homeassistant/components/weheat/icons.json @@ -10,23 +10,17 @@ "cop": { "default": "mdi:speedometer" }, - "water_inlet_temperature": { - "default": "mdi:thermometer" - }, - "water_outlet_temperature": { - "default": "mdi:thermometer" - }, "ch_inlet_temperature": { "default": "mdi:radiator" }, "outside_temperature": { "default": "mdi:home-thermometer-outline" }, - "dhw_top_temperature": { - 
"default": "mdi:thermometer" + "thermostat_room_temperature": { + "default": "mdi:home-thermometer" }, - "dhw_bottom_temperature": { - "default": "mdi:thermometer" + "thermostat_room_temperature_setpoint": { + "default": "mdi:home-thermometer" }, "heat_pump_state": { "default": "mdi:state-machine" diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json index d32e0ce4047508..ef89a2f1acba35 100644 --- a/homeassistant/components/weheat/manifest.json +++ b/homeassistant/components/weheat/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/weheat", "iot_class": "cloud_polling", - "requirements": ["weheat==2024.09.23"] + "requirements": ["weheat==2024.11.02"] } diff --git a/homeassistant/components/weheat/sensor.py b/homeassistant/components/weheat/sensor.py index fc7d3628a33647..ef5be9030b9ed3 100644 --- a/homeassistant/components/weheat/sensor.py +++ b/homeassistant/components/weheat/sensor.py @@ -95,6 +95,33 @@ class WeHeatSensorEntityDescription(SensorEntityDescription): suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, value_fn=lambda status: status.air_inlet_temperature, ), + WeHeatSensorEntityDescription( + translation_key="thermostat_water_setpoint", + key="thermostat_water_setpoint", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.thermostat_water_setpoint, + ), + WeHeatSensorEntityDescription( + translation_key="thermostat_room_temperature", + key="thermostat_room_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.thermostat_room_temperature, + ), + WeHeatSensorEntityDescription( + translation_key="thermostat_room_temperature_setpoint", + key="thermostat_room_temperature_setpoint", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATER_TEMP, + value_fn=lambda status: status.thermostat_room_temperature_setpoint, + ), WeHeatSensorEntityDescription( translation_key="heat_pump_state", key="heat_pump_state", diff --git a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json index 3982bfd23b3d46..0733024cbed436 100644 --- a/homeassistant/components/weheat/strings.json +++ b/homeassistant/components/weheat/strings.json @@ -54,6 +54,15 @@ "outside_temperature": { "name": "Outside temperature" }, + "thermostat_water_setpoint": { + "name": "Water target temperature" + }, + "thermostat_room_temperature": { + "name": "Current room temperature" + }, + "thermostat_room_temperature_setpoint": { + "name": "Room temperature setpoint" + }, "dhw_top_temperature": { "name": "DHW top temperature" }, diff --git a/homeassistant/components/wemo/config_flow.py b/homeassistant/components/wemo/config_flow.py index 10a9bf5604bcb5..361c58953c5405 100644 --- a/homeassistant/components/wemo/config_flow.py +++ b/homeassistant/components/wemo/config_flow.py @@ -32,16 +32,12 @@ def __init__(self) -> None: @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for 
this handler.""" - return WemoOptionsFlow(config_entry) + return WemoOptionsFlow() class WemoOptionsFlow(OptionsFlow): """Options flow for the WeMo component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/whirlpool/config_flow.py b/homeassistant/components/whirlpool/config_flow.py index 7c39b1fbb29da7..069a5ca1e4ff0e 100644 --- a/homeassistant/components/whirlpool/config_flow.py +++ b/homeassistant/components/whirlpool/config_flow.py @@ -12,7 +12,7 @@ from whirlpool.auth import Auth from whirlpool.backendselector import BackendSelector -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -71,14 +71,11 @@ class WhirlpoolConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Whirlpool Sixth Sense.""" VERSION = 1 - entry: ConfigEntry | None async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with Whirlpool Sixth Sense.""" - - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -88,10 +85,10 @@ async def async_step_reauth_confirm( errors: dict[str, str] = {} if user_input: - assert self.entry is not None + reauth_entry = self._get_reauth_entry() password = user_input[CONF_PASSWORD] brand = user_input[CONF_BRAND] - data = {**self.entry.data, CONF_PASSWORD: password, CONF_BRAND: brand} + data = {**reauth_entry.data, CONF_PASSWORD: password, CONF_BRAND: brand} try: await validate_input(self.hass, data) @@ -100,9 +97,7 @@ async def async_step_reauth_confirm( except (CannotConnect, TimeoutError): errors["base"] = "cannot_connect" else: - self.hass.config_entries.async_update_entry(self.entry, data=data) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort(reauth_entry, data=data) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/whirlpool/strings.json b/homeassistant/components/whirlpool/strings.json index 4b4673b771eb48..09257652ece24a 100644 --- a/homeassistant/components/whirlpool/strings.json +++ b/homeassistant/components/whirlpool/strings.json @@ -27,7 +27,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", diff --git a/homeassistant/components/whois/__init__.py b/homeassistant/components/whois/__init__.py index b9f5938d93be5e..07116825f2946a 100644 --- a/homeassistant/components/whois/__init__.py +++ b/homeassistant/components/whois/__init__.py @@ -35,6 +35,7 @@ async def _async_query_domain() -> Domain | None: coordinator: DataUpdateCoordinator[Domain | None] = DataUpdateCoordinator( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_APK", 
update_interval=SCAN_INTERVAL, update_method=_async_query_domain, diff --git a/homeassistant/components/wiffi/config_flow.py b/homeassistant/components/wiffi/config_flow.py index 3fcbef395e60a6..308923597cd637 100644 --- a/homeassistant/components/wiffi/config_flow.py +++ b/homeassistant/components/wiffi/config_flow.py @@ -34,7 +34,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Create Wiffi server setup option flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -79,10 +79,6 @@ def _async_show_form(self, errors=None): class OptionsFlowHandler(OptionsFlow): """Wiffi server setup option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/withings/config_flow.py b/homeassistant/components/withings/config_flow.py index 150c0d52890611..d7f07ccc18469c 100644 --- a/homeassistant/components/withings/config_flow.py +++ b/homeassistant/components/withings/config_flow.py @@ -9,7 +9,7 @@ from aiowithings import AuthScope from homeassistant.components.webhook import async_generate_id -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.helpers import config_entry_oauth2_flow @@ -23,8 +23,6 @@ class WithingsFlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None - @property def logger(self) -> logging.Logger: """Return logger.""" @@ -42,9 +40,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -52,18 +47,17 @@ async def async_step_reauth_confirm( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - assert self.reauth_entry return self.async_show_form( step_id="reauth_confirm", - description_placeholders={CONF_NAME: self.reauth_entry.title}, + description_placeholders={CONF_NAME: self._get_reauth_entry().title}, ) return await self.async_step_user() async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow, or update existing entry.""" user_id = str(data[CONF_TOKEN]["userid"]) - if not self.reauth_entry: - await self.async_set_unique_id(user_id) + await self.async_set_unique_id(user_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return self.async_create_entry( @@ -71,9 +65,7 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu data={**data, CONF_WEBHOOK_ID: async_generate_id()}, ) - if self.reauth_entry.unique_id == user_id: - return self.async_update_reload_and_abort( - self.reauth_entry, data={**self.reauth_entry.data, **data} - ) - - return self.async_abort(reason="wrong_account") + self._abort_if_unique_id_mismatch(reason="wrong_account") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=data + ) diff --git a/homeassistant/components/withings/manifest.json 
b/homeassistant/components/withings/manifest.json index e0d85f207a329f..a0a86be5da3802 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_push", "loggers": ["aiowithings"], "quality_scale": "platinum", - "requirements": ["aiowithings==3.1.0"] + "requirements": ["aiowithings==3.1.1"] } diff --git a/homeassistant/components/withings/strings.json b/homeassistant/components/withings/strings.json index 38592305c3d662..775ef5cdaab3ee 100644 --- a/homeassistant/components/withings/strings.json +++ b/homeassistant/components/withings/strings.json @@ -21,6 +21,7 @@ "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "wrong_account": "Authenticated account does not match the account to be reauthenticated. Please log in with the correct account." }, "create_entry": { diff --git a/homeassistant/components/wiz/__init__.py b/homeassistant/components/wiz/__init__.py index 1bf3188e9e959b..0e986aaefa2c59 100644 --- a/homeassistant/components/wiz/__init__.py +++ b/homeassistant/components/wiz/__init__.py @@ -103,6 +103,7 @@ async def _async_update() -> float | None: coordinator = DataUpdateCoordinator( hass=hass, logger=_LOGGER, + config_entry=entry, name=entry.title, update_interval=timedelta(seconds=15), update_method=_async_update, diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index 2798e0d46d1ab3..812a0500d1a38d 100644 --- a/homeassistant/components/wled/config_flow.py +++ b/homeassistant/components/wled/config_flow.py @@ -12,7 +12,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import callback @@ -30,9 +30,11 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> WLEDOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> WLEDOptionsFlowHandler: """Get the options flow for this handler.""" - return WLEDOptionsFlowHandler(config_entry) + return WLEDOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -117,7 +119,7 @@ async def _async_get_device(self, host: str) -> Device: return await wled.update() -class WLEDOptionsFlowHandler(OptionsFlowWithConfigEntry): +class WLEDOptionsFlowHandler(OptionsFlow): """Handle WLED options.""" async def async_step_init( @@ -133,7 +135,7 @@ async def async_step_init( { vol.Optional( CONF_KEEP_MAIN_LIGHT, - default=self.options.get( + default=self.config_entry.options.get( CONF_KEEP_MAIN_LIGHT, DEFAULT_KEEP_MAIN_LIGHT ), ): bool, diff --git a/homeassistant/components/wled/coordinator.py b/homeassistant/components/wled/coordinator.py index cb39fde5e5a420..8e2855e9f05433 100644 --- a/homeassistant/components/wled/coordinator.py +++ b/homeassistant/components/wled/coordinator.py @@ -49,6 +49,7 @@ def __init__( super().__init__( hass, LOGGER, + config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, ) @@ -133,6 +134,7 @@ def __init__(self, hass: HomeAssistant) -> None: super().__init__( hass, LOGGER, + config_entry=None, name=DOMAIN, update_interval=RELEASES_SCAN_INTERVAL, ) diff --git 
a/homeassistant/components/wmspro/__init__.py b/homeassistant/components/wmspro/__init__.py index c0c4a9e3950065..37bf1495a56baa 100644 --- a/homeassistant/components/wmspro/__init__.py +++ b/homeassistant/components/wmspro/__init__.py @@ -15,7 +15,7 @@ from .const import DOMAIN, MANUFACTURER -PLATFORMS: list[Platform] = [Platform.COVER] +PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SCENE] type WebControlProConfigEntry = ConfigEntry[WebControlPro] diff --git a/homeassistant/components/wmspro/config_flow.py b/homeassistant/components/wmspro/config_flow.py index ba3b5ef367d1af..2ce58ec9ecaaa9 100644 --- a/homeassistant/components/wmspro/config_flow.py +++ b/homeassistant/components/wmspro/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import ipaddress import logging from typing import Any @@ -38,7 +39,19 @@ async def async_step_dhcp( """Handle the DHCP discovery step.""" unique_id = format_mac(discovery_info.macaddress) await self.async_set_unique_id(unique_id) - self._abort_if_unique_id_configured() + + entry = self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, unique_id + ) + if entry: + try: # Check if current host is a valid IP address + ipaddress.ip_address(entry.data[CONF_HOST]) + except ValueError: # Do not touch name-based host + return self.async_abort(reason="already_configured") + else: # Update existing host with new IP address + self._abort_if_unique_id_configured( + updates={CONF_HOST: discovery_info.ip} + ) for entry in self.hass.config_entries.async_entries(DOMAIN): if not entry.unique_id and entry.data[CONF_HOST] in ( @@ -71,11 +84,20 @@ async def async_step_user( if not pong: errors["base"] = "cannot_connect" else: + await hub.refresh() + rooms = set(hub.rooms.keys()) + for entry in self.hass.config_entries.async_loaded_entries(DOMAIN): + if ( + entry.runtime_data + and entry.runtime_data.rooms + and set(entry.runtime_data.rooms.keys()) == rooms + ): + return self.async_abort(reason="already_configured") return self.async_create_entry(title=host, data=user_input) if self.source == dhcp.DOMAIN: discovery_info: DhcpServiceInfo = self.init_data - data_values = {CONF_HOST: discovery_info.hostname or discovery_info.ip} + data_values = {CONF_HOST: discovery_info.ip} else: data_values = {CONF_HOST: SUGGESTED_HOST} diff --git a/homeassistant/components/wmspro/const.py b/homeassistant/components/wmspro/const.py index 0a1036cf632234..d92534d9e46f80 100644 --- a/homeassistant/components/wmspro/const.py +++ b/homeassistant/components/wmspro/const.py @@ -5,3 +5,5 @@ ATTRIBUTION = "Data provided by WMS WebControl pro API" MANUFACTURER = "WAREMA Renkhoff SE" + +BRIGHTNESS_SCALE = (1, 100) diff --git a/homeassistant/components/wmspro/cover.py b/homeassistant/components/wmspro/cover.py index b8540a5bf08327..a36b34642b71e3 100644 --- a/homeassistant/components/wmspro/cover.py +++ b/homeassistant/components/wmspro/cover.py @@ -46,12 +46,12 @@ class WebControlProAwning(WebControlProGenericEntity, CoverEntity): def current_cover_position(self) -> int | None: """Return current position of cover.""" action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) - return action["percentage"] + return 100 - action["percentage"] async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive) - await action(percentage=kwargs[ATTR_POSITION]) + await action(percentage=100 - kwargs[ATTR_POSITION]) 
     @property
     def is_closed(self) -> bool | None:
@@ -61,12 +61,12 @@ def is_closed(self) -> bool | None:
     async def async_open_cover(self, **kwargs: Any) -> None:
         """Open the cover."""
         action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive)
-        await action(percentage=100)
+        await action(percentage=0)
 
     async def async_close_cover(self, **kwargs: Any) -> None:
         """Close the cover."""
         action = self._dest.action(WMS_WebControl_pro_API_actionDescription.AwningDrive)
-        await action(percentage=0)
+        await action(percentage=100)
 
     async def async_stop_cover(self, **kwargs: Any) -> None:
         """Stop the device if in motion."""
diff --git a/homeassistant/components/wmspro/light.py b/homeassistant/components/wmspro/light.py
new file mode 100644
index 00000000000000..9242982bcf9161
--- /dev/null
+++ b/homeassistant/components/wmspro/light.py
@@ -0,0 +1,89 @@
+"""Support for lights connected with WMS WebControl pro."""
+
+from __future__ import annotations
+
+from datetime import timedelta
+from typing import Any
+
+from wmspro.const import WMS_WebControl_pro_API_actionDescription
+
+from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.util.color import brightness_to_value, value_to_brightness
+
+from . import WebControlProConfigEntry
+from .const import BRIGHTNESS_SCALE
+from .entity import WebControlProGenericEntity
+
+SCAN_INTERVAL = timedelta(seconds=5)
+PARALLEL_UPDATES = 1
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: WebControlProConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up the WMS based lights from a config entry."""
+    hub = config_entry.runtime_data
+
+    entities: list[WebControlProGenericEntity] = []
+    for dest in hub.dests.values():
+        if dest.action(WMS_WebControl_pro_API_actionDescription.LightDimming):
+            entities.append(WebControlProDimmer(config_entry.entry_id, dest))
+        elif dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch):
+            entities.append(WebControlProLight(config_entry.entry_id, dest))
+
+    async_add_entities(entities)
+
+
+class WebControlProLight(WebControlProGenericEntity, LightEntity):
+    """Representation of a WMS based light."""
+
+    _attr_color_mode = ColorMode.ONOFF
+    _attr_supported_color_modes = {ColorMode.ONOFF}
+
+    @property
+    def is_on(self) -> bool:
+        """Return true if light is on."""
+        action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
+        return action["onOffState"]
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn the light on."""
+        action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
+        await action(onOffState=True)
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn the light off."""
+        action = self._dest.action(WMS_WebControl_pro_API_actionDescription.LightSwitch)
+        await action(onOffState=False)
+
+
+class WebControlProDimmer(WebControlProLight):
+    """Representation of a WMS-based dimmable light."""
+
+    _attr_color_mode = ColorMode.BRIGHTNESS
+    _attr_supported_color_modes = {ColorMode.BRIGHTNESS}
+
+    @property
+    def brightness(self) -> int:
+        """Return the brightness of this light between 1..255."""
+        action = self._dest.action(
+            WMS_WebControl_pro_API_actionDescription.LightDimming
+        )
+        return value_to_brightness(BRIGHTNESS_SCALE, action["percentage"])
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn the dimmer on."""
+        if ATTR_BRIGHTNESS not in kwargs:
+            await super().async_turn_on(**kwargs)
+            return
+
+        action = self._dest.action(
+            WMS_WebControl_pro_API_actionDescription.LightDimming
+        )
+        await action(
+            percentage=brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])
+        )
diff --git a/homeassistant/components/wmspro/manifest.json b/homeassistant/components/wmspro/manifest.json
index 3e0c4e21e6c76d..dd65be3e7e7ded 100644
--- a/homeassistant/components/wmspro/manifest.json
+++ b/homeassistant/components/wmspro/manifest.json
@@ -3,7 +3,6 @@
   "name": "WMS WebControl pro",
   "codeowners": ["@mback2k"],
   "config_flow": true,
-  "dependencies": [],
   "dhcp": [
     {
       "macaddress": "0023D5*"
@@ -15,5 +14,5 @@
   "documentation": "https://www.home-assistant.io/integrations/wmspro",
   "integration_type": "hub",
   "iot_class": "local_polling",
-  "requirements": ["pywmspro==0.2.0"]
+  "requirements": ["pywmspro==0.2.1"]
 }
diff --git a/homeassistant/components/wmspro/scene.py b/homeassistant/components/wmspro/scene.py
new file mode 100644
index 00000000000000..de18106b7f0fdc
--- /dev/null
+++ b/homeassistant/components/wmspro/scene.py
@@ -0,0 +1,64 @@
+"""Support for scenes provided by WMS WebControl pro."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from wmspro.scene import Scene as WMS_Scene
+
+from homeassistant.components.scene import Scene
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import WebControlProConfigEntry
+from .const import ATTRIBUTION, DOMAIN, MANUFACTURER
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: WebControlProConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up the WMS based scenes from a config entry."""
+    hub = config_entry.runtime_data
+
+    async_add_entities(
+        WebControlProScene(config_entry.entry_id, scene)
+        for scene in hub.scenes.values()
+    )
+
+
+class WebControlProScene(Scene):
+    """Representation of a WMS based scene."""
+
+    _attr_attribution = ATTRIBUTION
+    _attr_has_entity_name = True
+
+    def __init__(self, config_entry_id: str, scene: WMS_Scene) -> None:
+        """Initialize the entity with the configured scene."""
+        super().__init__()
+
+        # Scene information
+        self._scene = scene
+        self._attr_name = scene.name
+        self._attr_unique_id = str(scene.id)
+
+        # Room information
+        room = scene.room
+        room_name = room.name
+        room_id_str = str(room.id)
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, room_id_str)},
+            manufacturer=MANUFACTURER,
+            model="Room",
+            name=room_name,
+            serial_number=room_id_str,
+            suggested_area=room_name,
+            via_device=(DOMAIN, config_entry_id),
+            configuration_url=f"http://{scene.host}/control",
+        )
+
+    async def async_activate(self, **kwargs: Any) -> None:
+        """Activate scene.
Try to get entities into requested state.""" + await self._scene() diff --git a/homeassistant/components/wolflink/__init__.py b/homeassistant/components/wolflink/__init__.py index b897debfedec43..49197ed7d265da 100644 --- a/homeassistant/components/wolflink/__init__.py +++ b/homeassistant/components/wolflink/__init__.py @@ -100,6 +100,7 @@ async def async_update_data(): coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_method=async_update_data, update_interval=timedelta(seconds=60), diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index 2552fe849e26c3..4d93fccb1a77ef 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -12,7 +12,7 @@ ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE, CONF_NAME from homeassistant.core import callback @@ -219,7 +219,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> WorkdayOptionsFlowHandler: """Get the options flow for this handler.""" - return WorkdayOptionsFlowHandler(config_entry) + return WorkdayOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -310,7 +310,7 @@ async def async_step_options( ) -class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): +class WorkdayOptionsFlowHandler(OptionsFlow): """Handle Workday options.""" async def async_step_init( @@ -320,7 +320,7 @@ async def async_step_init( errors: dict[str, str] = {} if user_input is not None: - combined_input: dict[str, Any] = {**self.options, **user_input} + combined_input: dict[str, Any] = {**self.config_entry.options, **user_input} if CONF_PROVINCE not in user_input: # Province not present, delete old value (if present) too combined_input.pop(CONF_PROVINCE, None) @@ -340,7 +340,7 @@ async def async_step_init( else: LOGGER.debug("abort_check in options with %s", combined_input) abort_match = { - CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), + CONF_COUNTRY: self.config_entry.options.get(CONF_COUNTRY), CONF_EXCLUDES: combined_input[CONF_EXCLUDES], CONF_OFFSET: combined_input[CONF_OFFSET], CONF_WORKDAYS: combined_input[CONF_WORKDAYS], @@ -357,23 +357,22 @@ async def async_step_init( else: return self.async_create_entry(data=combined_input) + options = self.config_entry.options schema: vol.Schema = await self.hass.async_add_executor_job( add_province_and_language_to_schema, DATA_SCHEMA_OPT, - self.options.get(CONF_COUNTRY), + options.get(CONF_COUNTRY), ) - new_schema = self.add_suggested_values_to_schema( - schema, user_input or self.options - ) + new_schema = self.add_suggested_values_to_schema(schema, user_input or options) LOGGER.debug("Errors have occurred in options %s", errors) return self.async_show_form( step_id="init", data_schema=new_schema, errors=errors, description_placeholders={ - "name": self.options[CONF_NAME], - "country": self.options.get(CONF_COUNTRY), + "name": options[CONF_NAME], + "country": options.get(CONF_COUNTRY), }, ) diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 1201354bab2ed2..b02db73472967c 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.57"] + "requirements": 
["holidays==0.60"] } diff --git a/homeassistant/components/ws66i/config_flow.py b/homeassistant/components/ws66i/config_flow.py index 9f6f4ca59c201f..120b7738d2ec1f 100644 --- a/homeassistant/components/ws66i/config_flow.py +++ b/homeassistant/components/ws66i/config_flow.py @@ -130,7 +130,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> Ws66iOptionsFlowHandler: """Define the config flow to handle options.""" - return Ws66iOptionsFlowHandler(config_entry) + return Ws66iOptionsFlowHandler() @callback @@ -145,10 +145,6 @@ def _key_for_source( class Ws66iOptionsFlowHandler(OptionsFlow): """Handle a WS66i options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/wyoming/__init__.py b/homeassistant/components/wyoming/__init__.py index 00d587e2bb4225..d639933ece69df 100644 --- a/homeassistant/components/wyoming/__init__.py +++ b/homeassistant/components/wyoming/__init__.py @@ -14,11 +14,11 @@ from .data import WyomingService from .devices import SatelliteDevice from .models import DomainDataItem -from .satellite import WyomingSatellite _LOGGER = logging.getLogger(__name__) SATELLITE_PLATFORMS = [ + Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, Platform.SELECT, Platform.SWITCH, @@ -47,51 +47,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload(entry.add_update_listener(update_listener)) if (satellite_info := service.info.satellite) is not None: - # Create satellite device, etc. - item.satellite = _make_satellite(hass, entry, service) - - # Set up satellite sensors, switches, etc. - await hass.config_entries.async_forward_entry_setups(entry, SATELLITE_PLATFORMS) + # Create satellite device + dev_reg = dr.async_get(hass) + + # Use config entry id since only one satellite per entry is supported + satellite_id = entry.entry_id + device = dev_reg.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, satellite_id)}, + name=satellite_info.name, + suggested_area=satellite_info.area, + ) - # Start satellite communication - entry.async_create_background_task( - hass, - item.satellite.run(), - f"Satellite {satellite_info.name}", + item.device = SatelliteDevice( + satellite_id=satellite_id, + device_id=device.id, ) - entry.async_on_unload(item.satellite.stop) + # Set up satellite entity, sensors, switches, etc. 
+ await hass.config_entries.async_forward_entry_setups(entry, SATELLITE_PLATFORMS) return True -def _make_satellite( - hass: HomeAssistant, config_entry: ConfigEntry, service: WyomingService -) -> WyomingSatellite: - """Create Wyoming satellite/device from config entry and Wyoming service.""" - satellite_info = service.info.satellite - assert satellite_info is not None - - dev_reg = dr.async_get(hass) - - # Use config entry id since only one satellite per entry is supported - satellite_id = config_entry.entry_id - - device = dev_reg.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, satellite_id)}, - name=satellite_info.name, - suggested_area=satellite_info.area, - ) - - satellite_device = SatelliteDevice( - satellite_id=satellite_id, - device_id=device.id, - ) - - return WyomingSatellite(hass, config_entry, service, satellite_device) - - async def update_listener(hass: HomeAssistant, entry: ConfigEntry): """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) @@ -102,7 +80,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: item: DomainDataItem = hass.data[DOMAIN][entry.entry_id] platforms = list(item.service.platforms) - if item.satellite is not None: + if item.device is not None: platforms += SATELLITE_PLATFORMS unload_ok = await hass.config_entries.async_unload_platforms(entry, platforms) diff --git a/homeassistant/components/wyoming/satellite.py b/homeassistant/components/wyoming/assist_satellite.py similarity index 82% rename from homeassistant/components/wyoming/satellite.py rename to homeassistant/components/wyoming/assist_satellite.py index 781f0706c680a6..615084bcbf333d 100644 --- a/homeassistant/components/wyoming/satellite.py +++ b/homeassistant/components/wyoming/assist_satellite.py @@ -1,12 +1,12 @@ -"""Support for Wyoming satellite services.""" +"""Assist satellite entity for Wyoming integration.""" + +from __future__ import annotations import asyncio from collections.abc import AsyncGenerator import io import logging -import time -from typing import Final -from uuid import uuid4 +from typing import Any, Final import wave from wyoming.asr import Transcribe, Transcript @@ -18,20 +18,28 @@ from wyoming.ping import Ping, Pong from wyoming.pipeline import PipelineStage, RunPipeline from wyoming.satellite import PauseSatellite, RunSatellite +from wyoming.snd import Played from wyoming.timer import TimerCancelled, TimerFinished, TimerStarted, TimerUpdated from wyoming.tts import Synthesize, SynthesizeVoice from wyoming.vad import VoiceStarted, VoiceStopped from wyoming.wake import Detect, Detection -from homeassistant.components import assist_pipeline, intent, stt, tts -from homeassistant.components.assist_pipeline import select as pipeline_select -from homeassistant.components.assist_pipeline.vad import VadSensitivity +from homeassistant.components import assist_pipeline, intent, tts +from homeassistant.components.assist_pipeline import PipelineEvent +from homeassistant.components.assist_satellite import ( + AssistSatelliteConfiguration, + AssistSatelliteEntity, + AssistSatelliteEntityDescription, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .data import WyomingService from .devices import SatelliteDevice +from .entity import WyomingSatelliteEntity +from 
.models import DomainDataItem _LOGGER = logging.getLogger(__name__) @@ -41,7 +49,6 @@ _PING_TIMEOUT: Final = 5 _PING_SEND_DELAY: Final = 2 _PIPELINE_FINISH_TIMEOUT: Final = 1 -_CONVERSATION_TIMEOUT_SEC: Final = 5 * 60 # 5 minutes # Wyoming stage -> Assist stage _STAGES: dict[PipelineStage, assist_pipeline.PipelineStage] = { @@ -52,21 +59,46 @@ } -class WyomingSatellite: - """Remove voice satellite running the Wyoming protocol.""" +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Wyoming Assist satellite entity.""" + domain_data: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] + assert domain_data.device is not None + + async_add_entities( + [ + WyomingAssistSatellite( + hass, domain_data.service, domain_data.device, config_entry + ) + ] + ) + + +class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity): + """Assist satellite for Wyoming devices.""" + + entity_description = AssistSatelliteEntityDescription(key="assist_satellite") + _attr_translation_key = "assist_satellite" + _attr_name = None def __init__( self, hass: HomeAssistant, - config_entry: ConfigEntry, service: WyomingService, device: SatelliteDevice, + config_entry: ConfigEntry, ) -> None: - """Initialize satellite.""" - self.hass = hass - self.config_entry = config_entry + """Initialize an Assist satellite.""" + WyomingSatelliteEntity.__init__(self, device) + AssistSatelliteEntity.__init__(self) + self.service = service self.device = device + self.config_entry = config_entry + self.is_running = True self._client: AsyncTcpClient | None = None @@ -84,6 +116,160 @@ def __init__( self.device.set_pipeline_listener(self._pipeline_changed) self.device.set_audio_settings_listener(self._audio_settings_changed) + @property + def pipeline_entity_id(self) -> str | None: + """Return the entity ID of the pipeline to use for the next conversation.""" + return self.device.get_pipeline_entity_id(self.hass) + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Return the entity ID of the VAD sensitivity to use for the next conversation.""" + return self.device.get_vad_sensitivity_entity_id(self.hass) + + @property + def tts_options(self) -> dict[str, Any] | None: + """Options passed for text-to-speech.""" + return { + tts.ATTR_PREFERRED_FORMAT: "wav", + tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + self.start_satellite() + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + self.stop_satellite() + + @callback + def async_get_configuration( + self, + ) -> AssistSatelliteConfiguration: + """Get the current satellite configuration.""" + raise NotImplementedError + + async def async_set_configuration( + self, config: AssistSatelliteConfiguration + ) -> None: + """Set the current satellite configuration.""" + raise NotImplementedError + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Set state based on pipeline stage.""" + assert self._client is not None + + if event.type == assist_pipeline.PipelineEventType.RUN_END: + # Pipeline run is complete + self._is_pipeline_running = False + self._pipeline_ended_event.set() + self.device.set_is_active(False) + elif event.type == 
assist_pipeline.PipelineEventType.WAKE_WORD_START: + self.hass.add_job(self._client.write_event(Detect().event())) + elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_END: + # Wake word detection + # Inform client of wake word detection + if event.data and (wake_word_output := event.data.get("wake_word_output")): + detection = Detection( + name=wake_word_output["wake_word_id"], + timestamp=wake_word_output.get("timestamp"), + ) + self.hass.add_job(self._client.write_event(detection.event())) + elif event.type == assist_pipeline.PipelineEventType.STT_START: + # Speech-to-text + self.device.set_is_active(True) + + if event.data: + self.hass.add_job( + self._client.write_event( + Transcribe(language=event.data["metadata"]["language"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_VAD_START: + # User started speaking + if event.data: + self.hass.add_job( + self._client.write_event( + VoiceStarted(timestamp=event.data["timestamp"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_VAD_END: + # User stopped speaking + if event.data: + self.hass.add_job( + self._client.write_event( + VoiceStopped(timestamp=event.data["timestamp"]).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.STT_END: + # Speech-to-text transcript + if event.data: + # Inform client of transript + stt_text = event.data["stt_output"]["text"] + self.hass.add_job( + self._client.write_event(Transcript(text=stt_text).event()) + ) + elif event.type == assist_pipeline.PipelineEventType.TTS_START: + # Text-to-speech text + if event.data: + # Inform client of text + self.hass.add_job( + self._client.write_event( + Synthesize( + text=event.data["tts_input"], + voice=SynthesizeVoice( + name=event.data.get("voice"), + language=event.data.get("language"), + ), + ).event() + ) + ) + elif event.type == assist_pipeline.PipelineEventType.TTS_END: + # TTS stream + if event.data and (tts_output := event.data["tts_output"]): + media_id = tts_output["media_id"] + self.hass.add_job(self._stream_tts(media_id)) + elif event.type == assist_pipeline.PipelineEventType.ERROR: + # Pipeline error + if event.data: + self.hass.add_job( + self._client.write_event( + Error( + text=event.data["message"], code=event.data["code"] + ).event() + ) + ) + + # ------------------------------------------------------------------------- + + def start_satellite(self) -> None: + """Start satellite task.""" + self.is_running = True + + self.config_entry.async_create_background_task( + self.hass, self.run(), "wyoming satellite run" + ) + + def stop_satellite(self) -> None: + """Signal satellite task to stop running.""" + # Stop existing pipeline + self._audio_queue.put_nowait(None) + + # Tell satellite to stop running + self._send_pause() + + # Stop task loop + self.is_running = False + + # Unblock waiting for unmuted + self._muted_changed_event.set() + + # ------------------------------------------------------------------------- + async def run(self) -> None: """Run and maintain a connection to satellite.""" _LOGGER.debug("Running satellite task") @@ -110,6 +296,9 @@ async def run(self) -> None: except Exception as err: # noqa: BLE001 _LOGGER.debug("%s: %s", err.__class__.__name__, str(err)) + # Stop any existing pipeline + self._audio_queue.put_nowait(None) + # Ensure sensor is off (before restart) self.device.set_is_active(False) @@ -123,17 +312,6 @@ async def run(self) -> None: await self.on_stopped() - def stop(self) -> None: - """Signal satellite task to stop running.""" - # Tell 
satellite to stop running - self._send_pause() - - # Stop task loop - self.is_running = False - - # Unblock waiting for unmuted - self._muted_changed_event.set() - async def on_restart(self) -> None: """Block until pipeline loop will be restarted.""" _LOGGER.warning( @@ -151,7 +329,7 @@ async def on_reconnect(self) -> None: await asyncio.sleep(_RECONNECT_SECONDS) async def on_muted(self) -> None: - """Block until device may be unmated again.""" + """Block until device may be unmuted again.""" await self._muted_changed_event.wait() async def on_stopped(self) -> None: @@ -252,6 +430,7 @@ async def _run_pipeline_loop(self) -> None: done, pending = await asyncio.wait( pending, return_when=asyncio.FIRST_COMPLETED ) + if pipeline_ended_task in done: # Pipeline run end event was received _LOGGER.debug("Pipeline finished") @@ -302,7 +481,7 @@ async def _run_pipeline_loop(self) -> None: elif AudioStop.is_type(client_event.type) and self._is_pipeline_running: # Stop pipeline _LOGGER.debug("Client requested pipeline to stop") - self._audio_queue.put_nowait(b"") + self._audio_queue.put_nowait(None) elif Info.is_type(client_event.type): client_info = Info.from_event(client_event) _LOGGER.debug("Updated client info: %s", client_info) @@ -329,6 +508,9 @@ async def _run_pipeline_loop(self) -> None: break _LOGGER.debug("Client detected wake word: %s", wake_word_phrase) + elif Played.is_type(client_event.type): + # TTS response has finished playing on satellite + self.tts_response_finished() else: _LOGGER.debug("Unexpected event from satellite: %s", client_event) @@ -353,72 +535,20 @@ def _run_pipeline_once( if end_stage is None: raise ValueError(f"Invalid end stage: {end_stage}") - pipeline_id = pipeline_select.get_chosen_pipeline( - self.hass, - DOMAIN, - self.device.satellite_id, - ) - pipeline = assist_pipeline.async_get_pipeline(self.hass, pipeline_id) - assert pipeline is not None - # We will push audio in through a queue self._audio_queue = asyncio.Queue() - stt_stream = self._stt_stream() - - # Start pipeline running - _LOGGER.debug( - "Starting pipeline %s from %s to %s", - pipeline.name, - start_stage, - end_stage, - ) - - # Reset conversation id, if necessary - if (self._conversation_id_time is None) or ( - (time.monotonic() - self._conversation_id_time) > _CONVERSATION_TIMEOUT_SEC - ): - self._conversation_id = None - - if self._conversation_id is None: - self._conversation_id = str(uuid4()) - - # Update timeout - self._conversation_id_time = time.monotonic() self._is_pipeline_running = True self._pipeline_ended_event.clear() self.config_entry.async_create_background_task( self.hass, - assist_pipeline.async_pipeline_from_audio_stream( - self.hass, - context=Context(), - event_callback=self._event_callback, - stt_metadata=stt.SpeechMetadata( - language=pipeline.language, - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=stt_stream, + self.async_accept_pipeline_from_satellite( + audio_stream=self._stt_stream(), start_stage=start_stage, end_stage=end_stage, - tts_audio_output="wav", - pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - noise_suppression_level=self.device.noise_suppression_level, - auto_gain_dbfs=self.device.auto_gain, - volume_multiplier=self.device.volume_multiplier, - silence_seconds=VadSensitivity.to_seconds( - self.device.vad_sensitivity - ), - ), - device_id=self.device.device_id, 
wake_word_phrase=wake_word_phrase, - conversation_id=self._conversation_id, ), - name="wyoming satellite pipeline", + "wyoming satellite pipeline", ) async def _send_delayed_ping(self) -> None: @@ -431,91 +561,6 @@ async def _send_delayed_ping(self) -> None: except ConnectionError: pass # handled with timeout - def _event_callback(self, event: assist_pipeline.PipelineEvent) -> None: - """Translate pipeline events into Wyoming events.""" - assert self._client is not None - - if event.type == assist_pipeline.PipelineEventType.RUN_END: - # Pipeline run is complete - self._is_pipeline_running = False - self._pipeline_ended_event.set() - self.device.set_is_active(False) - elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_START: - self.hass.add_job(self._client.write_event(Detect().event())) - elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_END: - # Wake word detection - # Inform client of wake word detection - if event.data and (wake_word_output := event.data.get("wake_word_output")): - detection = Detection( - name=wake_word_output["wake_word_id"], - timestamp=wake_word_output.get("timestamp"), - ) - self.hass.add_job(self._client.write_event(detection.event())) - elif event.type == assist_pipeline.PipelineEventType.STT_START: - # Speech-to-text - self.device.set_is_active(True) - - if event.data: - self.hass.add_job( - self._client.write_event( - Transcribe(language=event.data["metadata"]["language"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_VAD_START: - # User started speaking - if event.data: - self.hass.add_job( - self._client.write_event( - VoiceStarted(timestamp=event.data["timestamp"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_VAD_END: - # User stopped speaking - if event.data: - self.hass.add_job( - self._client.write_event( - VoiceStopped(timestamp=event.data["timestamp"]).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.STT_END: - # Speech-to-text transcript - if event.data: - # Inform client of transript - stt_text = event.data["stt_output"]["text"] - self.hass.add_job( - self._client.write_event(Transcript(text=stt_text).event()) - ) - elif event.type == assist_pipeline.PipelineEventType.TTS_START: - # Text-to-speech text - if event.data: - # Inform client of text - self.hass.add_job( - self._client.write_event( - Synthesize( - text=event.data["tts_input"], - voice=SynthesizeVoice( - name=event.data.get("voice"), - language=event.data.get("language"), - ), - ).event() - ) - ) - elif event.type == assist_pipeline.PipelineEventType.TTS_END: - # TTS stream - if event.data and (tts_output := event.data["tts_output"]): - media_id = tts_output["media_id"] - self.hass.add_job(self._stream_tts(media_id)) - elif event.type == assist_pipeline.PipelineEventType.ERROR: - # Pipeline error - if event.data: - self.hass.add_job( - self._client.write_event( - Error( - text=event.data["message"], code=event.data["code"] - ).event() - ) - ) - async def _connect(self) -> None: """Connect to satellite over TCP.""" await self._disconnect() @@ -576,16 +621,16 @@ async def _stream_tts(self, media_id: str) -> None: async def _stt_stream(self) -> AsyncGenerator[bytes]: """Yield audio chunks from a queue.""" - try: - is_first_chunk = True - while chunk := await self._audio_queue.get(): - if is_first_chunk: - is_first_chunk = False - _LOGGER.debug("Receiving audio from satellite") - - yield chunk - except asyncio.CancelledError: - pass # ignore + is_first_chunk = True + while chunk := await 
self._audio_queue.get(): + if chunk is None: + break + + if is_first_chunk: + is_first_chunk = False + _LOGGER.debug("Receiving audio from satellite") + + yield chunk @callback def _handle_timer( diff --git a/homeassistant/components/wyoming/binary_sensor.py b/homeassistant/components/wyoming/binary_sensor.py index ac5db0cda99600..24ee073ec4dca7 100644 --- a/homeassistant/components/wyoming/binary_sensor.py +++ b/homeassistant/components/wyoming/binary_sensor.py @@ -28,9 +28,9 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not None + assert item.device is not None - async_add_entities([WyomingSatelliteAssistInProgress(item.satellite.device)]) + async_add_entities([WyomingSatelliteAssistInProgress(item.device)]) class WyomingSatelliteAssistInProgress(WyomingSatelliteEntity, BinarySensorEntity): diff --git a/homeassistant/components/wyoming/config_flow.py b/homeassistant/components/wyoming/config_flow.py index 4ed2d458ad566d..5fdcb1a5484c6c 100644 --- a/homeassistant/components/wyoming/config_flow.py +++ b/homeassistant/components/wyoming/config_flow.py @@ -8,9 +8,10 @@ import voluptuous as vol -from homeassistant.components import hassio, zeroconf +from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .const import DOMAIN from .data import WyomingService @@ -30,7 +31,7 @@ class WyomingConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _hassio_discovery: hassio.HassioServiceInfo + _hassio_discovery: HassioServiceInfo _service: WyomingService | None = None _name: str | None = None @@ -61,7 +62,7 @@ async def async_step_user( return self.async_abort(reason="no_services") async def async_step_hassio( - self, discovery_info: hassio.HassioServiceInfo + self, discovery_info: HassioServiceInfo ) -> ConfigFlowResult: """Handle Supervisor add-on discovery.""" _LOGGER.debug("Supervisor discovery info: %s", discovery_info) diff --git a/homeassistant/components/wyoming/conversation.py b/homeassistant/components/wyoming/conversation.py new file mode 100644 index 00000000000000..9a17559c1f8db3 --- /dev/null +++ b/homeassistant/components/wyoming/conversation.py @@ -0,0 +1,194 @@ +"""Support for Wyoming intent recognition services.""" + +import logging + +from wyoming.asr import Transcript +from wyoming.client import AsyncTcpClient +from wyoming.handle import Handled, NotHandled +from wyoming.info import HandleProgram, IntentProgram +from wyoming.intent import Intent, NotRecognized + +from homeassistant.components import conversation +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import ulid + +from .const import DOMAIN +from .data import WyomingService +from .error import WyomingError +from .models import DomainDataItem + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Wyoming conversation.""" + item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] + async_add_entities( + [ + WyomingConversationEntity(config_entry, item.service), + ] + ) + + +class 
WyomingConversationEntity( + conversation.ConversationEntity, conversation.AbstractConversationAgent +): + """Wyoming conversation agent.""" + + _attr_has_entity_name = True + + def __init__( + self, + config_entry: ConfigEntry, + service: WyomingService, + ) -> None: + """Set up provider.""" + super().__init__() + + self.service = service + + self._intent_service: IntentProgram | None = None + self._handle_service: HandleProgram | None = None + + for maybe_intent in self.service.info.intent: + if maybe_intent.installed: + self._intent_service = maybe_intent + break + + for maybe_handle in self.service.info.handle: + if maybe_handle.installed: + self._handle_service = maybe_handle + break + + model_languages: set[str] = set() + + if self._intent_service is not None: + for intent_model in self._intent_service.models: + if intent_model.installed: + model_languages.update(intent_model.languages) + + self._attr_name = self._intent_service.name + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + elif self._handle_service is not None: + for handle_model in self._handle_service.models: + if handle_model.installed: + model_languages.update(handle_model.languages) + + self._attr_name = self._handle_service.name + + self._supported_languages = list(model_languages) + self._attr_unique_id = f"{config_entry.entry_id}-conversation" + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return self._supported_languages + + async def async_process( + self, user_input: conversation.ConversationInput + ) -> conversation.ConversationResult: + """Process a sentence.""" + conversation_id = user_input.conversation_id or ulid.ulid_now() + intent_response = intent.IntentResponse(language=user_input.language) + + try: + async with AsyncTcpClient(self.service.host, self.service.port) as client: + await client.write_event( + Transcript( + user_input.text, context={"conversation_id": conversation_id} + ).event() + ) + + while True: + event = await client.read_event() + if event is None: + _LOGGER.debug("Connection lost") + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + "Connection to service was lost", + ) + return conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + + if Intent.is_type(event.type): + # Success + recognized_intent = Intent.from_event(event) + _LOGGER.debug("Recognized intent: %s", recognized_intent) + + intent_type = recognized_intent.name + intent_slots = { + e.name: {"value": e.value} + for e in recognized_intent.entities + } + intent_response = await intent.async_handle( + self.hass, + DOMAIN, + intent_type, + intent_slots, + text_input=user_input.text, + language=user_input.language, + ) + + if (not intent_response.speech) and recognized_intent.text: + intent_response.async_set_speech(recognized_intent.text) + + break + + if NotRecognized.is_type(event.type): + not_recognized = NotRecognized.from_event(event) + intent_response.async_set_error( + intent.IntentResponseErrorCode.NO_INTENT_MATCH, + not_recognized.text, + ) + break + + if Handled.is_type(event.type): + # Success + handled = Handled.from_event(event) + intent_response.async_set_speech(handled.text) + break + + if NotHandled.is_type(event.type): + not_handled = NotHandled.from_event(event) + intent_response.async_set_error( + intent.IntentResponseErrorCode.FAILED_TO_HANDLE, + not_handled.text, + ) + break + + except (OSError, WyomingError) as err: + 
_LOGGER.exception("Unexpected error while communicating with service") + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Error communicating with service: {err}", + ) + return conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + except intent.IntentError as err: + _LOGGER.exception("Unexpected error while handling intent") + intent_response.async_set_error( + intent.IntentResponseErrorCode.FAILED_TO_HANDLE, + f"Error handling intent: {err}", + ) + return conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + + # Success + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) diff --git a/homeassistant/components/wyoming/data.py b/homeassistant/components/wyoming/data.py index 1ee0f24f8052b6..a16062ab0589b8 100644 --- a/homeassistant/components/wyoming/data.py +++ b/homeassistant/components/wyoming/data.py @@ -37,6 +37,10 @@ def __init__(self, host: str, port: int, info: Info) -> None: self.platforms.append(Platform.TTS) if any(wake.installed for wake in info.wake): self.platforms.append(Platform.WAKE_WORD) + if any(intent.installed for intent in info.intent) or any( + handle.installed for handle in info.handle + ): + self.platforms.append(Platform.CONVERSATION) def has_services(self) -> bool: """Return True if services are installed that Home Assistant can use.""" @@ -44,6 +48,8 @@ def has_services(self) -> bool: any(asr for asr in self.info.asr if asr.installed) or any(tts for tts in self.info.tts if tts.installed) or any(wake for wake in self.info.wake if wake.installed) + or any(intent for intent in self.info.intent if intent.installed) + or any(handle for handle in self.info.handle if handle.installed) or ((self.info.satellite is not None) and self.info.satellite.installed) ) @@ -70,6 +76,16 @@ def get_name(self) -> str | None: if wake_installed: return wake_installed[0].name + # intent recognition (text -> intent) + intent_installed = [intent for intent in self.info.intent if intent.installed] + if intent_installed: + return intent_installed[0].name + + # intent handling (text -> text) + handle_installed = [handle for handle in self.info.handle if handle.installed] + if handle_installed: + return handle_installed[0].name + return None @classmethod diff --git a/homeassistant/components/wyoming/entity.py b/homeassistant/components/wyoming/entity.py index 4591283036f298..1ce105fb860311 100644 --- a/homeassistant/components/wyoming/entity.py +++ b/homeassistant/components/wyoming/entity.py @@ -6,7 +6,7 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from .const import DOMAIN -from .satellite import SatelliteDevice +from .devices import SatelliteDevice class WyomingSatelliteEntity(entity.Entity): diff --git a/homeassistant/components/wyoming/manifest.json b/homeassistant/components/wyoming/manifest.json index 30104a88dced0e..b837d2a9e76ec7 100644 --- a/homeassistant/components/wyoming/manifest.json +++ b/homeassistant/components/wyoming/manifest.json @@ -3,7 +3,12 @@ "name": "Wyoming Protocol", "codeowners": ["@balloob", "@synesthesiam"], "config_flow": true, - "dependencies": ["assist_pipeline", "intent", "conversation"], + "dependencies": [ + "assist_satellite", + "assist_pipeline", + "intent", + "conversation" + ], "documentation": "https://www.home-assistant.io/integrations/wyoming", "integration_type": "service", "iot_class": "local_push", diff --git 
a/homeassistant/components/wyoming/models.py b/homeassistant/components/wyoming/models.py index 066af144d780ef..b819d06f91606f 100644 --- a/homeassistant/components/wyoming/models.py +++ b/homeassistant/components/wyoming/models.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from .data import WyomingService -from .satellite import WyomingSatellite +from .devices import SatelliteDevice @dataclass @@ -11,4 +11,4 @@ class DomainDataItem: """Domain data item.""" service: WyomingService - satellite: WyomingSatellite | None = None + device: SatelliteDevice | None = None diff --git a/homeassistant/components/wyoming/number.py b/homeassistant/components/wyoming/number.py index 5e769eeb06d4d0..d9a58cc333329b 100644 --- a/homeassistant/components/wyoming/number.py +++ b/homeassistant/components/wyoming/number.py @@ -30,13 +30,12 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not None + assert item.device is not None - device = item.satellite.device async_add_entities( [ - WyomingSatelliteAutoGainNumber(device), - WyomingSatelliteVolumeMultiplierNumber(device), + WyomingSatelliteAutoGainNumber(item.device), + WyomingSatelliteVolumeMultiplierNumber(item.device), ] ) diff --git a/homeassistant/components/wyoming/select.py b/homeassistant/components/wyoming/select.py index f852b4d0434e3f..bbcaab8171063d 100644 --- a/homeassistant/components/wyoming/select.py +++ b/homeassistant/components/wyoming/select.py @@ -42,14 +42,13 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not None + assert item.device is not None - device = item.satellite.device async_add_entities( [ - WyomingSatellitePipelineSelect(hass, device), - WyomingSatelliteNoiseSuppressionLevelSelect(device), - WyomingSatelliteVadSensitivitySelect(hass, device), + WyomingSatellitePipelineSelect(hass, item.device), + WyomingSatelliteNoiseSuppressionLevelSelect(item.device), + WyomingSatelliteVadSensitivitySelect(hass, item.device), ] ) diff --git a/homeassistant/components/wyoming/switch.py b/homeassistant/components/wyoming/switch.py index c012c60bc5aaaf..308429331c350c 100644 --- a/homeassistant/components/wyoming/switch.py +++ b/homeassistant/components/wyoming/switch.py @@ -27,9 +27,9 @@ async def async_setup_entry( item: DomainDataItem = hass.data[DOMAIN][config_entry.entry_id] # Setup is only forwarded for satellites - assert item.satellite is not None + assert item.device is not None - async_add_entities([WyomingSatelliteMuteSwitch(item.satellite.device)]) + async_add_entities([WyomingSatelliteMuteSwitch(item.device)]) class WyomingSatelliteMuteSwitch( @@ -51,7 +51,7 @@ async def async_added_to_hass(self) -> None: # Default to off self._attr_is_on = (state is not None) and (state.state == STATE_ON) - self._device.is_muted = self._attr_is_on + self._device.set_is_muted(self._attr_is_on) async def async_turn_on(self, **kwargs: Any) -> None: """Turn on.""" diff --git a/homeassistant/components/xeoma/manifest.json b/homeassistant/components/xeoma/manifest.json index a73b4bb867146c..d66177ca2142d3 100644 --- a/homeassistant/components/xeoma/manifest.json +++ b/homeassistant/components/xeoma/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/xeoma", "iot_class": "local_polling", "loggers": ["pyxeoma"], - "requirements": ["pyxeoma==1.4.1"] + "requirements": 
["pyxeoma==1.4.2"] } diff --git a/homeassistant/components/xiaomi_ble/config_flow.py b/homeassistant/components/xiaomi_ble/config_flow.py index 8209c9565bde6f..df2de381d39e44 100644 --- a/homeassistant/components/xiaomi_ble/config_flow.py +++ b/homeassistant/components/xiaomi_ble/config_flow.py @@ -4,10 +4,16 @@ from collections.abc import Mapping import dataclasses +import logging from typing import Any import voluptuous as vol -from xiaomi_ble import XiaomiBluetoothDeviceData as DeviceData +from xiaomi_ble import ( + XiaomiBluetoothDeviceData as DeviceData, + XiaomiCloudException, + XiaomiCloudInvalidAuthenticationException, + XiaomiCloudTokenFetch, +) from xiaomi_ble.parser import EncryptionScheme from homeassistant.components import onboarding @@ -17,14 +23,18 @@ async_discovered_service_info, async_process_advertisements, ) -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_ADDRESS +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ADDRESS, CONF_PASSWORD, CONF_USERNAME +from homeassistant.data_entry_flow import AbortFlow +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN # How long to wait for additional advertisement packets if we don't have the right ones ADDITIONAL_DISCOVERY_TIMEOUT = 60 +_LOGGER = logging.getLogger(__name__) + @dataclasses.dataclass class Discovery: @@ -104,7 +114,7 @@ async def async_step_bluetooth( if device.encryption_scheme == EncryptionScheme.MIBEACON_LEGACY: return await self.async_step_get_encryption_key_legacy() if device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5() + return await self.async_step_get_encryption_key_4_5_choose_method() return await self.async_step_bluetooth_confirm() async def async_step_get_encryption_key_legacy( @@ -175,6 +185,67 @@ async def async_step_get_encryption_key_4_5( errors=errors, ) + async def async_step_cloud_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the cloud auth step.""" + assert self._discovery_info + + errors: dict[str, str] = {} + description_placeholders: dict[str, str] = {} + if user_input is not None: + session = async_get_clientsession(self.hass) + fetcher = XiaomiCloudTokenFetch( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session + ) + try: + device_details = await fetcher.get_device_info( + self._discovery_info.address + ) + except XiaomiCloudInvalidAuthenticationException as ex: + _LOGGER.debug("Authentication failed: %s", ex, exc_info=True) + errors = {"base": "auth_failed"} + description_placeholders = {"error_detail": str(ex)} + except XiaomiCloudException as ex: + _LOGGER.debug("Failed to connect to MI API: %s", ex, exc_info=True) + raise AbortFlow( + "api_error", description_placeholders={"error_detail": str(ex)} + ) from ex + else: + if device_details: + return await self.async_step_get_encryption_key_4_5( + {"bindkey": device_details.bindkey} + ) + errors = {"base": "api_device_not_found"} + + user_input = user_input or {} + return self.async_show_form( + step_id="cloud_auth", + errors=errors, + data_schema=vol.Schema( + { + vol.Required( + CONF_USERNAME, default=user_input.get(CONF_USERNAME) + ): str, + vol.Required(CONF_PASSWORD): str, + } + ), + description_placeholders={ + **self.context["title_placeholders"], + **description_placeholders, + }, + ) + + async def 
async_step_get_encryption_key_4_5_choose_method( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Choose method to get the bind key for a version 4/5 device.""" + return self.async_show_menu( + step_id="get_encryption_key_4_5_choose_method", + menu_options=["cloud_auth", "get_encryption_key_4_5"], + description_placeholders=self.context["title_placeholders"], + ) + async def async_step_bluetooth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -231,7 +302,7 @@ async def async_step_user( return await self.async_step_get_encryption_key_legacy() if discovery.device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5() + return await self.async_step_get_encryption_key_4_5_choose_method() return self._async_get_or_create_entry() @@ -264,9 +335,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" - entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry is not None - device: DeviceData = entry_data["device"] self._discovered_device = device @@ -276,7 +344,7 @@ async def async_step_reauth( return await self.async_step_get_encryption_key_legacy() if device.encryption_scheme == EncryptionScheme.MIBEACON_4_5: - return await self.async_step_get_encryption_key_4_5() + return await self.async_step_get_encryption_key_4_5_choose_method() # Otherwise there wasn't actually encryption so abort return self.async_abort(reason="reauth_successful") @@ -289,10 +357,10 @@ def _async_get_or_create_entry( if bindkey: data["bindkey"] = bindkey - if entry_id := self.context.get("entry_id"): - entry = self.hass.config_entries.async_get_entry(entry_id) - assert entry is not None - return self.async_update_reload_and_abort(entry, data=data) + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) return self.async_create_entry( title=self.context["title_placeholders"]["name"], diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index e4c643e491e2c6..26dd82c73bcd17 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": ["xiaomi-ble==0.32.0"] + "requirements": ["xiaomi-ble==0.33.0"] } diff --git a/homeassistant/components/xiaomi_ble/sensor.py b/homeassistant/components/xiaomi_ble/sensor.py index 891caaf3e68a0e..ba8f64383ee2fc 100644 --- a/homeassistant/components/xiaomi_ble/sensor.py +++ b/homeassistant/components/xiaomi_ble/sensor.py @@ -48,8 +48,8 @@ ), (DeviceClass.CONDUCTIVITY, Units.CONDUCTIVITY): SensorEntityDescription( key=str(Units.CONDUCTIVITY), - device_class=None, - native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + device_class=SensorDeviceClass.CONDUCTIVITY, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM, state_class=SensorStateClass.MEASUREMENT, ), ( diff --git a/homeassistant/components/xiaomi_ble/strings.json b/homeassistant/components/xiaomi_ble/strings.json index 048c9bd92e2485..4ea4a47c61ee47 100644 --- a/homeassistant/components/xiaomi_ble/strings.json +++ b/homeassistant/components/xiaomi_ble/strings.json @@ -25,18 +25,35 @@ "data": { "bindkey": "Bindkey" } + }, + 
"cloud_auth": { + "description": "Please provide your Mi app username and password. This data won't be saved and only used to retrieve the device encryption key. Usernames and passwords are case sensitive.", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "get_encryption_key_4_5_choose_method": { + "description": "A Mi device can be set up in Home Assistant in two different ways.\n\nYou can enter the bindkey yourself, or Home Assistant can import them from your Mi account.", + "menu_options": { + "cloud_auth": "Mi account (recommended)", + "get_encryption_key_4_5": "Enter encryption key manually" + } } }, "error": { "decryption_failed": "The provided bindkey did not work, sensor data could not be decrypted. Please check it and try again.", "expected_24_characters": "Expected a 24 character hexadecimal bindkey.", - "expected_32_characters": "Expected a 32 character hexadecimal bindkey." + "expected_32_characters": "Expected a 32 character hexadecimal bindkey.", + "auth_failed": "Authentication failed: {error_detail}", + "api_device_not_found": "The device was not found in your Mi account." }, "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "api_error": "Error while communicating with Mi API: {error_detail}" } }, "device_automation": { diff --git a/homeassistant/components/xiaomi_miio/__init__.py b/homeassistant/components/xiaomi_miio/__init__.py index 9e14a3c58bade0..d841045d2356d2 100644 --- a/homeassistant/components/xiaomi_miio/__init__.py +++ b/homeassistant/components/xiaomi_miio/__init__.py @@ -56,6 +56,7 @@ MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_ZA5, MODELS_AIR_MONITOR, MODELS_FAN, @@ -118,6 +119,7 @@ MODEL_FAN_P9: FanMiot, MODEL_FAN_P10: FanMiot, MODEL_FAN_P11: FanMiot, + MODEL_FAN_P18: FanMiot, MODEL_FAN_P5: FanP5, MODEL_FAN_ZA5: FanZA5, } @@ -306,6 +308,7 @@ async def async_create_miio_device_and_coordinator( "zhimi.fan.za3": True, "zhimi.fan.za5": True, "zhimi.airpurifier.za1": True, + "dmaker.fan.1c": True, } lazy_discover = LAZY_DISCOVER_FOR_MODEL.get(model, False) @@ -385,6 +388,7 @@ async def async_create_miio_device_and_coordinator( coordinator = coordinator_class( hass, _LOGGER, + config_entry=entry, name=name, update_method=update_method(hass, device), # Polling interval. Will only be polled if there are subscribers. @@ -450,6 +454,7 @@ async def async_update_data(): coordinator_dict[sub_device.sid] = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name=name, update_method=update_data_factory(sub_device), # Polling interval. Will only be polled if there are subscribers. 
diff --git a/homeassistant/components/xiaomi_miio/alarm_control_panel.py b/homeassistant/components/xiaomi_miio/alarm_control_panel.py index 58d5ed247ade75..9c06198bc7e27d 100644 --- a/homeassistant/components/xiaomi_miio/alarm_control_panel.py +++ b/homeassistant/components/xiaomi_miio/alarm_control_panel.py @@ -10,13 +10,9 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, -) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -106,11 +102,11 @@ async def async_update(self) -> None: self._attr_available = True if state == XIAOMI_STATE_ARMED_VALUE: - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY elif state == XIAOMI_STATE_DISARMED_VALUE: - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED elif state == XIAOMI_STATE_ARMING_VALUE: - self._attr_state = STATE_ALARM_ARMING + self._attr_alarm_state = AlarmControlPanelState.ARMING else: _LOGGER.warning( "New state (%s) doesn't match expected values: %s/%s/%s", @@ -119,6 +115,6 @@ async def async_update(self) -> None: XIAOMI_STATE_DISARMED_VALUE, XIAOMI_STATE_ARMING_VALUE, ) - self._attr_state = None + self._attr_alarm_state = None - _LOGGER.debug("State value: %s", self._attr_state) + _LOGGER.debug("State value: %s", self._attr_alarm_state) diff --git a/homeassistant/components/xiaomi_miio/config_flow.py b/homeassistant/components/xiaomi_miio/config_flow.py index bd925b5fc542ea..b068f4a1e61c8b 100644 --- a/homeassistant/components/xiaomi_miio/config_flow.py +++ b/homeassistant/components/xiaomi_miio/config_flow.py @@ -13,7 +13,6 @@ from homeassistant.components import zeroconf from homeassistant.config_entries import ( - SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -64,10 +63,6 @@ class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -83,14 +78,7 @@ async def async_step_init( not cloud_username or not cloud_password or not cloud_country ): errors["base"] = "cloud_credentials_incomplete" - # trigger re-auth flow - self.hass.async_create_task( - self.hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=self.config_entry.data, - ) - ) + self.config_entry.async_start_reauth(self.hass) if not errors: return self.async_create_entry(title="", data=user_input) @@ -130,7 +118,7 @@ def __init__(self) -> None: @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/xiaomi_miio/const.py b/homeassistant/components/xiaomi_miio/const.py index 7d6cf152d7a885..2b9cdb2ffddcc3 100644 --- a/homeassistant/components/xiaomi_miio/const.py +++ b/homeassistant/components/xiaomi_miio/const.py @@ -94,6 +94,7 @@ class SetupException(Exception): MODEL_FAN_1C = "dmaker.fan.1c" 
MODEL_FAN_P10 = "dmaker.fan.p10" MODEL_FAN_P11 = "dmaker.fan.p11" +MODEL_FAN_P18 = "dmaker.fan.p18" MODEL_FAN_P5 = "dmaker.fan.p5" MODEL_FAN_P9 = "dmaker.fan.p9" MODEL_FAN_SA1 = "zhimi.fan.sa1" @@ -118,6 +119,7 @@ class SetupException(Exception): MODEL_FAN_1C, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_P9, MODEL_FAN_ZA5, ] @@ -491,7 +493,7 @@ class SetupException(Exception): | FEATURE_SET_DELAY_OFF_COUNTDOWN ) -FEATURE_FLAGS_FAN_P10_P11 = ( +FEATURE_FLAGS_FAN_P10_P11_P18 = ( FEATURE_SET_BUZZER | FEATURE_SET_CHILD_LOCK | FEATURE_SET_OSCILLATION_ANGLE diff --git a/homeassistant/components/xiaomi_miio/fan.py b/homeassistant/components/xiaomi_miio/fan.py index b8f92bd89b0824..81ca38eb053135 100644 --- a/homeassistant/components/xiaomi_miio/fan.py +++ b/homeassistant/components/xiaomi_miio/fan.py @@ -60,7 +60,7 @@ FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11, + FEATURE_FLAGS_FAN_P10_P11_P18, FEATURE_FLAGS_FAN_ZA5, FEATURE_RESET_FILTER, FEATURE_SET_EXTRA_FEATURES, @@ -85,6 +85,7 @@ MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_ZA5, MODELS_FAN_MIIO, MODELS_FAN_MIOT, @@ -117,6 +118,10 @@ # Air Fresh A1 ATTR_FAVORITE_SPEED = "favorite_speed" +# Air Purifier 3C +ATTR_FAVORITE_RPM = "favorite_rpm" +ATTR_MOTOR_SPEED = "motor_speed" + # Map attributes to properties of the state object AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON = { ATTR_EXTRA_FEATURES: "extra_features", @@ -608,28 +613,68 @@ async def async_set_percentage(self, percentage: int) -> None: class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): """Representation of a Xiaomi Air Purifier MB4.""" - def __init__(self, device, entry, unique_id, coordinator): + def __init__(self, device, entry, unique_id, coordinator) -> None: """Initialize Air Purifier MB4.""" super().__init__(device, entry, unique_id, coordinator) self._device_features = FEATURE_FLAGS_AIRPURIFIER_3C self._preset_modes = PRESET_MODES_AIRPURIFIER_3C self._attr_supported_features = ( - FanEntityFeature.PRESET_MODE + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE | FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON ) self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value + self._favorite_rpm: int | None = None + self._speed_range = (300, 2200) + self._motor_speed = 0 @property def operation_mode_class(self): """Hold operation mode class.""" return AirpurifierMiotOperationMode + @property + def percentage(self) -> int | None: + """Return the current percentage based speed.""" + # show the actual fan speed in silent or auto preset mode + if self._mode != self.operation_mode_class["Favorite"].value: + return ranged_value_to_percentage(self._speed_range, self._motor_speed) + if self._favorite_rpm is None: + return None + if self._state: + return ranged_value_to_percentage(self._speed_range, self._favorite_rpm) + + return None + + async def async_set_percentage(self, percentage: int) -> None: + """Set the percentage of the fan. 
This method is a coroutine.""" + if percentage == 0: + await self.async_turn_off() + return + + favorite_rpm = int( + round(percentage_to_ranged_value(self._speed_range, percentage), -1) + ) + if not favorite_rpm: + return + if await self._try_command( + "Setting fan level of the miio device failed.", + self._device.set_favorite_rpm, + favorite_rpm, + ): + self._favorite_rpm = favorite_rpm + self._mode = self.operation_mode_class["Favorite"].value + self.async_write_ha_state() + async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" + if not self._state: + await self.async_turn_on() + if await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -643,6 +688,14 @@ def _handle_coordinator_update(self): """Fetch state from the device.""" self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value + self._favorite_rpm = getattr(self.coordinator.data, ATTR_FAVORITE_RPM, None) + self._motor_speed = min( + self._speed_range[1], + max( + self._speed_range[0], + getattr(self.coordinator.data, ATTR_MOTOR_SPEED, 0), + ), + ) self.async_write_ha_state() @@ -860,8 +913,8 @@ def __init__(self, device, entry, unique_id, coordinator): self._device_features = FEATURE_FLAGS_FAN_1C elif self._model == MODEL_FAN_P9: self._device_features = FEATURE_FLAGS_FAN_P9 - elif self._model in (MODEL_FAN_P10, MODEL_FAN_P11): - self._device_features = FEATURE_FLAGS_FAN_P10_P11 + elif self._model in (MODEL_FAN_P10, MODEL_FAN_P11, MODEL_FAN_P18): + self._device_features = FEATURE_FLAGS_FAN_P10_P11_P18 else: self._device_features = FEATURE_FLAGS_FAN self._attr_supported_features = ( diff --git a/homeassistant/components/xiaomi_miio/number.py b/homeassistant/components/xiaomi_miio/number.py index f8788ba07d6c25..a3c501aad3f98c 100644 --- a/homeassistant/components/xiaomi_miio/number.py +++ b/homeassistant/components/xiaomi_miio/number.py @@ -50,7 +50,7 @@ FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11, + FEATURE_FLAGS_FAN_P10_P11_P18, FEATURE_FLAGS_FAN_ZA5, FEATURE_SET_DELAY_OFF_COUNTDOWN, FEATURE_SET_FAN_LEVEL, @@ -87,6 +87,7 @@ MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_SA1, MODEL_FAN_V2, MODEL_FAN_V3, @@ -256,8 +257,9 @@ class FavoriteLevelValues: MODEL_AIRPURIFIER_4_PRO: FEATURE_FLAGS_AIRPURIFIER_4, MODEL_AIRPURIFIER_ZA1: FEATURE_FLAGS_AIRPURIFIER_ZA1, MODEL_FAN_1C: FEATURE_FLAGS_FAN_1C, - MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11, - MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11, + MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P18: FEATURE_FLAGS_FAN_P10_P11_P18, MODEL_FAN_P5: FEATURE_FLAGS_FAN_P5, MODEL_FAN_P9: FEATURE_FLAGS_FAN_P9, MODEL_FAN_SA1: FEATURE_FLAGS_FAN, @@ -275,6 +277,7 @@ class FavoriteLevelValues: MODEL_FAN_P9: OscillationAngleValues(max_value=150, min_value=30, step=30), MODEL_FAN_P10: OscillationAngleValues(max_value=140, min_value=30, step=30), MODEL_FAN_P11: OscillationAngleValues(max_value=140, min_value=30, step=30), + MODEL_FAN_P18: OscillationAngleValues(max_value=140, min_value=30, step=30), } FAVORITE_LEVEL_VALUES = { diff --git a/homeassistant/components/xiaomi_miio/switch.py b/homeassistant/components/xiaomi_miio/switch.py index 8df3522b2ac2e5..02f4d4e94e51d8 100644 --- a/homeassistant/components/xiaomi_miio/switch.py +++ b/homeassistant/components/xiaomi_miio/switch.py @@ -59,7 +59,7 @@ FEATURE_FLAGS_FAN_1C, FEATURE_FLAGS_FAN_P5, 
FEATURE_FLAGS_FAN_P9, - FEATURE_FLAGS_FAN_P10_P11, + FEATURE_FLAGS_FAN_P10_P11_P18, FEATURE_FLAGS_FAN_ZA5, FEATURE_SET_ANION, FEATURE_SET_AUTO_DETECT, @@ -99,6 +99,7 @@ MODEL_FAN_P9, MODEL_FAN_P10, MODEL_FAN_P11, + MODEL_FAN_P18, MODEL_FAN_ZA1, MODEL_FAN_ZA3, MODEL_FAN_ZA4, @@ -211,8 +212,9 @@ MODEL_AIRPURIFIER_4_PRO: FEATURE_FLAGS_AIRPURIFIER_4, MODEL_AIRPURIFIER_ZA1: FEATURE_FLAGS_AIRPURIFIER_ZA1, MODEL_FAN_1C: FEATURE_FLAGS_FAN_1C, - MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11, - MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11, + MODEL_FAN_P10: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P11: FEATURE_FLAGS_FAN_P10_P11_P18, + MODEL_FAN_P18: FEATURE_FLAGS_FAN_P10_P11_P18, MODEL_FAN_P5: FEATURE_FLAGS_FAN_P5, MODEL_FAN_P9: FEATURE_FLAGS_FAN_P9, MODEL_FAN_ZA1: FEATURE_FLAGS_FAN, diff --git a/homeassistant/components/yale/config_flow.py b/homeassistant/components/yale/config_flow.py index 6cbc9543ea4170..fecf286fdd6b17 100644 --- a/homeassistant/components/yale/config_flow.py +++ b/homeassistant/components/yale/config_flow.py @@ -6,7 +6,7 @@ import jwt -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN @@ -19,7 +19,6 @@ class YaleConfigFlow(config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain= VERSION = 1 DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -30,9 +29,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_user() def _async_get_user_id_from_access_token(self, encoded: str) -> str: @@ -51,10 +47,11 @@ async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: user_id = self._async_get_user_id_from_access_token( data["token"]["access_token"] ) - if entry := self.reauth_entry: - if entry.unique_id != user_id: - return self.async_abort(reason="reauth_invalid_user") - return self.async_update_reload_and_abort(entry, data=data) await self.async_set_unique_id(user_id) + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch(reason="reauth_invalid_user") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=data + ) self._abort_if_unique_id_configured() return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py index 2fc56a9e5dd823..0f5b7d0b8e5802 100644 --- a/homeassistant/components/yale_smart_alarm/alarm_control_panel.py +++ b/homeassistant/components/yale_smart_alarm/alarm_control_panel.py @@ -13,12 +13,12 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import StateType from . 
import YaleConfigEntry from .const import DOMAIN, STATE_MAP, YALE_ALL_ERRORS @@ -106,6 +106,6 @@ def available(self) -> bool: return super().available @property - def state(self) -> StateType: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the alarm.""" return STATE_MAP.get(self.coordinator.data["alarm"]) diff --git a/homeassistant/components/yale_smart_alarm/binary_sensor.py b/homeassistant/components/yale_smart_alarm/binary_sensor.py index a1b94b907ded5a..8e68b1f0cb45f7 100644 --- a/homeassistant/components/yale_smart_alarm/binary_sensor.py +++ b/homeassistant/components/yale_smart_alarm/binary_sensor.py @@ -49,9 +49,13 @@ async def async_setup_entry( """Set up the Yale binary sensor entry.""" coordinator = entry.runtime_data - sensors: list[YaleDoorSensor | YaleProblemSensor] = [ + sensors: list[YaleDoorSensor | YaleDoorBatterySensor | YaleProblemSensor] = [ YaleDoorSensor(coordinator, data) for data in coordinator.data["door_windows"] ] + sensors.extend( + YaleDoorBatterySensor(coordinator, data) + for data in coordinator.data["door_windows"] + ) sensors.extend( YaleProblemSensor(coordinator, description) for description in SENSOR_TYPES ) @@ -70,6 +74,27 @@ def is_on(self) -> bool: return bool(self.coordinator.data["sensor_map"][self._attr_unique_id] == "open") +class YaleDoorBatterySensor(YaleEntity, BinarySensorEntity): + """Representation of a Yale door sensor battery status.""" + + _attr_device_class = BinarySensorDeviceClass.BATTERY + + def __init__( + self, + coordinator: YaleDataUpdateCoordinator, + data: dict, + ) -> None: + """Initiate Yale door battery Sensor.""" + super().__init__(coordinator, data) + self._attr_unique_id = f"{data["address"]}-battery" + + @property + def is_on(self) -> bool: + """Return true if the battery is low.""" + state: bool = self.coordinator.data["sensor_battery_map"][self._attr_unique_id] + return state + + class YaleProblemSensor(YaleAlarmEntity, BinarySensorEntity): """Representation of a Yale problem sensor.""" diff --git a/homeassistant/components/yale_smart_alarm/config_flow.py b/homeassistant/components/yale_smart_alarm/config_flow.py index 644160a8d93e67..c71b7b33a08727 100644 --- a/homeassistant/components/yale_smart_alarm/config_flow.py +++ b/homeassistant/components/yale_smart_alarm/config_flow.py @@ -23,10 +23,8 @@ CONF_AREA_ID, CONF_LOCK_CODE_DIGITS, DEFAULT_AREA_ID, - DEFAULT_LOCK_CODE_DIGITS, DEFAULT_NAME, DOMAIN, - LOGGER, YALE_BASE_ERRORS, ) @@ -40,66 +38,67 @@ DATA_SCHEMA_AUTH = vol.Schema( { - vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, } ) +OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional( + CONF_LOCK_CODE_DIGITS, + ): int, + } +) + + +def validate_credentials(username: str, password: str) -> dict[str, Any]: + """Validate credentials.""" + errors: dict[str, str] = {} + try: + YaleSmartAlarmClient(username, password) + except AuthenticationError: + errors = {"base": "invalid_auth"} + except YALE_BASE_ERRORS: + errors = {"base": "cannot_connect"} + return errors + class YaleConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Yale integration.""" VERSION = 2 - entry: ConfigEntry | None - @staticmethod @callback def async_get_options_flow(config_entry: ConfigEntry) -> YaleOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleOptionsFlowHandler(config_entry) + return YaleOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle initiation of re-authentication with 
Yale.""" - self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: - username = user_input[CONF_USERNAME] + reauth_entry = self._get_reauth_entry() + username = reauth_entry.data[CONF_USERNAME] password = user_input[CONF_PASSWORD] - try: - await self.hass.async_add_executor_job( - YaleSmartAlarmClient, username, password - ) - except AuthenticationError as error: - LOGGER.error("Authentication failed. Check credentials %s", error) - errors = {"base": "invalid_auth"} - except YALE_BASE_ERRORS as error: - LOGGER.error("Connection to API failed %s", error) - errors = {"base": "cannot_connect"} - + errors = await self.hass.async_add_executor_job( + validate_credentials, username, password + ) if not errors: - existing_entry = await self.async_set_unique_id(username) - if existing_entry and self.entry: - self.hass.config_entries.async_update_entry( - existing_entry, - data={ - **self.entry.data, - CONF_USERNAME: username, - CONF_PASSWORD: password, - }, - ) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_PASSWORD: password}, + ) return self.async_show_form( step_id="reauth_confirm", @@ -107,11 +106,42 @@ async def async_step_reauth_confirm( errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of existing entry.""" + errors: dict[str, str] = {} + + if user_input is not None: + reconfigure_entry = self._get_reconfigure_entry() + username = user_input[CONF_USERNAME] + + errors = await self.hass.async_add_executor_job( + validate_credentials, username, user_input[CONF_PASSWORD] + ) + if ( + username != reconfigure_entry.unique_id + and await self.async_set_unique_id(username) + ): + errors["base"] = "unique_id_exists" + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, + unique_id=username, + data_updates=user_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: username = user_input[CONF_USERNAME] @@ -119,17 +149,9 @@ async def async_step_user( name = DEFAULT_NAME area = user_input.get(CONF_AREA_ID, DEFAULT_AREA_ID) - try: - await self.hass.async_add_executor_job( - YaleSmartAlarmClient, username, password - ) - except AuthenticationError as error: - LOGGER.error("Authentication failed. 
Check credentials %s", error) - errors = {"base": "invalid_auth"} - except YALE_BASE_ERRORS as error: - LOGGER.error("Connection to API failed %s", error) - errors = {"base": "cannot_connect"} - + errors = await self.hass.async_add_executor_job( + validate_credentials, username, password + ) if not errors: await self.async_set_unique_id(username) self._abort_if_unique_id_configured() @@ -154,32 +176,18 @@ async def async_step_user( class YaleOptionsFlowHandler(OptionsFlow): """Handle Yale options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Yale options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage Yale options.""" - errors: dict[str, Any] = {} - if user_input: + if user_input is not None: return self.async_create_entry(data=user_input) return self.async_show_form( step_id="init", - data_schema=vol.Schema( - { - vol.Optional( - CONF_LOCK_CODE_DIGITS, - description={ - "suggested_value": self.entry.options.get( - CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS - ) - }, - ): int, - } + data_schema=self.add_suggested_values_to_schema( + OPTIONS_SCHEMA, + self.config_entry.options, ), - errors=errors, ) diff --git a/homeassistant/components/yale_smart_alarm/const.py b/homeassistant/components/yale_smart_alarm/const.py index 41a754e4ce7489..14e31268ec9fc1 100644 --- a/homeassistant/components/yale_smart_alarm/const.py +++ b/homeassistant/components/yale_smart_alarm/const.py @@ -9,12 +9,8 @@ ) from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - Platform, -) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState +from homeassistant.const import Platform CONF_AREA_ID = "area_id" CONF_LOCK_CODE_DIGITS = "lock_code_digits" @@ -45,9 +41,9 @@ ] STATE_MAP = { - YALE_STATE_DISARM: STATE_ALARM_DISARMED, - YALE_STATE_ARM_PARTIAL: STATE_ALARM_ARMED_HOME, - YALE_STATE_ARM_FULL: STATE_ALARM_ARMED_AWAY, + YALE_STATE_DISARM: AlarmControlPanelState.DISARMED, + YALE_STATE_ARM_PARTIAL: AlarmControlPanelState.ARMED_HOME, + YALE_STATE_ARM_FULL: AlarmControlPanelState.ARMED_AWAY, } YALE_BASE_ERRORS = ( diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 911b4523fc4a28..66bd71c9f1e093 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -60,6 +60,9 @@ async def _async_update_data(self) -> dict[str, Any]: for device in updates["cycle"]["device_status"]: state = device["status1"] if device["type"] == "device_type.door_contact": + device["_battery"] = False + if "device_status.low_battery" in state: + device["_battery"] = True if "device_status.dc_close" in state: device["_state"] = "closed" door_windows.append(device) @@ -77,6 +80,10 @@ async def _async_update_data(self) -> dict[str, Any]: _sensor_map = { contact["address"]: contact["_state"] for contact in door_windows } + _sensor_battery_map = { + f"{contact["address"]}-battery": contact["_battery"] + for contact in door_windows + } _temp_map = {temp["address"]: temp["status_temp"] for temp in temp_sensors} return { @@ -86,6 +93,7 @@ async def _async_update_data(self) -> dict[str, Any]: "status": updates["status"], "online": updates["online"], "sensor_map": _sensor_map, + "sensor_battery_map": _sensor_battery_map, "temp_map": 
_temp_map, "panel_info": updates["panel_info"], } diff --git a/homeassistant/components/yale_smart_alarm/strings.json b/homeassistant/components/yale_smart_alarm/strings.json index 8bade77f5f6bea..7f940e1139e5ba 100644 --- a/homeassistant/components/yale_smart_alarm/strings.json +++ b/homeassistant/components/yale_smart_alarm/strings.json @@ -2,11 +2,13 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unique_id_exists": "Another config entry with this username already exist" }, "step": { "user": { @@ -18,10 +20,14 @@ } }, "reauth_confirm": { + "data": { + "password": "[%key:common::config_flow::data::password%]" + } + }, + "reconfigure": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "name": "[%key:common::config_flow::data::name%]", "area_id": "[%key:component::yale_smart_alarm::config::step::user::data::area_id%]" } } diff --git a/homeassistant/components/yalexs_ble/config_flow.py b/homeassistant/components/yalexs_ble/config_flow.py index 7b69e417de7430..6de7475968638a 100644 --- a/homeassistant/components/yalexs_ble/config_flow.py +++ b/homeassistant/components/yalexs_ble/config_flow.py @@ -78,7 +78,6 @@ def __init__(self) -> None: self._discovery_info: BluetoothServiceInfoBleak | None = None self._discovered_devices: dict[str, BluetoothServiceInfoBleak] = {} self._lock_cfg: ValidatedLockConfig | None = None - self._reauth_entry: ConfigEntry | None = None async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak @@ -194,9 +193,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_validate() async def async_step_reauth_validate( @@ -204,8 +200,7 @@ async def async_step_reauth_validate( ) -> ConfigFlowResult: """Handle reauth and validation.""" errors = {} - reauth_entry = self._reauth_entry - assert reauth_entry is not None + reauth_entry = self._get_reauth_entry() if user_input is not None: if ( device := async_ble_device_from_address( @@ -222,7 +217,7 @@ async def async_step_reauth_validate( ) ): return self.async_update_reload_and_abort( - reauth_entry, data={**reauth_entry.data, **user_input} + reauth_entry, data_updates=user_input ) return self.async_show_form( @@ -317,16 +312,12 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> YaleXSBLEOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleXSBLEOptionsFlowHandler(config_entry) + return YaleXSBLEOptionsFlowHandler() class YaleXSBLEOptionsFlowHandler(OptionsFlow): """Handle YaleXSBLE options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize YaleXSBLE options flow.""" - self.entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -348,7 +339,9 @@ async def 
async_step_device_options( { vol.Optional( CONF_ALWAYS_CONNECTED, - default=self.entry.options.get(CONF_ALWAYS_CONNECTED, False), + default=self.config_entry.options.get( + CONF_ALWAYS_CONNECTED, False + ), ): bool, } ), diff --git a/homeassistant/components/yamaha_musiccast/strings.json b/homeassistant/components/yamaha_musiccast/strings.json index d0ee6c030a6915..eaa5ac50c80998 100644 --- a/homeassistant/components/yamaha_musiccast/strings.json +++ b/homeassistant/components/yamaha_musiccast/strings.json @@ -20,7 +20,9 @@ "yxc_control_url_missing": "The control URL is not given in the ssdp description." }, "error": { - "no_musiccast_device": "This device seems to be no MusicCast Device." + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "no_musiccast_device": "This device seems to be no MusicCast Device.", + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "entity": { diff --git a/homeassistant/components/yeelight/config_flow.py b/homeassistant/components/yeelight/config_flow.py index 5438414ea61634..7a3a0a2f100000 100644 --- a/homeassistant/components/yeelight/config_flow.py +++ b/homeassistant/components/yeelight/config_flow.py @@ -58,9 +58,11 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() def __init__(self) -> None: """Initialize the config flow.""" @@ -296,16 +298,12 @@ async def _async_try_connect( class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Yeelight.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the option flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - data = self._config_entry.data - options = self._config_entry.options + data = self.config_entry.data + options = self.config_entry.options detected_model = data.get(CONF_DETECTED_MODEL) model = options[CONF_MODEL] or detected_model diff --git a/homeassistant/components/yolink/config_flow.py b/homeassistant/components/yolink/config_flow.py index abdac696248305..2e96dcf9f8cc6c 100644 --- a/homeassistant/components/yolink/config_flow.py +++ b/homeassistant/components/yolink/config_flow.py @@ -6,7 +6,7 @@ import logging from typing import Any -from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN @@ -18,7 +18,6 @@ class OAuth2FlowHandler( """Config flow to handle yolink OAuth2 authentication.""" DOMAIN = DOMAIN - _reauth_entry: ConfigEntry | None = None @property def logger(self) -> logging.Logger: @@ -35,9 +34,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm(self, user_input=None) -> ConfigFlowResult: @@ -48,12 +44,10 @@ async def async_step_reauth_confirm(self, user_input=None) -> ConfigFlowResult: async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: 
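# --- Editor's note: illustrative sketch, not part of the patch around it. ----------
# Two refactors recur throughout this changeset: (1) reauth flows stop caching the
# entry from self.context["entry_id"] and instead check self.source, fetch it with
# self._get_reauth_entry() and finish via async_update_reload_and_abort(); and
# (2) options flow handlers are created without the config entry and read it from
# the inherited self.config_entry. A minimal, hypothetical flow combining both
# patterns could look roughly like this (ExampleOAuth2FlowHandler, EXAMPLE_DOMAIN
# and "example_option" are placeholder names, not anything in the patch):
from __future__ import annotations

import logging

import voluptuous as vol

from homeassistant.config_entries import (
    SOURCE_REAUTH,
    ConfigEntry,
    ConfigFlowResult,
    OptionsFlow,
)
from homeassistant.core import callback
from homeassistant.helpers import config_entry_oauth2_flow

EXAMPLE_DOMAIN = "example_oauth"
OPTIONS_SCHEMA = vol.Schema({vol.Optional("example_option"): int})


class ExampleOAuth2FlowHandler(
    config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=EXAMPLE_DOMAIN
):
    """Hypothetical OAuth2 config flow using the shared reauth helpers."""

    DOMAIN = EXAMPLE_DOMAIN

    @property
    def logger(self) -> logging.Logger:
        """Return logger."""
        return logging.getLogger(__name__)

    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> ExampleOptionsFlowHandler:
        """Get the options flow; the entry is no longer passed to the handler."""
        return ExampleOptionsFlowHandler()

    async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
        """Create a new entry, or update the existing entry during reauth."""
        if self.source == SOURCE_REAUTH:
            # data_updates is merged into the existing entry data before reload.
            return self.async_update_reload_and_abort(
                self._get_reauth_entry(), data_updates=data
            )
        return self.async_create_entry(title="Example", data=data)


class ExampleOptionsFlowHandler(OptionsFlow):
    """Hypothetical options flow reading the entry via self.config_entry."""

    async def async_step_init(
        self, user_input: dict | None = None
    ) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            return self.async_create_entry(data=user_input)
        return self.async_show_form(
            step_id="init",
            data_schema=self.add_suggested_values_to_schema(
                OPTIONS_SCHEMA, self.config_entry.options
            ),
        )
# ------------------------------------------------------------------------------------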
"""Create an oauth config entry or update existing entry for reauth.""" - if existing_entry := self._reauth_entry: - self.hass.config_entries.async_update_entry( - existing_entry, data=existing_entry.data | data + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data_updates=data ) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") return self.async_create_entry(title="YoLink", data=data) async def async_step_user( @@ -61,6 +55,6 @@ async def async_step_user( ) -> ConfigFlowResult: """Handle a flow start.""" existing_entry = await self.async_set_unique_id(DOMAIN) - if existing_entry and not self._reauth_entry: + if existing_entry and self.source != SOURCE_REAUTH: return self.async_abort(reason="already_configured") return await super().async_step_user(user_input) diff --git a/homeassistant/components/yolink/strings.json b/homeassistant/components/yolink/strings.json index cefc7737a7908c..2f9a945450268f 100644 --- a/homeassistant/components/yolink/strings.json +++ b/homeassistant/components/yolink/strings.json @@ -19,7 +19,8 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" diff --git a/homeassistant/components/youless/__init__.py b/homeassistant/components/youless/__init__.py index a968d05292243f..d475034cc9dbd8 100644 --- a/homeassistant/components/youless/__init__.py +++ b/homeassistant/components/youless/__init__.py @@ -36,6 +36,7 @@ async def async_update_data() -> YoulessAPI: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=entry, name="youless_gateway", update_method=async_update_data, update_interval=timedelta(seconds=10), diff --git a/homeassistant/components/youtube/config_flow.py b/homeassistant/components/youtube/config_flow.py index 32b37b93eb2123..48336422585b70 100644 --- a/homeassistant/components/youtube/config_flow.py +++ b/homeassistant/components/youtube/config_flow.py @@ -12,9 +12,10 @@ from youtubeaio.youtube import YouTube from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.core import callback @@ -45,7 +46,6 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN - reauth_entry: ConfigEntry | None = None _youtube: YouTube | None = None @staticmethod @@ -54,7 +54,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> YouTubeOptionsFlowHandler: """Get the options flow for this handler.""" - return YouTubeOptionsFlowHandler(config_entry) + return YouTubeOptionsFlowHandler() @property def logger(self) -> logging.Logger: @@ -75,9 +75,6 @@ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self.reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -117,22 
+114,19 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu self._title = own_channel.snippet.title self._data = data - if not self.reauth_entry: - await self.async_set_unique_id(own_channel.channel_id) + await self.async_set_unique_id(own_channel.channel_id) + if self.source != SOURCE_REAUTH: self._abort_if_unique_id_configured() return await self.async_step_channels() - if self.reauth_entry.unique_id == own_channel.channel_id: - self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) - await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - return self.async_abort( + self._abort_if_unique_id_mismatch( reason="wrong_account", description_placeholders={"title": self._title}, ) + return self.async_update_reload_and_abort(self._get_reauth_entry(), data=data) + async def async_step_channels( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -165,7 +159,7 @@ async def async_step_channels( ) -class YouTubeOptionsFlowHandler(OptionsFlowWithConfigEntry): +class YouTubeOptionsFlowHandler(OptionsFlow): """YouTube Options flow handler.""" async def async_step_init( @@ -200,6 +194,6 @@ async def async_step_init( ), } ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/youtube/strings.json b/homeassistant/components/youtube/strings.json index 5902d3a4482d17..78ca0532459df0 100644 --- a/homeassistant/components/youtube/strings.json +++ b/homeassistant/components/youtube/strings.json @@ -10,7 +10,8 @@ "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "wrong_account": "Wrong account: please authenticate with the right account." 
}, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", diff --git a/homeassistant/components/zeroconf/__init__.py b/homeassistant/components/zeroconf/__init__.py index b0a78a1ff88b6e..449c2ccef91cfc 100644 --- a/homeassistant/components/zeroconf/__init__.py +++ b/homeassistant/components/zeroconf/__init__.py @@ -540,7 +540,9 @@ def _async_process_service_update( continue matcher_domain = matcher[ATTR_DOMAIN] - context = { + # Create a type annotated regular dict since this is a hot path and creating + # a regular dict is slightly cheaper than calling ConfigFlowContext + context: config_entries.ConfigFlowContext = { "source": config_entries.SOURCE_ZEROCONF, } if domain: diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 8246085e40520f..98b09f1a2512db 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.135.0"] + "requirements": ["zeroconf==0.136.0"] } diff --git a/homeassistant/components/zeroconf/usage.py b/homeassistant/components/zeroconf/usage.py index b9d51cd3c367b3..8ddfdbd592d1a6 100644 --- a/homeassistant/components/zeroconf/usage.py +++ b/homeassistant/components/zeroconf/usage.py @@ -4,7 +4,7 @@ import zeroconf -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import ReportBehavior, report_usage from .models import HaZeroconf @@ -16,14 +16,14 @@ def install_multiple_zeroconf_catcher(hass_zc: HaZeroconf) -> None: """ def new_zeroconf_new(self: zeroconf.Zeroconf, *k: Any, **kw: Any) -> HaZeroconf: - report( + report_usage( ( "attempted to create another Zeroconf instance. 
Please use the shared" " Zeroconf via await" " homeassistant.components.zeroconf.async_get_instance(hass)" ), exclude_integrations={"zeroconf"}, - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) return hass_zc diff --git a/homeassistant/components/zeversolar/manifest.json b/homeassistant/components/zeversolar/manifest.json index af197b3aa7ce86..18bab34c04e2cf 100644 --- a/homeassistant/components/zeversolar/manifest.json +++ b/homeassistant/components/zeversolar/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/zeversolar", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["zeversolar==0.3.1"] + "requirements": ["zeversolar==0.3.2"] } diff --git a/homeassistant/components/zha/alarm_control_panel.py b/homeassistant/components/zha/alarm_control_panel.py index c54d7c7ab2d9b3..734683e5497e45 100644 --- a/homeassistant/components/zha/alarm_control_panel.py +++ b/homeassistant/components/zha/alarm_control_panel.py @@ -4,9 +4,14 @@ import functools +from zha.application.platforms.alarm_control_panel.const import ( + AlarmState as ZHAAlarmState, +) + from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, CodeFormat, ) from homeassistant.config_entries import ConfigEntry @@ -23,6 +28,20 @@ get_zha_data, ) +ZHA_STATE_TO_ALARM_STATE_MAP = { + ZHAAlarmState.DISARMED.value: AlarmControlPanelState.DISARMED, + ZHAAlarmState.ARMED_HOME.value: AlarmControlPanelState.ARMED_HOME, + ZHAAlarmState.ARMED_AWAY.value: AlarmControlPanelState.ARMED_AWAY, + ZHAAlarmState.ARMED_NIGHT.value: AlarmControlPanelState.ARMED_NIGHT, + ZHAAlarmState.ARMED_VACATION.value: AlarmControlPanelState.ARMED_VACATION, + ZHAAlarmState.ARMED_CUSTOM_BYPASS.value: AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ZHAAlarmState.PENDING.value: AlarmControlPanelState.PENDING, + ZHAAlarmState.ARMING.value: AlarmControlPanelState.ARMING, + ZHAAlarmState.DISARMING.value: AlarmControlPanelState.DISARMING, + ZHAAlarmState.TRIGGERED.value: AlarmControlPanelState.TRIGGERED, + ZHAAlarmState.UNKNOWN.value: None, +} + async def async_setup_entry( hass: HomeAssistant, @@ -94,6 +113,6 @@ async def async_alarm_trigger(self, code: str | None = None) -> None: self.async_write_ha_state() @property - def state(self) -> str | None: + def alarm_state(self) -> AlarmControlPanelState | None: """Return the state of the entity.""" - return self.entity_data.entity.state["state"] + return ZHA_STATE_TO_ALARM_STATE_MAP.get(self.entity_data.entity.state["state"]) diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index 20eb006eb74c9e..1c7e0d105c4191 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -680,8 +680,6 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" super().__init__() - self.config_entry = config_entry - self._radio_mgr.device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] self._radio_mgr.device_settings = config_entry.data[CONF_DEVICE] self._radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]] diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index 0689929699164c..2440e18cf5389f 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -104,7 +104,7 @@ ATTR_NAME, Platform, ) -from 
homeassistant.core import HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( config_validation as cv, @@ -495,7 +495,7 @@ def __init__( self.hass = hass self.config_entry = config_entry self.gateway = gateway - self.device_proxies: dict[str, ZHADeviceProxy] = {} + self.device_proxies: dict[EUI64, ZHADeviceProxy] = {} self.group_proxies: dict[int, ZHAGroupProxy] = {} self._ha_entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( collections.defaultdict(list) @@ -509,6 +509,12 @@ def __init__( self._unsubs: list[Callable[[], None]] = [] self._unsubs.append(self.gateway.on_all_events(self._handle_event_protocol)) self._reload_task: asyncio.Task | None = None + config_entry.async_on_unload( + self.hass.bus.async_listen( + er.EVENT_ENTITY_REGISTRY_UPDATED, + self._handle_entity_registry_updated, + ) + ) @property def ha_entity_refs(self) -> collections.defaultdict[EUI64, list[EntityReference]]: @@ -532,6 +538,46 @@ def register_entity_reference( ) ) + async def _handle_entity_registry_updated( + self, event: Event[er.EventEntityRegistryUpdatedData] + ) -> None: + """Handle when entity registry updated.""" + entity_id = event.data["entity_id"] + entity_entry: er.RegistryEntry | None = er.async_get(self.hass).async_get( + entity_id + ) + if ( + entity_entry is None + or entity_entry.config_entry_id != self.config_entry.entry_id + or entity_entry.device_id is None + ): + return + device_entry: dr.DeviceEntry | None = dr.async_get(self.hass).async_get( + entity_entry.device_id + ) + assert device_entry + + ieee_address = next( + identifier + for domain, identifier in device_entry.identifiers + if domain == DOMAIN + ) + assert ieee_address + + ieee = EUI64.convert(ieee_address) + + assert ieee in self.device_proxies + + zha_device_proxy = self.device_proxies[ieee] + entity_key = (entity_entry.domain, entity_entry.unique_id) + if entity_key not in zha_device_proxy.device.platform_entities: + return + platform_entity = zha_device_proxy.device.platform_entities[entity_key] + if entity_entry.disabled: + platform_entity.disable() + else: + platform_entity.enable() + async def async_initialize_devices_and_entities(self) -> None: """Initialize devices and entities.""" for device in self.gateway.devices.values(): @@ -1117,7 +1163,7 @@ def async_add_entities( if not entities: return - entities_to_add = [] + entities_to_add: list[ZHAEntity] = [] for entity_data in entities: try: entities_to_add.append(entity_class(entity_data)) @@ -1129,6 +1175,9 @@ def async_add_entities( "Error while adding entity from entity data: %s", entity_data ) _async_add_entities(entities_to_add, update_before_add=False) + for entity in entities_to_add: + if not entity.enabled: + entity.entity_data.entity.disable() entities.clear() @@ -1198,7 +1247,7 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: # deep copy the yaml config to avoid modifying the original and to safely # pass it to the ZHA library app_config = copy.deepcopy(ha_zha_data.yaml_config.get(CONF_ZIGPY, {})) - database = app_config.get( + database = ha_zha_data.yaml_config.get( CONF_DATABASE, hass.config.path(DEFAULT_DATABASE_NAME), ) diff --git a/homeassistant/components/zha/icons.json b/homeassistant/components/zha/icons.json index 9d5254fe237469..5b3b85ced39957 100644 --- a/homeassistant/components/zha/icons.json +++ b/homeassistant/components/zha/icons.json @@ -45,6 +45,15 @@ "maximum_level": 
{ "default": "mdi:brightness-percent" }, + "default_level_local": { + "default": "mdi:brightness-percent" + }, + "default_level_remote": { + "default": "mdi:brightness-percent" + }, + "state_after_power_restored": { + "default": "mdi:brightness-percent" + }, "auto_off_timer": { "default": "mdi:timer" }, diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index fa83ad1cab6d5f..9a22dfb02e953c 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -44,12 +44,7 @@ ZhaColorMode.ONOFF: ColorMode.ONOFF, ZhaColorMode.BRIGHTNESS: ColorMode.BRIGHTNESS, ZhaColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP, - ZhaColorMode.HS: ColorMode.HS, ZhaColorMode.XY: ColorMode.XY, - ZhaColorMode.RGB: ColorMode.RGB, - ZhaColorMode.RGBW: ColorMode.RGBW, - ZhaColorMode.RGBWW: ColorMode.RGBWW, - ZhaColorMode.WHITE: ColorMode.WHITE, } HA_TO_ZHA_COLOR_MODE = {v: k for k, v in ZHA_TO_HA_COLOR_MODE.items()} diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index dd15fb99960f30..96c9bc030f6799 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -1,7 +1,7 @@ { "domain": "zha", "name": "Zigbee Home Automation", - "after_dependencies": ["onboarding", "usb"], + "after_dependencies": ["hassio", "onboarding", "usb"], "codeowners": ["@dmulcahey", "@adminiuga", "@puddly", "@TheJulianJES"], "config_flow": true, "dependencies": ["file_upload"], @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.34"], + "requirements": ["universal-silabs-flasher==0.0.24", "zha==0.0.37"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 6123081fcd7b9e..d0505bf2460a97 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -708,6 +708,15 @@ "maximum_level": { "name": "Maximum load dimming level" }, + "default_level_local": { + "name": "Local default dimming level" + }, + "default_level_remote": { + "name": "Remote default dimming level" + }, + "state_after_power_restored": { + "name": "Start-up default dimming level" + }, "auto_off_timer": { "name": "Automatic switch shutoff timer" }, @@ -767,6 +776,21 @@ }, "regulation_setpoint_offset": { "name": "Regulation setpoint offset" + }, + "irrigation_cycles": { + "name": "Irrigation cycles" + }, + "irrigation_target": { + "name": "Irrigation target" + }, + "irrigation_interval": { + "name": "Irrigation interval" + }, + "valve_countdown_1": { + "name": "Irrigation time 1" + }, + "valve_countdown_2": { + "name": "Irrigation time 2" } }, "select": { @@ -818,6 +842,9 @@ "increased_non_neutral_output": { "name": "Non neutral output" }, + "leading_or_trailing_edge": { + "name": "Dimming mode" + }, "feeding_mode": { "name": "Mode" }, @@ -853,6 +880,12 @@ }, "setpoint_response_time": { "name": "Setpoint response time" + }, + "irrigation_mode": { + "name": "Irrigation mode" + }, + "weather_delay": { + "name": "Weather delay" } }, "sensor": { @@ -898,6 +931,12 @@ "device_temperature": { "name": "Device temperature" }, + "internal_temp_monitor": { + "name": "Internal temperature" + }, + "overheated": { + "name": "Overheat protection" + }, "formaldehyde": { "name": "Formaldehyde concentration" }, @@ -1023,6 +1062,27 @@ }, "motor_stepcount": { "name": "Motor stepcount" + }, + "irrigation_duration": { + "name": "Last irrigation duration" + }, + 
"irrigation_start_time": { + "name": "Irrigation start time" + }, + "irrigation_end_time": { + "name": "Irrigation end time" + }, + "irrigation_duration_1": { + "name": "Irrigation duration 1" + }, + "irriation_duration_2": { + "name": "Irrigation duration 2" + }, + "valve_status_1": { + "name": "Status 1" + }, + "valve_status_2": { + "name": "Status 2" } }, "switch": { @@ -1127,6 +1187,12 @@ }, "adaptation_run_enabled": { "name": "Adaptation run enabled" + }, + "valve_on_off_1": { + "name": "Valve 1" + }, + "valve_on_off_2": { + "name": "Valve 2" } } } diff --git a/homeassistant/components/zhong_hong/manifest.json b/homeassistant/components/zhong_hong/manifest.json index 06cc06faf0b172..9da0e9ab72b793 100644 --- a/homeassistant/components/zhong_hong/manifest.json +++ b/homeassistant/components/zhong_hong/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/zhong_hong", "iot_class": "local_push", "loggers": ["zhong_hong_hvac"], - "requirements": ["zhong-hong-hvac==1.0.12"] + "requirements": ["zhong-hong-hvac==1.0.13"] } diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index b43528fe358d0b..bd49e85b601a1e 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -13,8 +13,10 @@ from zwave_js_server.const import ( CommandClass, ExclusionStrategy, + InclusionState, InclusionStrategy, LogLevel, + NodeStatus, Protocols, ProvisioningEntryStatus, QRCodeVersion, @@ -41,6 +43,7 @@ ControllerFirmwareUpdateResult, ) from zwave_js_server.model.driver import Driver +from zwave_js_server.model.endpoint import Endpoint from zwave_js_server.model.log_config import LogConfig from zwave_js_server.model.log_message import LogMessage from zwave_js_server.model.node import Node, NodeStatistics @@ -53,6 +56,7 @@ async_parse_qr_code_string, async_try_parse_dsk_from_qr_code_string, ) +from zwave_js_server.model.value import ConfigurationValueFormat from zwave_js_server.util.node import async_set_config_parameter from homeassistant.components import websocket_api @@ -73,6 +77,11 @@ from .config_validation import BITMASK_SCHEMA from .const import ( + ATTR_COMMAND_CLASS, + ATTR_ENDPOINT, + ATTR_METHOD_NAME, + ATTR_PARAMETERS, + ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, DATA_CLIENT, EVENT_DEVICE_ADDED_TO_REGISTRY, @@ -98,6 +107,8 @@ PROPERTY_KEY = "property_key" ENDPOINT = "endpoint" VALUE = "value" +VALUE_SIZE = "value_size" +VALUE_FORMAT = "value_format" # constants for log config commands CONFIG = "config" @@ -408,6 +419,8 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_rebuild_node_routes) websocket_api.async_register_command(hass, websocket_set_config_parameter) websocket_api.async_register_command(hass, websocket_get_config_parameters) + websocket_api.async_register_command(hass, websocket_get_raw_config_parameter) + websocket_api.async_register_command(hass, websocket_set_raw_config_parameter) websocket_api.async_register_command(hass, websocket_subscribe_log_updates) websocket_api.async_register_command(hass, websocket_update_log_config) websocket_api.async_register_command(hass, websocket_get_log_config) @@ -435,6 +448,8 @@ def async_register_api(hass: HomeAssistant) -> None: ) websocket_api.async_register_command(hass, websocket_subscribe_node_statistics) websocket_api.async_register_command(hass, websocket_hard_reset_controller) + websocket_api.async_register_command(hass, websocket_node_capabilities) + 
websocket_api.async_register_command(hass, websocket_invoke_cc_api) hass.http.register_view(FirmwareUploadView(dr.async_get(hass))) @@ -693,6 +708,30 @@ def forward_dsk(event: dict) -> None: ) ) + @callback + def forward_node_added( + node: Node, low_security: bool, low_security_reason: str | None + ) -> None: + interview_unsubs = [ + node.on("interview started", forward_event), + node.on("interview completed", forward_event), + node.on("interview stage completed", forward_stage), + node.on("interview failed", forward_event), + ] + unsubs.extend(interview_unsubs) + node_details = { + "node_id": node.node_id, + "status": node.status, + "ready": node.ready, + "low_security": low_security, + "low_security_reason": low_security_reason, + } + connection.send_message( + websocket_api.event_message( + msg[ID], {"event": "node added", "node": node_details} + ) + ) + @callback def forward_requested_grant(event: dict) -> None: connection.send_message( @@ -727,24 +766,10 @@ def node_found(event: dict) -> None: @callback def node_added(event: dict) -> None: - node = event["node"] - interview_unsubs = [ - node.on("interview started", forward_event), - node.on("interview completed", forward_event), - node.on("interview stage completed", forward_stage), - node.on("interview failed", forward_event), - ] - unsubs.extend(interview_unsubs) - node_details = { - "node_id": node.node_id, - "status": node.status, - "ready": node.ready, - "low_security": event["result"].get("lowSecurity", False), - } - connection.send_message( - websocket_api.event_message( - msg[ID], {"event": "node added", "node": node_details} - ) + forward_node_added( + event["node"], + event["result"].get("lowSecurity", False), + event["result"].get("lowSecurityReason"), ) @callback @@ -776,25 +801,39 @@ def device_registered(device: dr.DeviceEntry) -> None: ] msg[DATA_UNSUBSCRIBE] = unsubs - try: - result = await controller.async_begin_inclusion( - INCLUSION_STRATEGY_NOT_SMART_START[inclusion_strategy.value], - force_security=force_security, - provisioning=provisioning, - dsk=dsk, - ) - except ValueError as err: - connection.send_error( + if controller.inclusion_state == InclusionState.INCLUDING: + connection.send_result( msg[ID], - ERR_INVALID_FORMAT, - err.args[0], + True, # Inclusion is already in progress ) - return + # Check for nodes that have been added but not fully included + for node in controller.nodes.values(): + if node.status != NodeStatus.DEAD and not node.ready: + forward_node_added( + node, + not node.is_secure, + None, + ) + else: + try: + result = await controller.async_begin_inclusion( + INCLUSION_STRATEGY_NOT_SMART_START[inclusion_strategy.value], + force_security=force_security, + provisioning=provisioning, + dsk=dsk, + ) + except ValueError as err: + connection.send_error( + msg[ID], + ERR_INVALID_FORMAT, + err.args[0], + ) + return - connection.send_result( - msg[ID], - result, - ) + connection.send_result( + msg[ID], + result, + ) @websocket_api.require_admin @@ -1726,6 +1765,72 @@ async def websocket_get_config_parameters( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/set_raw_config_parameter", + vol.Required(DEVICE_ID): str, + vol.Required(PROPERTY): int, + vol.Required(VALUE): int, + vol.Required(VALUE_SIZE): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), + vol.Required(VALUE_FORMAT): vol.Coerce(ConfigurationValueFormat), + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_set_raw_config_parameter( + 
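# --- Editor's note: illustrative usage sketch, not part of the patch around it. ----
# Example websocket payloads for the commands registered above; the message keys
# are assumed to follow the schema constants in this file (DEVICE_ID, PROPERTY,
# VALUE_SIZE, VALUE_FORMAT, ATTR_*). Device ids, parameter numbers and the CC API
# method name are placeholders, and value_format must be a valid
# ConfigurationValueFormat value.
SET_RAW_CONFIG_PARAMETER_MSG = {
    "id": 1,
    "type": "zwave_js/set_raw_config_parameter",
    "device_id": "<device registry id>",
    "property": 7,        # configuration parameter number (placeholder)
    "value": 1,
    "value_size": 1,      # 1-4 bytes, per the vol.Range in the schema
    "value_format": 1,    # coerced to ConfigurationValueFormat
}

GET_RAW_CONFIG_PARAMETER_MSG = {
    "id": 2,
    "type": "zwave_js/get_raw_config_parameter",
    "device_id": "<device registry id>",
    "property": 7,
}

NODE_CAPABILITIES_MSG = {
    "id": 3,
    "type": "zwave_js/node_capabilities",
    "device_id": "<device registry id>",
}

INVOKE_CC_API_MSG = {
    "id": 4,
    "type": "zwave_js/invoke_cc_api",
    "device_id": "<device registry id>",
    "command_class": 37,          # coerced to CommandClass (placeholder value)
    "endpoint": 0,                # optional; omitted -> the node itself
    "method_name": "someMethod",  # hypothetical CC API method name
    "parameters": [],
    "wait_for_result": False,
}
# -------------------------------------------------------------------------------------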
hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Set a custom config parameter value for a Z-Wave node.""" + result = await node.async_set_raw_config_parameter_value( + msg[VALUE], + msg[PROPERTY], + value_size=msg[VALUE_SIZE], + value_format=msg[VALUE_FORMAT], + ) + + connection.send_result( + msg[ID], + { + STATUS: result.status, + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/get_raw_config_parameter", + vol.Required(DEVICE_ID): str, + vol.Required(PROPERTY): int, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_get_raw_config_parameter( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Get a custom config parameter value for a Z-Wave node.""" + value = await node.async_get_raw_config_parameter_value( + msg[PROPERTY], + ) + + connection.send_result( + msg[ID], + { + VALUE: value, + }, + ) + + def filename_is_present_if_logging_to_file(obj: dict) -> dict: """Validate that filename is provided if log_to_file is True.""" if obj.get(LOG_TO_FILE, False) and FILENAME not in obj: @@ -2499,3 +2604,81 @@ def _handle_device_added(device: dr.DeviceEntry) -> None: ) ] await driver.async_hard_reset() + + +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/node_capabilities", + vol.Required(DEVICE_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_node_capabilities( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Get node endpoints with their support command classes.""" + # consumers expect snake_case at the moment + # remove that addition when consumers are updated + connection.send_result( + msg[ID], + { + idx: [ + command_class.to_dict() | {"is_secure": command_class.is_secure} + for command_class in endpoint.command_classes + ] + for idx, endpoint in node.endpoints.items() + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/invoke_cc_api", + vol.Required(DEVICE_ID): str, + vol.Required(ATTR_COMMAND_CLASS): vol.All( + vol.Coerce(int), vol.Coerce(CommandClass) + ), + vol.Optional(ATTR_ENDPOINT): vol.Coerce(int), + vol.Required(ATTR_METHOD_NAME): cv.string, + vol.Required(ATTR_PARAMETERS): list, + vol.Optional(ATTR_WAIT_FOR_RESULT): cv.boolean, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_invoke_cc_api( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Call invokeCCAPI on the node or provided endpoint.""" + command_class: CommandClass = msg[ATTR_COMMAND_CLASS] + method_name: str = msg[ATTR_METHOD_NAME] + parameters: list[Any] = msg[ATTR_PARAMETERS] + + node_or_endpoint: Node | Endpoint = node + if (endpoint := msg.get(ATTR_ENDPOINT)) is not None: + node_or_endpoint = node.endpoints[endpoint] + + try: + result = await node_or_endpoint.async_invoke_cc_api( + command_class, + method_name, + *parameters, + wait_for_result=msg.get(ATTR_WAIT_FOR_RESULT, False), + ) + except BaseZwaveJSServerError as err: + connection.send_error(msg[ID], err.__class__.__name__, str(err)) + else: + connection.send_result( + msg[ID], + result, + ) diff --git a/homeassistant/components/zwave_js/climate.py b/homeassistant/components/zwave_js/climate.py index 
14a3fe579c4899..c7ab579c2cb28a 100644 --- a/homeassistant/components/zwave_js/climate.py +++ b/homeassistant/components/zwave_js/climate.py @@ -24,8 +24,6 @@ ATTR_HVAC_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - DEFAULT_MAX_TEMP, - DEFAULT_MIN_TEMP, DOMAIN as CLIMATE_DOMAIN, PRESET_NONE, ClimateEntity, @@ -421,7 +419,7 @@ def extra_state_attributes(self) -> dict[str, str] | None: @property def min_temp(self) -> float: """Return the minimum temperature.""" - min_temp = DEFAULT_MIN_TEMP + min_temp = 0.0 # Not using DEFAULT_MIN_TEMP to allow wider range base_unit: str = UnitOfTemperature.CELSIUS try: temp = self._setpoint_value_or_raise(self._current_mode_setpoint_enums[0]) @@ -437,7 +435,7 @@ def min_temp(self) -> float: @property def max_temp(self) -> float: """Return the maximum temperature.""" - max_temp = DEFAULT_MAX_TEMP + max_temp = 50.0 # Not using DEFAULT_MAX_TEMP to allow wider range base_unit: str = UnitOfTemperature.CELSIUS try: temp = self._setpoint_value_or_raise(self._current_mode_setpoint_enums[0]) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index 7733e0325ec035..36f208e18d57e2 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -18,8 +18,6 @@ AddonInfo, AddonManager, AddonState, - HassioServiceInfo, - is_hassio, ) from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import ( @@ -29,6 +27,7 @@ ConfigEntryBaseFlow, ConfigEntryState, ConfigFlow, + ConfigFlowContext, ConfigFlowResult, OptionsFlow, OptionsFlowManager, @@ -38,6 +37,8 @@ from homeassistant.data_entry_flow import AbortFlow, FlowManager from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.hassio import is_hassio +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import VolDictType from . 
import disconnect_client @@ -192,7 +193,7 @@ def __init__(self) -> None: @property @abstractmethod - def flow_manager(self) -> FlowManager[ConfigFlowResult]: + def flow_manager(self) -> FlowManager[ConfigFlowContext, ConfigFlowResult]: """Return the flow manager of the flow.""" async def async_step_install_addon( @@ -365,7 +366,7 @@ def async_get_options_flow( config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -724,10 +725,9 @@ def _async_create_entry_from_vars(self) -> ConfigFlowResult: class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow): """Handle an options flow for Z-Wave JS.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Set up the options flow.""" super().__init__() - self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git a/homeassistant/components/zwave_js/config_validation.py b/homeassistant/components/zwave_js/config_validation.py index 6c060f90ce5d1b..30bc2f1678916a 100644 --- a/homeassistant/components/zwave_js/config_validation.py +++ b/homeassistant/components/zwave_js/config_validation.py @@ -34,6 +34,8 @@ def boolean(value: Any) -> bool: VALUE_SCHEMA = vol.Any( boolean, + float, + int, vol.Coerce(int), vol.Coerce(float), BITMASK_SCHEMA, diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index cff0eb434e0b08..5c79c668afcbb5 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -238,6 +238,12 @@ class ZWaveDiscoverySchema: command_class={CommandClass.SWITCH_BINARY}, property={CURRENT_VALUE_PROPERTY} ) +COLOR_SWITCH_CURRENT_VALUE_SCHEMA = ZWaveValueDiscoverySchema( + command_class={CommandClass.SWITCH_COLOR}, + property={CURRENT_COLOR_PROPERTY}, + property_key={None}, +) + SIREN_TONE_SCHEMA = ZWaveValueDiscoverySchema( command_class={CommandClass.SOUND_SWITCH}, property={TONE_ID_PROPERTY}, @@ -762,33 +768,6 @@ class ZWaveDiscoverySchema: }, ), ), - # HomeSeer HSM-200 v1 - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="black_is_off", - manufacturer_id={0x001E}, - product_id={0x0001}, - product_type={0x0004}, - primary_value=ZWaveValueDiscoverySchema( - command_class={CommandClass.SWITCH_COLOR}, - property={CURRENT_COLOR_PROPERTY}, - property_key={None}, - ), - absent_values=[SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA], - ), - # Logic Group ZDB5100 - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="black_is_off", - manufacturer_id={0x0234}, - product_id={0x0121}, - product_type={0x0003}, - primary_value=ZWaveValueDiscoverySchema( - command_class={CommandClass.SWITCH_COLOR}, - property={CURRENT_COLOR_PROPERTY}, - property_key={None}, - ), - ), # ====== START OF GENERIC MAPPING SCHEMAS ======= # locks # Door Lock CC @@ -990,11 +969,6 @@ class ZWaveDiscoverySchema: ), entity_category=EntityCategory.CONFIG, ), - # binary switches - ZWaveDiscoverySchema( - platform=Platform.SWITCH, - primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, - ), # switch for Indicator CC ZWaveDiscoverySchema( platform=Platform.SWITCH, @@ -1082,15 +1056,51 @@ class ZWaveDiscoverySchema: device_class_generic={"Thermostat"}, primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, ), - # lights - # primary value is the currentValue (brightness) - # catch any device with 
multilevel CC as light - # NOTE: keep this at the bottom of the discovery scheme, - # to handle all others that need the multilevel CC first + # Handle the different combinations of Binary Switch, Multilevel Switch and Color Switch + # to create switches and/or (colored) lights. The goal is to: + # - couple Color Switch CC with Multilevel Switch CC if possible + # - couple Color Switch CC with Binary Switch CC as the first fallback + # - use Color Switch CC standalone as the last fallback + # + # Multilevel Switch CC (+ Color Switch CC) -> Dimmable light with or without color support. ZWaveDiscoverySchema( platform=Platform.LIGHT, primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, ), + # Binary Switch CC when Multilevel Switch and Color Switch CC exist -> + # On/Off switch, assign color to light entity instead + ZWaveDiscoverySchema( + platform=Platform.SWITCH, + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + required_values=[ + SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, + COLOR_SWITCH_CURRENT_VALUE_SCHEMA, + ], + ), + # Binary Switch CC and Color Switch CC -> + # Colored light that uses Binary Switch CC for turning on/off. + ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="color_onoff", + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + required_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], + ), + # Binary Switch CC without Color Switch CC -> On/Off switch + ZWaveDiscoverySchema( + platform=Platform.SWITCH, + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + absent_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], + ), + # Colored light (legacy device) that can only be controlled through Color Switch CC. + ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="color_onoff", + primary_value=COLOR_SWITCH_CURRENT_VALUE_SCHEMA, + absent_values=[ + SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, + ], + ), # light for Basic CC with target ZWaveDiscoverySchema( platform=Platform.LIGHT, @@ -1315,14 +1325,20 @@ def async_discover_single_value( # check additional required values if schema.required_values is not None and not all( - any(check_value(val, val_scheme) for val in value.node.values.values()) + any( + check_value(val, val_scheme, primary_value=value) + for val in value.node.values.values() + ) for val_scheme in schema.required_values ): continue # check for values that may not be present if schema.absent_values is not None and any( - any(check_value(val, val_scheme) for val in value.node.values.values()) + any( + check_value(val, val_scheme, primary_value=value) + for val in value.node.values.values() + ) for val_scheme in schema.absent_values ): continue @@ -1441,7 +1457,11 @@ def async_discover_single_configuration_value( @callback -def check_value(value: ZwaveValue, schema: ZWaveValueDiscoverySchema) -> bool: +def check_value( + value: ZwaveValue, + schema: ZWaveValueDiscoverySchema, + primary_value: ZwaveValue | None = None, +) -> bool: """Check if value matches scheme.""" # check command_class if ( @@ -1452,6 +1472,14 @@ def check_value(value: ZwaveValue, schema: ZWaveValueDiscoverySchema) -> bool: # check endpoint if schema.endpoint is not None and value.endpoint not in schema.endpoint: return False + # If the schema does not require an endpoint, make sure the value is on the + # same endpoint as the primary value + if ( + schema.endpoint is None + and primary_value is not None + and value.endpoint != primary_value.endpoint + ): + return False # check property if schema.property is not None and value.property_ not in schema.property: return 
False diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index 020f1b66b3d108..4a044ca3f52aa9 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -76,8 +76,8 @@ def async_add_light(info: ZwaveDiscoveryInfo) -> None: driver = client.driver assert driver is not None # Driver is ready before platforms are loaded. - if info.platform_hint == "black_is_off": - async_add_entities([ZwaveBlackIsOffLight(config_entry, driver, info)]) + if info.platform_hint == "color_onoff": + async_add_entities([ZwaveColorOnOffLight(config_entry, driver, info)]) else: async_add_entities([ZwaveLight(config_entry, driver, info)]) @@ -111,9 +111,10 @@ def __init__( self._supports_color = False self._supports_rgbw = False self._supports_color_temp = False + self._supports_dimming = False + self._color_mode: str | None = None self._hs_color: tuple[float, float] | None = None self._rgbw_color: tuple[int, int, int, int] | None = None - self._color_mode: str | None = None self._color_temp: int | None = None self._min_mireds = 153 # 6500K as a safe default self._max_mireds = 370 # 2700K as a safe default @@ -129,15 +130,28 @@ def __init__( ) self._supported_color_modes: set[ColorMode] = set() + self._target_brightness: Value | None = None + # get additional (optional) values and set features - # If the command class is Basic, we must geenerate a name that includes - # the command class name to avoid ambiguity - self._target_brightness = self.get_zwave_value( - TARGET_VALUE_PROPERTY, - CommandClass.SWITCH_MULTILEVEL, - add_to_watched_value_ids=False, - ) - if self.info.primary_value.command_class == CommandClass.BASIC: + if self.info.primary_value.command_class == CommandClass.SWITCH_BINARY: + # This light can not be dimmed separately from the color channels + self._target_brightness = self.get_zwave_value( + TARGET_VALUE_PROPERTY, + CommandClass.SWITCH_BINARY, + add_to_watched_value_ids=False, + ) + self._supports_dimming = False + elif self.info.primary_value.command_class == CommandClass.SWITCH_MULTILEVEL: + # This light can be dimmed separately from the color channels + self._target_brightness = self.get_zwave_value( + TARGET_VALUE_PROPERTY, + CommandClass.SWITCH_MULTILEVEL, + add_to_watched_value_ids=False, + ) + self._supports_dimming = True + elif self.info.primary_value.command_class == CommandClass.BASIC: + # If the command class is Basic, we must generate a name that includes + # the command class name to avoid ambiguity self._attr_name = self.generate_name( include_value_name=True, alternate_value_name="Basic" ) @@ -146,6 +160,13 @@ def __init__( CommandClass.BASIC, add_to_watched_value_ids=False, ) + self._supports_dimming = True + + self._current_color = self.get_zwave_value( + CURRENT_COLOR_PROPERTY, + CommandClass.SWITCH_COLOR, + value_property_key=None, + ) self._target_color = self.get_zwave_value( TARGET_COLOR_PROPERTY, CommandClass.SWITCH_COLOR, @@ -216,7 +237,7 @@ def hs_color(self) -> tuple[float, float] | None: @property def rgbw_color(self) -> tuple[int, int, int, int] | None: - """Return the hs color.""" + """Return the RGBW color.""" return self._rgbw_color @property @@ -243,11 +264,39 @@ async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" transition = kwargs.get(ATTR_TRANSITION) + brightness = kwargs.get(ATTR_BRIGHTNESS) - # RGB/HS color hs_color = kwargs.get(ATTR_HS_COLOR) + color_temp = kwargs.get(ATTR_COLOR_TEMP) + rgbw = kwargs.get(ATTR_RGBW_COLOR) + + new_colors 
= self._get_new_colors(hs_color, color_temp, rgbw) + if new_colors is not None: + await self._async_set_colors(new_colors, transition) + + # set brightness (or turn on if dimming is not supported) + await self._async_set_brightness(brightness, transition) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + + def _get_new_colors( + self, + hs_color: tuple[float, float] | None, + color_temp: int | None, + rgbw: tuple[int, int, int, int] | None, + brightness_scale: float | None = None, + ) -> dict[ColorComponent, int] | None: + """Determine the new color dict to set.""" + + # RGB/HS color if hs_color is not None and self._supports_color: red, green, blue = color_util.color_hs_to_RGB(*hs_color) + if brightness_scale is not None: + red = round(red * brightness_scale) + green = round(green * brightness_scale) + blue = round(blue * brightness_scale) colors = { ColorComponent.RED: red, ColorComponent.GREEN: green, @@ -257,10 +306,9 @@ async def async_turn_on(self, **kwargs: Any) -> None: # turn of white leds when setting rgb colors[ColorComponent.WARM_WHITE] = 0 colors[ColorComponent.COLD_WHITE] = 0 - await self._async_set_colors(colors, transition) + return colors # Color temperature - color_temp = kwargs.get(ATTR_COLOR_TEMP) if color_temp is not None and self._supports_color_temp: # Limit color temp to min/max values cold = max( @@ -275,20 +323,18 @@ async def async_turn_on(self, **kwargs: Any) -> None: ), ) warm = 255 - cold - await self._async_set_colors( - { - # turn off color leds when setting color temperature - ColorComponent.RED: 0, - ColorComponent.GREEN: 0, - ColorComponent.BLUE: 0, - ColorComponent.WARM_WHITE: warm, - ColorComponent.COLD_WHITE: cold, - }, - transition, - ) + colors = { + ColorComponent.WARM_WHITE: warm, + ColorComponent.COLD_WHITE: cold, + } + if self._supports_color: + # turn off color leds when setting color temperature + colors[ColorComponent.RED] = 0 + colors[ColorComponent.GREEN] = 0 + colors[ColorComponent.BLUE] = 0 + return colors # RGBW - rgbw = kwargs.get(ATTR_RGBW_COLOR) if rgbw is not None and self._supports_rgbw: rgbw_channels = { ColorComponent.RED: rgbw[0], @@ -300,17 +346,15 @@ async def async_turn_on(self, **kwargs: Any) -> None: if self._cold_white: rgbw_channels[ColorComponent.COLD_WHITE] = rgbw[3] - await self._async_set_colors(rgbw_channels, transition) - # set brightness - await self._async_set_brightness(kwargs.get(ATTR_BRIGHTNESS), transition) + return rgbw_channels - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the light off.""" - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + return None async def _async_set_colors( - self, colors: dict[ColorComponent, int], transition: float | None = None + self, + colors: dict[ColorComponent, int], + transition: float | None = None, ) -> None: """Set (multiple) defined colors to given value(s).""" # prefer the (new) combined color property @@ -361,9 +405,14 @@ async def _async_set_brightness( zwave_transition = {TRANSITION_DURATION_OPTION: "default"} # setting a value requires setting targetValue - await self._async_set_value( - self._target_brightness, zwave_brightness, zwave_transition - ) + if self._supports_dimming: + await self._async_set_value( + self._target_brightness, zwave_brightness, zwave_transition + ) + else: + await self._async_set_value( + self._target_brightness, zwave_brightness > 0, zwave_transition + ) # We do an optimistic state update when 
setting to a previous value # to avoid waiting for the value to be updated from the device which is # typically delayed and causes a confusing UX. @@ -427,15 +476,8 @@ def _calculate_color_values(self) -> None: """Calculate light colors.""" (red_val, green_val, blue_val, ww_val, cw_val) = self._get_color_values() - # prefer the (new) combined color property - # https://github.com/zwave-js/node-zwave-js/pull/1782 - combined_color_val = self.get_zwave_value( - CURRENT_COLOR_PROPERTY, - CommandClass.SWITCH_COLOR, - value_property_key=None, - ) - if combined_color_val and isinstance(combined_color_val.value, dict): - multi_color = combined_color_val.value + if self._current_color and isinstance(self._current_color.value, dict): + multi_color = self._current_color.value else: multi_color = {} @@ -486,11 +528,10 @@ def _calculate_color_values(self) -> None: self._color_mode = ColorMode.RGBW -class ZwaveBlackIsOffLight(ZwaveLight): - """Representation of a Z-Wave light where setting the color to black turns it off. +class ZwaveColorOnOffLight(ZwaveLight): + """Representation of a colored Z-Wave light with an optional binary switch to turn on/off. - Currently only supports lights with RGB, no color temperature, and no white - channels. + Dimming for RGB lights is realized by scaling the color channels. """ def __init__( @@ -499,61 +540,137 @@ def __init__( """Initialize the light.""" super().__init__(config_entry, driver, info) - self._last_color: dict[str, int] | None = None - self._supported_color_modes.discard(ColorMode.BRIGHTNESS) + self._last_on_color: dict[ColorComponent, int] | None = None + self._last_brightness: int | None = None @property - def brightness(self) -> int: - """Return the brightness of this light between 0..255.""" - return 255 + def brightness(self) -> int | None: + """Return the brightness of this light between 0..255. - @property - def is_on(self) -> bool | None: - """Return true if device is on (brightness above 0).""" + Z-Wave multilevel switches use a range of [0, 99] to control brightness. 
+ """ if self.info.primary_value.value is None: return None - return any(value != 0 for value in self.info.primary_value.value.values()) + if self._target_brightness and self.info.primary_value.value is False: + # Binary switch exists and is turned off + return 0 + + # Brightness is encoded in the color channels by scaling them lower than 255 + color_values = [ + v.value + for v in self._get_color_values() + if v is not None and v.value is not None + ] + return max(color_values) if color_values else 0 async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" + if ( kwargs.get(ATTR_RGBW_COLOR) is not None or kwargs.get(ATTR_COLOR_TEMP) is not None - or kwargs.get(ATTR_HS_COLOR) is not None ): + # RGBW and color temp are not supported in this mode, + # delegate to the parent class await super().async_turn_on(**kwargs) return transition = kwargs.get(ATTR_TRANSITION) - # turn on light to last color if known, otherwise set to white - if self._last_color is not None: - await self._async_set_colors( - { - ColorComponent.RED: self._last_color["red"], - ColorComponent.GREEN: self._last_color["green"], - ColorComponent.BLUE: self._last_color["blue"], - }, - transition, - ) - else: - await self._async_set_colors( - { + brightness = kwargs.get(ATTR_BRIGHTNESS) + hs_color = kwargs.get(ATTR_HS_COLOR) + new_colors: dict[ColorComponent, int] | None = None + scale: float | None = None + + if brightness is None and hs_color is None: + # Turned on without specifying brightness or color + if self._last_on_color is not None: + if self._target_brightness: + # Color is already set, use the binary switch to turn on + await self._async_set_brightness(None, transition) + return + + # Preserve the previous color + new_colors = self._last_on_color + elif self._supports_color: + # Turned on for the first time. 
Make it white + new_colors = { ColorComponent.RED: 255, ColorComponent.GREEN: 255, ColorComponent.BLUE: 255, - }, - transition, + } + elif brightness is not None: + # If brightness gets set, preserve the color and mix it with the new brightness + if self.color_mode == ColorMode.HS: + scale = brightness / 255 + if ( + self._last_on_color is not None + and None not in self._last_on_color.values() + ): + # Changed brightness from 0 to >0 + old_brightness = max(self._last_on_color.values()) + new_scale = brightness / old_brightness + scale = new_scale + new_colors = {} + for color, value in self._last_on_color.items(): + new_colors[color] = round(value * new_scale) + elif hs_color is None and self._color_mode == ColorMode.HS: + hs_color = self._hs_color + elif hs_color is not None and brightness is None: + # Turned on by using the color controls + current_brightness = self.brightness + if current_brightness == 0 and self._last_brightness is not None: + # Use the last brightness value if the light is currently off + scale = self._last_brightness / 255 + elif current_brightness is not None: + scale = current_brightness / 255 + + # Reset last color until turning off again + self._last_on_color = None + + if new_colors is None: + new_colors = self._get_new_colors( + hs_color=hs_color, color_temp=None, rgbw=None, brightness_scale=scale ) + if new_colors is not None: + await self._async_set_colors(new_colors, transition) + + # Turn the binary switch on if there is one + await self._async_set_brightness(brightness, transition) + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" - self._last_color = self.info.primary_value.value - await self._async_set_colors( - { + + # Remember last color and brightness to restore it when turning on + self._last_brightness = self.brightness + if self._current_color and isinstance(self._current_color.value, dict): + red = self._current_color.value.get(COLOR_SWITCH_COMBINED_RED) + green = self._current_color.value.get(COLOR_SWITCH_COMBINED_GREEN) + blue = self._current_color.value.get(COLOR_SWITCH_COMBINED_BLUE) + + last_color: dict[ColorComponent, int] = {} + if red is not None: + last_color[ColorComponent.RED] = red + if green is not None: + last_color[ColorComponent.GREEN] = green + if blue is not None: + last_color[ColorComponent.BLUE] = blue + + if last_color: + self._last_on_color = last_color + + if self._target_brightness: + # Turn off the binary switch only + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + else: + # turn off all color channels + colors = { ColorComponent.RED: 0, ColorComponent.GREEN: 0, ColorComponent.BLUE: 0, - }, - kwargs.get(ATTR_TRANSITION), - ) - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + } + + await self._async_set_colors( + colors, + kwargs.get(ATTR_TRANSITION), + ) diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index 0fee480b093f56..e3f643486a007c 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ b/homeassistant/components/zwave_js/manifest.json @@ -1,6 +1,7 @@ { "domain": "zwave_js", "name": "Z-Wave", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/z-wave"], "config_flow": true, "dependencies": ["http", "repairs", "usb", "websocket_api"], @@ -9,7 +10,7 @@ "iot_class": "local_push", "loggers": ["zwave_js_server"], "quality_scale": "platinum", - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.58.1"], + "requirements": ["pyserial==3.5", 
"zwave-js-server-python==0.59.0"], "usb": [ { "vid": "0658", diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index 969a235bb414ea..d1cb66ceafcde6 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -529,8 +529,15 @@ def process_results( for node_or_endpoint, result in get_valid_responses_from_results( nodes_or_endpoints_list, _results ): - zwave_value = result[0] - cmd_status = result[1] + if value_size is None: + # async_set_config_parameter still returns (Value, SetConfigParameterResult) + zwave_value = result[0] + cmd_status = result[1] + else: + # async_set_raw_config_parameter_value now returns just SetConfigParameterResult + cmd_status = result + zwave_value = f"parameter {property_or_property_name}" + if cmd_status.status == CommandStatus.ACCEPTED: msg = "Set configuration parameter %s on Node %s with value %s" else: diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index f5063fdfd93834..acf6e9a066519e 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -51,16 +51,6 @@ set_lock_configuration: min: 0 max: 65535 unit_of_measurement: sec - outside_handles_can_open_door_configuration: - required: false - example: [true, true, true, false] - selector: - object: - inside_handles_can_open_door_configuration: - required: false - example: [true, true, true, false] - selector: - object: auto_relock_time: required: false example: 1 diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index ca7d5153e6e0ba..28789bbf9f4c68 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -523,10 +523,6 @@ "description": "Duration in seconds the latch stays retracted.", "name": "Hold and release time" }, - "inside_handles_can_open_door_configuration": { - "description": "A list of four booleans which indicate which inside handles can open the door.", - "name": "Inside handles can open door configuration" - }, "lock_timeout": { "description": "Seconds until lock mode times out. 
Should only be used if operation type is `timed`.", "name": "Lock timeout" @@ -535,10 +531,6 @@ "description": "The operation type of the lock.", "name": "Operation Type" }, - "outside_handles_can_open_door_configuration": { - "description": "A list of four booleans which indicate which outside handles can open the door.", - "name": "Outside handles can open door configuration" - }, "twist_assist": { "description": "Enable Twist Assist.", "name": "Twist assist" diff --git a/homeassistant/components/zwave_js/update.py b/homeassistant/components/zwave_js/update.py index 02c59d220e1ca4..d060abe007d47c 100644 --- a/homeassistant/components/zwave_js/update.py +++ b/homeassistant/components/zwave_js/update.py @@ -155,7 +155,8 @@ def _update_progress(self, event: dict[str, Any]) -> None: progress: NodeFirmwareUpdateProgress = event["firmware_update_progress"] if not self._latest_version_firmware: return - self._attr_in_progress = int(progress.progress) + self._attr_in_progress = True + self._attr_update_percentage = int(progress.progress) self.async_write_ha_state() @callback @@ -181,6 +182,7 @@ def _unsub_firmware_events_and_reset_progress( self._result = None self._finished_event.clear() self._attr_in_progress = False + self._attr_update_percentage = None if write_state: self.async_write_ha_state() @@ -267,6 +269,7 @@ async def async_install( assert firmware self._unsub_firmware_events_and_reset_progress(False) self._attr_in_progress = True + self._attr_update_percentage = None self.async_write_ha_state() self._progress_unsub = self.node.on( diff --git a/homeassistant/config.py b/homeassistant/config.py index 9063429ca91774..cab4d0c7affc27 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -17,62 +17,23 @@ import shutil from types import ModuleType from typing import TYPE_CHECKING, Any -from urllib.parse import urlparse from awesomeversion import AwesomeVersion import voluptuous as vol from voluptuous.humanize import MAX_VALIDATION_ERROR_ITEM_LENGTH from yaml.error import MarkedYAMLError -from . 
import auth -from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers -from .const import ( - ATTR_ASSUMED_STATE, - ATTR_FRIENDLY_NAME, - ATTR_HIDDEN, - CONF_ALLOWLIST_EXTERNAL_DIRS, - CONF_ALLOWLIST_EXTERNAL_URLS, - CONF_AUTH_MFA_MODULES, - CONF_AUTH_PROVIDERS, - CONF_COUNTRY, - CONF_CURRENCY, - CONF_CUSTOMIZE, - CONF_CUSTOMIZE_DOMAIN, - CONF_CUSTOMIZE_GLOB, - CONF_DEBUG, - CONF_ELEVATION, - CONF_EXTERNAL_URL, - CONF_ID, - CONF_INTERNAL_URL, - CONF_LANGUAGE, - CONF_LATITUDE, - CONF_LEGACY_TEMPLATES, - CONF_LONGITUDE, - CONF_MEDIA_DIRS, - CONF_NAME, - CONF_PACKAGES, - CONF_PLATFORM, - CONF_RADIUS, - CONF_TEMPERATURE_UNIT, - CONF_TIME_ZONE, - CONF_TYPE, - CONF_UNIT_SYSTEM, - LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, - __version__, -) -from .core import DOMAIN as HOMEASSISTANT_DOMAIN, ConfigSource, HomeAssistant, callback +from .const import CONF_PACKAGES, CONF_PLATFORM, __version__ +from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from .core_config import _PACKAGE_DEFINITION_SCHEMA, _PACKAGES_CONFIG_SCHEMA from .exceptions import ConfigValidationError, HomeAssistantError -from .generated.currencies import HISTORIC_CURRENCIES -from .helpers import config_validation as cv, issue_registry as ir -from .helpers.entity_values import EntityValues +from .helpers import config_validation as cv from .helpers.translation import async_get_exception_message from .helpers.typing import ConfigType from .loader import ComponentProtocol, Integration, IntegrationNotFound from .requirements import RequirementsNotFound, async_get_integration_with_requirements from .util.async_ import create_eager_task -from .util.hass_dict import HassKey from .util.package import is_docker_env -from .util.unit_system import get_unit_system, validate_unit_system from .util.yaml import SECRET_YAML, Secrets, YamlTypeError, load_yaml_dict from .util.yaml.objects import NodeStrClass @@ -83,7 +44,6 @@ YAML_CONFIG_FILE = "configuration.yaml" VERSION_FILE = ".HA_VERSION" CONFIG_DIR_NAME = ".homeassistant" -DATA_CUSTOMIZE: HassKey[EntityValues] = HassKey("hass_customize") AUTOMATION_CONFIG_PATH = "automations.yaml" SCRIPT_CONFIG_PATH = "scripts.yaml" @@ -172,201 +132,6 @@ class IntegrationConfigInfo: exception_info_list: list[ConfigExceptionInfo] -def _no_duplicate_auth_provider( - configs: Sequence[dict[str, Any]], -) -> Sequence[dict[str, Any]]: - """No duplicate auth provider config allowed in a list. - - Each type of auth provider can only have one config without optional id. - Unique id is required if same type of auth provider used multiple times. - """ - config_keys: set[tuple[str, str | None]] = set() - for config in configs: - key = (config[CONF_TYPE], config.get(CONF_ID)) - if key in config_keys: - raise vol.Invalid( - f"Duplicate auth provider {config[CONF_TYPE]} found. " - "Please add unique IDs " - "if you want to have the same auth provider twice" - ) - config_keys.add(key) - return configs - - -def _no_duplicate_auth_mfa_module( - configs: Sequence[dict[str, Any]], -) -> Sequence[dict[str, Any]]: - """No duplicate auth mfa module item allowed in a list. - - Each type of mfa module can only have one config without optional id. - A global unique id is required if same type of mfa module used multiple - times. - Note: this is different than auth provider - """ - config_keys: set[str] = set() - for config in configs: - key = config.get(CONF_ID, config[CONF_TYPE]) - if key in config_keys: - raise vol.Invalid( - f"Duplicate mfa module {config[CONF_TYPE]} found. 
" - "Please add unique IDs " - "if you want to have the same mfa module twice" - ) - config_keys.add(key) - return configs - - -def _filter_bad_internal_external_urls(conf: dict) -> dict: - """Filter internal/external URL with a path.""" - for key in CONF_INTERNAL_URL, CONF_EXTERNAL_URL: - if key in conf and urlparse(conf[key]).path not in ("", "/"): - # We warn but do not fix, because if this was incorrectly configured, - # adjusting this value might impact security. - _LOGGER.warning( - "Invalid %s set. It's not allowed to have a path (/bla)", key - ) - - return conf - - -# Schema for all packages element -PACKAGES_CONFIG_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list)}) - -# Schema for individual package definition -PACKAGE_DEFINITION_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list, None)}) - -CUSTOMIZE_DICT_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_FRIENDLY_NAME): cv.string, - vol.Optional(ATTR_HIDDEN): cv.boolean, - vol.Optional(ATTR_ASSUMED_STATE): cv.boolean, - }, - extra=vol.ALLOW_EXTRA, -) - -CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( - { - vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema( - {cv.entity_id: CUSTOMIZE_DICT_SCHEMA} - ), - vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema( - {cv.string: CUSTOMIZE_DICT_SCHEMA} - ), - vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema( - {cv.string: CUSTOMIZE_DICT_SCHEMA} - ), - } -) - - -def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: - if currency not in HISTORIC_CURRENCIES: - ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - "historic_currency", - is_fixable=False, - learn_more_url="homeassistant://config/general", - severity=ir.IssueSeverity.WARNING, - translation_key="historic_currency", - translation_placeholders={"currency": currency}, - ) - - -def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None: - if country is not None: - ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "country_not_configured") - return - - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - "country_not_configured", - is_fixable=False, - learn_more_url="homeassistant://config/general", - severity=ir.IssueSeverity.WARNING, - translation_key="country_not_configured", - ) - - -def _validate_currency(data: Any) -> Any: - try: - return cv.currency(data) - except vol.InInvalid: - with suppress(vol.InInvalid): - return cv.historic_currency(data) - raise - - -CORE_CONFIG_SCHEMA = vol.All( - CUSTOMIZE_CONFIG_SCHEMA.extend( - { - CONF_NAME: vol.Coerce(str), - CONF_LATITUDE: cv.latitude, - CONF_LONGITUDE: cv.longitude, - CONF_ELEVATION: vol.Coerce(int), - CONF_RADIUS: cv.positive_int, - vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, - CONF_UNIT_SYSTEM: validate_unit_system, - CONF_TIME_ZONE: cv.time_zone, - vol.Optional(CONF_INTERNAL_URL): cv.url, - vol.Optional(CONF_EXTERNAL_URL): cv.url, - vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All( - cv.ensure_list, [vol.IsDir()] - ), - vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All( - cv.ensure_list, [vol.IsDir()] - ), - vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All( - cv.ensure_list, [cv.url] - ), - vol.Optional(CONF_PACKAGES, default={}): PACKAGES_CONFIG_SCHEMA, - vol.Optional(CONF_AUTH_PROVIDERS): vol.All( - cv.ensure_list, - [ - auth_providers.AUTH_PROVIDER_SCHEMA.extend( - { - CONF_TYPE: vol.NotIn( - ["insecure_example"], - ( - "The insecure_example auth provider" - " is for testing only." 
- ), - ) - } - ) - ], - _no_duplicate_auth_provider, - ), - vol.Optional(CONF_AUTH_MFA_MODULES): vol.All( - cv.ensure_list, - [ - auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend( - { - CONF_TYPE: vol.NotIn( - ["insecure_example"], - "The insecure_example mfa module is for testing only.", - ) - } - ) - ], - _no_duplicate_auth_mfa_module, - ), - vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()), - vol.Remove(CONF_LEGACY_TEMPLATES): cv.boolean, - vol.Optional(CONF_CURRENCY): _validate_currency, - vol.Optional(CONF_COUNTRY): cv.country, - vol.Optional(CONF_LANGUAGE): cv.language, - vol.Optional(CONF_DEBUG): cv.boolean, - } - ), - _filter_bad_internal_external_urls, -) - - def get_default_config_dir() -> str: """Put together the default configuration directory based on the OS.""" data_dir = os.path.expanduser("~") @@ -812,131 +577,6 @@ def format_schema_error( return humanize_error(hass, exc, domain, config, link) -async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None: - """Process the [homeassistant] section from the configuration. - - This method is a coroutine. - """ - # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir - # so we need to run it in an executor job. - config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) - - # Only load auth during startup. - if not hasattr(hass, "auth"): - if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: - auth_conf = [{"type": "homeassistant"}] - - mfa_conf = config.get( - CONF_AUTH_MFA_MODULES, - [{"type": "totp", "id": "totp", "name": "Authenticator app"}], - ) - - setattr( - hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf) - ) - - await hass.config.async_load() - - hac = hass.config - - if any( - k in config - for k in ( - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_NAME, - CONF_ELEVATION, - CONF_TIME_ZONE, - CONF_UNIT_SYSTEM, - CONF_EXTERNAL_URL, - CONF_INTERNAL_URL, - CONF_CURRENCY, - CONF_COUNTRY, - CONF_LANGUAGE, - CONF_RADIUS, - ) - ): - hac.config_source = ConfigSource.YAML - - for key, attr in ( - (CONF_LATITUDE, "latitude"), - (CONF_LONGITUDE, "longitude"), - (CONF_NAME, "location_name"), - (CONF_ELEVATION, "elevation"), - (CONF_INTERNAL_URL, "internal_url"), - (CONF_EXTERNAL_URL, "external_url"), - (CONF_MEDIA_DIRS, "media_dirs"), - (CONF_CURRENCY, "currency"), - (CONF_COUNTRY, "country"), - (CONF_LANGUAGE, "language"), - (CONF_RADIUS, "radius"), - ): - if key in config: - setattr(hac, attr, config[key]) - - if config.get(CONF_DEBUG): - hac.debug = True - - _raise_issue_if_historic_currency(hass, hass.config.currency) - _raise_issue_if_no_country(hass, hass.config.country) - - if CONF_TIME_ZONE in config: - await hac.async_set_time_zone(config[CONF_TIME_ZONE]) - - if CONF_MEDIA_DIRS not in config: - if is_docker_env(): - hac.media_dirs = {"local": "/media"} - else: - hac.media_dirs = {"local": hass.config.path("media")} - - # Init whitelist external dir - hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()} - if CONF_ALLOWLIST_EXTERNAL_DIRS in config: - hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])) - - elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config: - _LOGGER.warning( - "Key %s has been replaced with %s. 
Please update your config", - LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, - CONF_ALLOWLIST_EXTERNAL_DIRS, - ) - hac.allowlist_external_dirs.update( - set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS]) - ) - - # Init whitelist external URL list – make sure to add / to every URL that doesn't - # already have it so that we can properly test "path ownership" - if CONF_ALLOWLIST_EXTERNAL_URLS in config: - hac.allowlist_external_urls.update( - url if url.endswith("/") else f"{url}/" - for url in config[CONF_ALLOWLIST_EXTERNAL_URLS] - ) - - # Customize - cust_exact = dict(config[CONF_CUSTOMIZE]) - cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN]) - cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) - - for name, pkg in config[CONF_PACKAGES].items(): - if (pkg_cust := pkg.get(HOMEASSISTANT_DOMAIN)) is None: - continue - - try: - pkg_cust = CUSTOMIZE_CONFIG_SCHEMA(pkg_cust) - except vol.Invalid: - _LOGGER.warning("Package %s contains invalid customize", name) - continue - - cust_exact.update(pkg_cust[CONF_CUSTOMIZE]) - cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN]) - cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB]) - - hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob) - - if CONF_UNIT_SYSTEM in config: - hac.units = get_unit_system(config[CONF_UNIT_SYSTEM]) - - def _log_pkg_error( hass: HomeAssistant, package: str, component: str | None, config: dict, message: str ) -> None: @@ -1001,7 +641,7 @@ def _identify_config_schema(module: ComponentProtocol) -> str | None: def _validate_package_definition(name: str, conf: Any) -> None: """Validate basic package definition properties.""" cv.slug(name) - PACKAGE_DEFINITION_SCHEMA(conf) + _PACKAGE_DEFINITION_SCHEMA(conf) def _recursive_merge(conf: dict[str, Any], package: dict[str, Any]) -> str | None: @@ -1040,7 +680,7 @@ async def merge_packages_config( vol.Invalid if whole package config is invalid. """ - PACKAGES_CONFIG_SCHEMA(packages) + _PACKAGES_CONFIG_SCHEMA(packages) invalid_packages = [] for pack_name, pack_conf in packages.items(): diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index 28fecf9bcc4119..f1748c6b7fb89d 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -27,10 +27,16 @@ from async_interrupt import interrupt from propcache import cached_property from typing_extensions import TypeVar +import voluptuous as vol from . 
import data_entry_flow, loader from .components import persistent_notification -from .const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, Platform +from .const import ( + CONF_NAME, + EVENT_HOMEASSISTANT_STARTED, + EVENT_HOMEASSISTANT_STOP, + Platform, +) from .core import ( CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, @@ -41,7 +47,7 @@ HomeAssistant, callback, ) -from .data_entry_flow import FLOW_NOT_COMPLETE_STEPS, FlowResult +from .data_entry_flow import FLOW_NOT_COMPLETE_STEPS, FlowContext, FlowResult from .exceptions import ( ConfigEntryAuthFailed, ConfigEntryError, @@ -57,7 +63,7 @@ RANDOM_MICROSECOND_MIN, async_call_later, ) -from .helpers.frame import report +from .helpers.frame import ReportBehavior, report, report_usage from .helpers.json import json_bytes, json_bytes_sorted, json_fragment from .helpers.typing import UNDEFINED, ConfigType, DiscoveryInfoType, UndefinedType from .loader import async_suggest_report_issue @@ -78,10 +84,10 @@ if TYPE_CHECKING: from .components.bluetooth import BluetoothServiceInfoBleak from .components.dhcp import DhcpServiceInfo - from .components.hassio import HassioServiceInfo from .components.ssdp import SsdpServiceInfo from .components.usb import UsbServiceInfo from .components.zeroconf import ZeroconfServiceInfo + from .helpers.service_info.hassio import HassioServiceInfo from .helpers.service_info.mqtt import MqttServiceInfo @@ -123,6 +129,9 @@ DISCOVERY_COOLDOWN = 1 +ISSUE_UNIQUE_ID_COLLISION = "config_entry_unique_id_collision" +UNIQUE_ID_COLLISION_TITLE_LIMIT = 5 + _DataT = TypeVar("_DataT", default=Any) @@ -170,11 +179,13 @@ def recoverable(self) -> bool: SOURCE_DHCP, SOURCE_DISCOVERY, SOURCE_HARDWARE, + SOURCE_HASSIO, SOURCE_HOMEKIT, SOURCE_IMPORT, SOURCE_INTEGRATION_DISCOVERY, SOURCE_MQTT, SOURCE_SSDP, + SOURCE_SYSTEM, SOURCE_USB, SOURCE_ZEROCONF, } @@ -267,7 +278,19 @@ class OperationNotAllowed(ConfigError): } -class ConfigFlowResult(FlowResult, total=False): +class ConfigFlowContext(FlowContext, total=False): + """Typed context dict for config flow.""" + + alternative_domain: str + configuration_url: str + confirm_only: bool + discovery_key: DiscoveryKey + entry_id: str + title_placeholders: Mapping[str, str] + unique_id: str | None + + +class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): """Typed result dict for config flow.""" minor_version: int @@ -314,7 +337,6 @@ class ConfigEntry(Generic[_DataT]): _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None setup_lock: asyncio.Lock _reauth_lock: asyncio.Lock - _reconfigure_lock: asyncio.Lock _tasks: set[asyncio.Future[Any]] _background_tasks: set[asyncio.Future[Any]] _integration_for_domain: loader.Integration | None @@ -418,8 +440,6 @@ def __init__( _setter(self, "setup_lock", asyncio.Lock()) # Reauth lock to prevent concurrent reauth flows _setter(self, "_reauth_lock", asyncio.Lock()) - # Reconfigure lock to prevent concurrent reconfigure flows - _setter(self, "_reconfigure_lock", asyncio.Lock()) _setter(self, "_tasks", set()) _setter(self, "_background_tasks", set()) @@ -518,10 +538,21 @@ async def async_setup( integration: loader.Integration | None = None, ) -> None: """Set up an entry.""" - current_entry.set(self) if self.source == SOURCE_IGNORE or self.disabled_by: return + current_entry.set(self) + try: + await self.__async_setup_with_context(hass, integration) + finally: + current_entry.set(None) + + async def __async_setup_with_context( + self, + hass: HomeAssistant, + integration: loader.Integration | None, + ) -> None: + 
"""Set up an entry, with current_entry set.""" if integration is None and not (integration := self._integration_for_domain): integration = await loader.async_get_integration(hass, self.domain) self._integration_for_domain = integration @@ -1026,7 +1057,7 @@ async def _async_process_on_unload(self, hass: HomeAssistant) -> None: def async_start_reauth( self, hass: HomeAssistant, - context: dict[str, Any] | None = None, + context: ConfigFlowContext | None = None, data: dict[str, Any] | None = None, ) -> None: """Start a reauth flow.""" @@ -1044,7 +1075,7 @@ def async_start_reauth( async def _async_init_reauth( self, hass: HomeAssistant, - context: dict[str, Any] | None = None, + context: ConfigFlowContext | None = None, data: dict[str, Any] | None = None, ) -> None: """Start a reauth flow.""" @@ -1056,12 +1087,12 @@ async def _async_init_reauth( return result = await hass.config_entries.flow.async_init( self.domain, - context={ - "source": SOURCE_REAUTH, - "entry_id": self.entry_id, - "title_placeholders": {"name": self.title}, - "unique_id": self.unique_id, - } + context=ConfigFlowContext( + source=SOURCE_REAUTH, + entry_id=self.entry_id, + title_placeholders={"name": self.title}, + unique_id=self.unique_id, + ) | (context or {}), data=self.data | (data or {}), ) @@ -1082,49 +1113,6 @@ async def _async_init_reauth( translation_placeholders={"name": self.title}, ) - @callback - def async_start_reconfigure( - self, - hass: HomeAssistant, - context: dict[str, Any] | None = None, - data: dict[str, Any] | None = None, - ) -> None: - """Start a reconfigure flow.""" - # We will check this again in the task when we hold the lock, - # but we also check it now to try to avoid creating the task. - if any(self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH})): - # Reconfigure or reauth flow already in progress for this entry - return - hass.async_create_task( - self._async_init_reconfigure(hass, context, data), - f"config entry reconfigure {self.title} {self.domain} {self.entry_id}", - ) - - async def _async_init_reconfigure( - self, - hass: HomeAssistant, - context: dict[str, Any] | None = None, - data: dict[str, Any] | None = None, - ) -> None: - """Start a reconfigure flow.""" - async with self._reconfigure_lock: - if any( - self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH}) - ): - # Reconfigure or reauth flow already in progress for this entry - return - await hass.config_entries.flow.async_init( - self.domain, - context={ - "source": SOURCE_RECONFIGURE, - "entry_id": self.entry_id, - "title_placeholders": {"name": self.title}, - "unique_id": self.unique_id, - } - | (context or {}), - data=self.data | (data or {}), - ) - @callback def async_get_active_flows( self, hass: HomeAssistant, sources: set[str] @@ -1214,7 +1202,9 @@ def _report_non_awaited_platform_forwards(entry: ConfigEntry, what: str) -> None ) -class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): +class ConfigEntriesFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] +): """Manage all the config entry flows that are in progress.""" _flow_result = ConfigFlowResult @@ -1260,20 +1250,41 @@ def _async_has_other_discovery_flows(self, flow_id: str) -> bool: return False async def async_init( - self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None + self, + handler: str, + *, + context: ConfigFlowContext | None = None, + data: Any = None, ) -> ConfigFlowResult: """Start a configuration flow.""" if not context or "source" not in 
context: raise KeyError("Context not set or doesn't have a source set") + # reauth/reconfigure flows should be linked to a config entry + if (source := context["source"]) in { + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + } and "entry_id" not in context: + # Deprecated in 2024.12, should fail in 2025.12 + report( + f"initialises a {source} flow without a link to the config entry", + error_if_integration=False, + error_if_core=True, + ) + flow_id = ulid_util.ulid_now() # Avoid starting a config flow on an integration that only supports # a single config entry, but which already has an entry if ( - context.get("source") - not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE} - and self.config_entries.async_has_entries(handler, include_ignore=False) + source not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE} + and ( + self.config_entries.async_has_entries(handler, include_ignore=False) + or ( + self.config_entries.async_has_entries(handler, include_ignore=True) + and source != SOURCE_USER + ) + ) and await _support_single_config_entry_only(self.hass, handler) ): return ConfigFlowResult( @@ -1286,7 +1297,7 @@ async def async_init( loop = self.hass.loop - if context["source"] == SOURCE_IMPORT: + if source == SOURCE_IMPORT: self._pending_import_flows[handler][flow_id] = loop.create_future() cancel_init_future = loop.create_future() @@ -1319,7 +1330,7 @@ async def _async_init( self, flow_id: str, handler: str, - context: dict, + context: ConfigFlowContext, data: Any, ) -> tuple[ConfigFlow, ConfigFlowResult]: """Run the init in a task to allow it to be canceled at shutdown.""" @@ -1357,7 +1368,7 @@ def async_shutdown(self) -> None: async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], result: ConfigFlowResult, ) -> ConfigFlowResult: """Finish a config flow and add an entry. @@ -1452,6 +1463,7 @@ async def async_finish_flow( or progress_unique_id == DEFAULT_DISCOVERY_UNIQUE_ID ): self.async_abort(progress_flow_id) + continue # Abort any flows in progress for the same handler # when integration allows only one config entry @@ -1495,16 +1507,24 @@ async def async_finish_flow( version=result["version"], ) + if existing_entry is not None: + # Unload and remove the existing entry + await self.config_entries._async_remove(existing_entry.entry_id) # noqa: SLF001 await self.config_entries.async_add(entry) if existing_entry is not None: - await self.config_entries.async_remove(existing_entry.entry_id) + # Clean up devices and entities belonging to the existing entry + self.config_entries._async_clean_up(existing_entry) # noqa: SLF001 result["result"] = entry return result async def async_create_flow( - self, handler_key: str, *, context: dict | None = None, data: Any = None + self, + handler_key: str, + *, + context: ConfigFlowContext | None = None, + data: Any = None, ) -> ConfigFlow: """Create a flow for specified handler. 
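
Editor's note between hunks: the change above makes ConfigEntriesFlowManager.async_init report a deprecation (logged for integrations, failing for core) when a reauth or reconfigure flow is started without an "entry_id" in its ConfigFlowContext, and types the flow context throughout. A minimal sketch of the expected calling pattern follows; it is illustrative only and not part of the patch. It assumes a loaded ConfigEntry and uses only names visible in this diff (async_start_reauth, ConfigFlowContext, SOURCE_REAUTH, flow.async_init); the function name trigger_reauth is invented for the example.

# Illustrative sketch only, not part of the patch.
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry, ConfigFlowContext
from homeassistant.core import HomeAssistant


async def trigger_reauth(hass: HomeAssistant, entry: ConfigEntry) -> None:
    # Preferred: the entry helper links the flow to the config entry by
    # filling entry_id, title_placeholders and unique_id in the context.
    entry.async_start_reauth(hass)

    # Roughly equivalent manual call; omitting "entry_id" from the context
    # here is exactly what the new guard in async_init now reports.
    await hass.config_entries.flow.async_init(
        entry.domain,
        context=ConfigFlowContext(
            source=SOURCE_REAUTH,
            entry_id=entry.entry_id,
            title_placeholders={"name": entry.title},
            unique_id=entry.unique_id,
        ),
        data=dict(entry.data),
    )
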
@@ -1522,7 +1542,7 @@ async def async_create_flow( async def async_post_init( self, - flow: data_entry_flow.FlowHandler[ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], result: ConfigFlowResult, ) -> None: """After a flow is initialised trigger new flow notifications.""" @@ -1560,7 +1580,7 @@ def _async_discovery(self) -> None: @callback def async_has_matching_discovery_flow( - self, handler: str, match_context: dict[str, Any], data: Any + self, handler: str, match_context: ConfigFlowContext, data: Any ) -> bool: """Check if an existing matching discovery flow is in progress. @@ -1610,6 +1630,7 @@ def values(self) -> ValuesView[ConfigEntry]: def __setitem__(self, entry_id: str, entry: ConfigEntry) -> None: """Add an item.""" data = self.data + self.check_unique_id(entry) if entry_id in data: # This is likely a bug in a test that is adding the same entry twice. # In the future, once we have fixed the tests, this will raise HomeAssistantError. @@ -1618,34 +1639,49 @@ def __setitem__(self, entry_id: str, entry: ConfigEntry) -> None: data[entry_id] = entry self._index_entry(entry) + def check_unique_id(self, entry: ConfigEntry) -> None: + """Check config entry unique id. + + For a string unique id (this is the correct case): return + For a hashable non string unique id: log warning + For a non-hashable unique id: raise error + """ + if (unique_id := entry.unique_id) is None: + return + if isinstance(unique_id, str): + # Unique id should be a string + return + if isinstance(unique_id, Hashable): # type: ignore[unreachable] + # Checks for other non-string was added in HA Core 2024.10 + # In HA Core 2025.10, we should remove the error and instead fail + report_issue = async_suggest_report_issue( + self._hass, integration_domain=entry.domain + ) + _LOGGER.error( + ( + "Config entry '%s' from integration %s has an invalid unique_id" + " '%s' of type %s when a string is expected, please %s" + ), + entry.title, + entry.domain, + entry.unique_id, + type(entry.unique_id).__name__, + report_issue, + ) + else: + # Guard against integrations using unhashable unique_id + # In HA Core 2024.11, the guard was changed from warning to failing + raise HomeAssistantError( + f"The entry unique id {unique_id} is not a string." 
+ ) + def _index_entry(self, entry: ConfigEntry) -> None: """Index an entry.""" + self.check_unique_id(entry) self._domain_index.setdefault(entry.domain, []).append(entry) if entry.unique_id is not None: - unique_id_hash = entry.unique_id - if not isinstance(entry.unique_id, str): - # Guard against integrations using unhashable unique_id - # In HA Core 2024.9, we should remove the guard and instead fail - if not isinstance(entry.unique_id, Hashable): # type: ignore[unreachable] - unique_id_hash = str(entry.unique_id) - # Checks for other non-string was added in HA Core 2024.10 - # In HA Core 2025.10, we should remove the error and instead fail - report_issue = async_suggest_report_issue( - self._hass, integration_domain=entry.domain - ) - _LOGGER.error( - ( - "Config entry '%s' from integration %s has an invalid unique_id" - " '%s', please %s" - ), - entry.title, - entry.domain, - entry.unique_id, - report_issue, - ) - self._domain_unique_id_index.setdefault(entry.domain, {}).setdefault( - unique_id_hash, [] + entry.unique_id, [] ).append(entry) def _unindex_entry(self, entry_id: str) -> None: @@ -1656,9 +1692,6 @@ def _unindex_entry(self, entry_id: str) -> None: if not self._domain_index[domain]: del self._domain_index[domain] if (unique_id := entry.unique_id) is not None: - # Check type first to avoid expensive isinstance call - if type(unique_id) is not str and not isinstance(unique_id, Hashable): # noqa: E721 - unique_id = str(entry.unique_id) # type: ignore[unreachable] self._domain_unique_id_index[domain][unique_id].remove(entry) if not self._domain_unique_id_index[domain][unique_id]: del self._domain_unique_id_index[domain][unique_id] @@ -1677,6 +1710,7 @@ def update_unique_id(self, entry: ConfigEntry, new_unique_id: str | None) -> Non """ entry_id = entry.entry_id self._unindex_entry(entry_id) + self.check_unique_id(entry) object.__setattr__(entry, "unique_id", new_unique_id) self._index_entry(entry) entry.clear_state_cache() @@ -1690,9 +1724,12 @@ def get_entry_by_domain_and_unique_id( self, domain: str, unique_id: str ) -> ConfigEntry | None: """Get entry by domain and unique id.""" - # Check type first to avoid expensive isinstance call - if type(unique_id) is not str and not isinstance(unique_id, Hashable): # noqa: E721 - unique_id = str(unique_id) # type: ignore[unreachable] + if unique_id is None: + return None # type: ignore[unreachable] + if not isinstance(unique_id, Hashable): + raise HomeAssistantError( + f"The entry unique id {unique_id} is not a string." 
+ ) entries = self._domain_unique_id_index.get(domain, {}).get(unique_id) if not entries: return None @@ -1861,12 +1898,27 @@ async def async_add(self, entry: ConfigEntry) -> None: ) self._entries[entry.entry_id] = entry + self.async_update_issues() self._async_dispatch(ConfigEntryChange.ADDED, entry) await self.async_setup(entry.entry_id) self._async_schedule_save() async def async_remove(self, entry_id: str) -> dict[str, Any]: - """Remove an entry.""" + """Remove, unload and clean up after an entry.""" + unload_success, entry = await self._async_remove(entry_id) + self._async_clean_up(entry) + + for discovery_domain in entry.discovery_keys: + async_dispatcher_send_internal( + self.hass, + signal_discovered_config_entry_removed(discovery_domain), + entry, + ) + + return {"require_restart": not unload_success} + + async def _async_remove(self, entry_id: str) -> tuple[bool, ConfigEntry]: + """Remove and unload an entry.""" if (entry := self.async_get_entry(entry_id)) is None: raise UnknownEntry @@ -1879,8 +1931,16 @@ async def async_remove(self, entry_id: str) -> dict[str, Any]: await entry.async_remove(self.hass) del self._entries[entry.entry_id] + self.async_update_issues() self._async_schedule_save() + return (unload_success, entry) + + @callback + def _async_clean_up(self, entry: ConfigEntry) -> None: + """Clean up after an entry.""" + entry_id = entry.entry_id + dev_reg = device_registry.async_get(self.hass) ent_reg = entity_registry.async_get(self.hass) @@ -1899,13 +1959,6 @@ async def async_remove(self, entry_id: str) -> dict[str, Any]: ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) self._async_dispatch(ConfigEntryChange.REMOVED, entry) - for discovery_domain in entry.discovery_keys: - async_dispatcher_send_internal( - self.hass, - signal_discovered_config_entry_removed(discovery_domain), - entry, - ) - return {"require_restart": not unload_success} @callback def _async_shutdown(self, event: Event) -> None: @@ -1953,6 +2006,7 @@ async def async_initialize(self) -> None: entries[entry_id] = config_entry self._entries = entries + self.async_update_issues() async def async_setup(self, entry_id: str, _lock: bool = True) -> bool: """Set up a config entry. @@ -2120,8 +2174,33 @@ def async_update_entry( _setter = object.__setattr__ if unique_id is not UNDEFINED and entry.unique_id != unique_id: + # Deprecated in 2024.11, should fail in 2025.11 + if ( + # flipr creates duplicates during migration, and asks users to + # remove the duplicate. We don't need warn about it here too. 
+ # We should remove the special case for "flipr" in HA Core 2025.4, + # when the flipr migration period ends + entry.domain != "flipr" + and unique_id is not None + and self.async_entry_for_domain_unique_id(entry.domain, unique_id) + is not None + ): + report_issue = async_suggest_report_issue( + self.hass, integration_domain=entry.domain + ) + _LOGGER.error( + ( + "Unique id of config entry '%s' from integration %s changed to" + " '%s' which is already in use, please %s" + ), + entry.title, + entry.domain, + unique_id, + report_issue, + ) # Reindex the entry if the unique_id has changed self._entries.update_unique_id(entry, unique_id) + self.async_update_issues() changed = True for attr, value in ( @@ -2364,6 +2443,84 @@ async def async_wait_component(self, entry: ConfigEntry) -> bool: return False return entry.state is ConfigEntryState.LOADED + @callback + def async_update_issues(self) -> None: + """Update unique id collision issues.""" + issue_registry = ir.async_get(self.hass) + issues: set[str] = set() + + for issue in issue_registry.issues.values(): + if ( + issue.domain != HOMEASSISTANT_DOMAIN + or not (issue_data := issue.data) + or issue_data.get("issue_type") != ISSUE_UNIQUE_ID_COLLISION + ): + continue + issues.add(issue.issue_id) + + for domain, unique_ids in self._entries._domain_unique_id_index.items(): # noqa: SLF001 + # flipr creates duplicates during migration, and asks users to + # remove the duplicate. We don't need warn about it here too. + # We should remove the special case for "flipr" in HA Core 2025.4, + # when the flipr migration period ends + if domain == "flipr": + continue + for unique_id, entries in unique_ids.items(): + # We might mutate the list of entries, so we need a copy to not mess up + # the index + entries = list(entries) + + # There's no need to raise an issue for ignored entries, we can + # safely remove them once we no longer allow unique id collisions. 
+ # Iterate over a copy of the copy to allow mutating while iterating + for entry in list(entries): + if entry.source == SOURCE_IGNORE: + entries.remove(entry) + + if len(entries) < 2: + continue + issue_id = f"{ISSUE_UNIQUE_ID_COLLISION}_{domain}_{unique_id}" + issues.discard(issue_id) + titles = [f"'{entry.title}'" for entry in entries] + translation_placeholders = { + "domain": domain, + "configure_url": f"/config/integrations/integration/{domain}", + "unique_id": str(unique_id), + } + if len(titles) <= UNIQUE_ID_COLLISION_TITLE_LIMIT: + translation_key = "config_entry_unique_id_collision" + translation_placeholders["titles"] = ", ".join(titles) + else: + translation_key = "config_entry_unique_id_collision_many" + translation_placeholders["number_of_entries"] = str(len(titles)) + translation_placeholders["titles"] = ", ".join( + titles[:UNIQUE_ID_COLLISION_TITLE_LIMIT] + ) + translation_placeholders["title_limit"] = str( + UNIQUE_ID_COLLISION_TITLE_LIMIT + ) + + ir.async_create_issue( + self.hass, + HOMEASSISTANT_DOMAIN, + issue_id, + breaks_in_ha_version="2025.11.0", + data={ + "issue_type": ISSUE_UNIQUE_ID_COLLISION, + "unique_id": unique_id, + }, + is_fixable=False, + issue_domain=domain, + severity=ir.IssueSeverity.ERROR, + translation_key=translation_key, + translation_placeholders=translation_placeholders, + ) + + break # Only create one issue per domain + + for issue_id in issues: + ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) + @callback def _async_abort_entries_match( @@ -2385,7 +2542,9 @@ def _async_abort_entries_match( raise data_entry_flow.AbortFlow("already_configured") -class ConfigEntryBaseFlow(data_entry_flow.FlowHandler[ConfigFlowResult]): +class ConfigEntryBaseFlow( + data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult] +): """Base class for config and option flows.""" _flow_result = ConfigFlowResult @@ -2406,7 +2565,7 @@ def unique_id(self) -> str | None: if not self.context: return None - return cast(str | None, self.context.get("unique_id")) + return self.context.get("unique_id") @staticmethod @callback @@ -2432,6 +2591,27 @@ def _async_abort_entries_match( self._async_current_entries(include_ignore=False), match_dict ) + @callback + def _abort_if_unique_id_mismatch( + self, + *, + reason: str = "unique_id_mismatch", + description_placeholders: Mapping[str, str] | None = None, + ) -> None: + """Abort if the unique ID does not match the reauth/reconfigure context. + + Requires strings.json entry corresponding to the `reason` parameter + in user visible flows. + """ + if ( + self.source == SOURCE_REAUTH + and self._get_reauth_entry().unique_id != self.unique_id + ) or ( + self.source == SOURCE_RECONFIGURE + and self._get_reconfigure_entry().unique_id != self.unique_id + ): + raise data_entry_flow.AbortFlow(reason, description_placeholders) + @callback def _abort_if_unique_id_configured( self, @@ -2709,6 +2889,20 @@ def async_create_entry( # type: ignore[override] options: Mapping[str, Any] | None = None, ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" + if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: + report_issue = async_suggest_report_issue( + self.hass, integration_domain=self.handler + ) + _LOGGER.warning( + ( + "Detected %s config flow creating a new entry, " + "when it is expected to update an existing entry and abort. 
" + "This will stop working in %s, please %s" + ), + self.source, + "2025.11", + report_issue, + ) result = super().async_create_entry( title=title, data=data, @@ -2730,11 +2924,30 @@ def async_update_reload_and_abort( unique_id: str | None | UndefinedType = UNDEFINED, title: str | UndefinedType = UNDEFINED, data: Mapping[str, Any] | UndefinedType = UNDEFINED, + data_updates: Mapping[str, Any] | UndefinedType = UNDEFINED, options: Mapping[str, Any] | UndefinedType = UNDEFINED, reason: str | UndefinedType = UNDEFINED, reload_even_if_entry_is_unchanged: bool = True, ) -> ConfigFlowResult: - """Update config entry, reload config entry and finish config flow.""" + """Update config entry, reload config entry and finish config flow. + + :param data: replace the entry data with new data + :param data_updates: add items from data_updates to entry data - existing keys + are overridden + :param options: replace the entry options with new options + :param title: replace the title of the entry + :param unique_id: replace the unique_id of the entry + + :param reason: set the reason for the abort, defaults to + `reauth_successful` or `reconfigure_successful` based on flow source + + :param reload_even_if_entry_is_unchanged: set this to `False` if the entry + should not be reloaded if it is unchanged + """ + if data_updates is not UNDEFINED: + if data is not UNDEFINED: + raise ValueError("Cannot set both data and data_updates") + data = entry.data | data_updates result = self.hass.config_entries.async_update_entry( entry=entry, unique_id=unique_id, @@ -2750,6 +2963,38 @@ def async_update_reload_and_abort( reason = "reconfigure_successful" return self.async_abort(reason=reason) + @callback + def async_show_form( + self, + *, + step_id: str | None = None, + data_schema: vol.Schema | None = None, + errors: dict[str, str] | None = None, + description_placeholders: Mapping[str, str | None] | None = None, + last_step: bool | None = None, + preview: str | None = None, + ) -> ConfigFlowResult: + """Return the definition of a form to gather user input. + + The step_id parameter is deprecated and will be removed in a future release. + """ + if self.source == SOURCE_REAUTH and "entry_id" in self.context: + # If the integration does not provide a name for the reauth title, + # we append it to the description placeholders. + # We also need to check entry_id as some integrations bypass the + # reauth helpers and create a flow without it. 
+ description_placeholders = dict(description_placeholders or {}) + if description_placeholders.get(CONF_NAME) is None: + description_placeholders[CONF_NAME] = self._get_reauth_entry().title + return super().async_show_form( + step_id=step_id, + data_schema=data_schema, + errors=errors, + description_placeholders=description_placeholders, + last_step=last_step, + preview=preview, + ) + def is_matching(self, other_flow: Self) -> bool: """Return True if other_flow is matching this flow.""" raise NotImplementedError @@ -2759,7 +3004,7 @@ def _reauth_entry_id(self) -> str: """Return reauth entry id.""" if self.source != SOURCE_REAUTH: raise ValueError(f"Source is {self.source}, expected {SOURCE_REAUTH}") - return self.context["entry_id"] # type: ignore[no-any-return] + return self.context["entry_id"] @callback def _get_reauth_entry(self) -> ConfigEntry: @@ -2773,7 +3018,7 @@ def _reconfigure_entry_id(self) -> str: """Return reconfigure entry id.""" if self.source != SOURCE_RECONFIGURE: raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}") - return self.context["entry_id"] # type: ignore[no-any-return] + return self.context["entry_id"] @callback def _get_reconfigure_entry(self) -> ConfigEntry: @@ -2785,7 +3030,9 @@ def _get_reconfigure_entry(self) -> ConfigEntry: raise UnknownEntry -class OptionsFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): +class OptionsFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] +): """Flow to set options for a configuration entry.""" _flow_result = ConfigFlowResult @@ -2802,7 +3049,7 @@ async def async_create_flow( self, handler_key: str, *, - context: dict[str, Any] | None = None, + context: ConfigFlowContext | None = None, data: dict[str, Any] | None = None, ) -> OptionsFlow: """Create an options flow for a config entry. @@ -2815,7 +3062,7 @@ async def async_create_flow( async def async_finish_flow( self, - flow: data_entry_flow.FlowHandler[ConfigFlowResult], + flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult], result: ConfigFlowResult, ) -> ConfigFlowResult: """Finish an options flow and update options for configuration entry. @@ -2840,7 +3087,7 @@ async def async_finish_flow( return result async def _async_setup_preview( - self, flow: data_entry_flow.FlowHandler[ConfigFlowResult] + self, flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult] ) -> None: """Set up preview for an option flow handler.""" entry = self._async_get_config_entry(flow.handler) @@ -2855,6 +3102,9 @@ class OptionsFlow(ConfigEntryBaseFlow): handler: str + _config_entry: ConfigEntry + """For compatibility only - to be removed in 2025.12""" + @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -2863,32 +3113,78 @@ def _async_abort_entries_match( Requires `already_configured` in strings.json in user visible flows. """ - - config_entry = cast( - ConfigEntry, self.hass.config_entries.async_get_entry(self.handler) - ) _async_abort_entries_match( [ entry - for entry in self.hass.config_entries.async_entries(config_entry.domain) - if entry is not config_entry and entry.source != SOURCE_IGNORE + for entry in self.hass.config_entries.async_entries( + self.config_entry.domain + ) + if entry is not self.config_entry and entry.source != SOURCE_IGNORE ], match_dict, ) + @property + def _config_entry_id(self) -> str: + """Return config entry id. + + Please note that this is not available inside `__init__` method, and + can only be referenced after initialisation. 
+ """ + # This is the same as handler, but that's an implementation detail + if self.handler is None: + raise ValueError( + "The config entry id is not available during initialisation" + ) + return self.handler + + @property + def config_entry(self) -> ConfigEntry: + """Return the config entry linked to the current options flow. + + Please note that this is not available inside `__init__` method, and + can only be referenced after initialisation. + """ + # For compatibility only - to be removed in 2025.12 + if hasattr(self, "_config_entry"): + return self._config_entry + + if self.hass is None: + raise ValueError("The config entry is not available during initialisation") + if entry := self.hass.config_entries.async_get_entry(self._config_entry_id): + return entry + raise UnknownEntry + + @config_entry.setter + def config_entry(self, value: ConfigEntry) -> None: + """Set the config entry value.""" + report_usage( + "sets option flow config_entry explicitly, which is deprecated " + "and will stop working in 2025.12", + core_behavior=ReportBehavior.ERROR, + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.LOG, + ) + self._config_entry = value + class OptionsFlowWithConfigEntry(OptionsFlow): - """Base class for options flows with config entry and options.""" + """Base class for options flows with config entry and options. + + This class is being phased out, and should not be referenced in new code. + It is kept only for backward compatibility, and only for custom integrations. + """ def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" self._config_entry = config_entry self._options = deepcopy(dict(config_entry.options)) - - @property - def config_entry(self) -> ConfigEntry: - """Return the config entry.""" - return self._config_entry + report_usage( + "inherits from OptionsFlowWithConfigEntry", + core_behavior=ReportBehavior.ERROR, + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.IGNORE, + ) @property def options(self) -> dict[str, Any]: diff --git a/homeassistant/const.py b/homeassistant/const.py index a0277231551452..558e7ec2b0b84e 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -9,6 +9,7 @@ from .helpers.deprecation import ( DeprecatedConstant, DeprecatedConstantEnum, + EnumWithDeprecatedMembers, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, @@ -23,7 +24,7 @@ APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 11 +MINOR_VERSION: Final = 12 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" @@ -478,16 +479,6 @@ class Platform(StrEnum): STATE_PAUSED: Final = "paused" STATE_IDLE: Final = "idle" STATE_STANDBY: Final = "standby" -STATE_ALARM_DISARMED: Final = "disarmed" -STATE_ALARM_ARMED_HOME: Final = "armed_home" -STATE_ALARM_ARMED_AWAY: Final = "armed_away" -STATE_ALARM_ARMED_NIGHT: Final = "armed_night" -STATE_ALARM_ARMED_VACATION: Final = "armed_vacation" -STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = "armed_custom_bypass" -STATE_ALARM_PENDING: Final = "pending" -STATE_ALARM_ARMING: Final = "arming" -STATE_ALARM_DISARMING: Final = "disarming" -STATE_ALARM_TRIGGERED: Final = "triggered" STATE_UNAVAILABLE: Final = "unavailable" STATE_OK: Final = "ok" STATE_PROBLEM: Final = "problem" @@ -521,6 +512,60 @@ class Platform(StrEnum): "2025.10", ) +# #### ALARM CONTROL PANEL 
STATES #### +# STATE_ALARM_* below are deprecated as of 2024.11 +# use the AlarmControlPanelState enum instead. +_DEPRECATED_STATE_ALARM_DISARMED: Final = DeprecatedConstant( + "disarmed", + "AlarmControlPanelState.DISARMED", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_HOME: Final = DeprecatedConstant( + "armed_home", + "AlarmControlPanelState.ARMED_HOME", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_AWAY: Final = DeprecatedConstant( + "armed_away", + "AlarmControlPanelState.ARMED_AWAY", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_NIGHT: Final = DeprecatedConstant( + "armed_night", + "AlarmControlPanelState.ARMED_NIGHT", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_VACATION: Final = DeprecatedConstant( + "armed_vacation", + "AlarmControlPanelState.ARMED_VACATION", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = DeprecatedConstant( + "armed_custom_bypass", + "AlarmControlPanelState.ARMED_CUSTOM_BYPASS", + "2025.11", +) +_DEPRECATED_STATE_ALARM_PENDING: Final = DeprecatedConstant( + "pending", + "AlarmControlPanelState.PENDING", + "2025.11", +) +_DEPRECATED_STATE_ALARM_ARMING: Final = DeprecatedConstant( + "arming", + "AlarmControlPanelState.ARMING", + "2025.11", +) +_DEPRECATED_STATE_ALARM_DISARMING: Final = DeprecatedConstant( + "disarming", + "AlarmControlPanelState.DISARMING", + "2025.11", +) +_DEPRECATED_STATE_ALARM_TRIGGERED: Final = DeprecatedConstant( + "triggered", + "AlarmControlPanelState.TRIGGERED", + "2025.11", +) + # #### STATE AND EVENT ATTRIBUTES #### # Attribution ATTR_ATTRIBUTION: Final = "attribution" @@ -680,6 +725,9 @@ class UnitOfPower(StrEnum): WATT = "W" KILO_WATT = "kW" + MEGA_WATT = "MW" + GIGA_WATT = "GW" + TERA_WATT = "TW" BTU_PER_HOUR = "BTU/h" @@ -725,6 +773,8 @@ class UnitOfEnergy(StrEnum): WATT_HOUR = "Wh" KILO_WATT_HOUR = "kWh" MEGA_WATT_HOUR = "MWh" + GIGA_WATT_HOUR = "GWh" + TERA_WATT_HOUR = "TWh" CALORIE = "cal" KILO_CALORIE = "kcal" MEGA_CALORIE = "Mcal" @@ -1177,20 +1227,35 @@ class UnitOfMass(StrEnum): """Deprecated: please use UnitOfMass.POUNDS""" -# Conductivity units -class UnitOfConductivity(StrEnum): +class UnitOfConductivity( + StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "SIEMENS": ("UnitOfConductivity.SIEMENS_PER_CM", "2025.11.0"), + "MICROSIEMENS": ("UnitOfConductivity.MICROSIEMENS_PER_CM", "2025.11.0"), + "MILLISIEMENS": ("UnitOfConductivity.MILLISIEMENS_PER_CM", "2025.11.0"), + }, +): """Conductivity units.""" + SIEMENS_PER_CM = "S/cm" + MICROSIEMENS_PER_CM = "µS/cm" + MILLISIEMENS_PER_CM = "mS/cm" + + # Deprecated aliases SIEMENS = "S/cm" + """Deprecated: Please use UnitOfConductivity.SIEMENS_PER_CM""" MICROSIEMENS = "µS/cm" + """Deprecated: Please use UnitOfConductivity.MICROSIEMENS_PER_CM""" MILLISIEMENS = "mS/cm" + """Deprecated: Please use UnitOfConductivity.MILLISIEMENS_PER_CM""" _DEPRECATED_CONDUCTIVITY: Final = DeprecatedConstantEnum( - UnitOfConductivity.MICROSIEMENS, - "2025.6", + UnitOfConductivity.MICROSIEMENS_PER_CM, + "2025.11", ) -"""Deprecated: please use UnitOfConductivity.MICROSIEMENS""" +"""Deprecated: please use UnitOfConductivity.MICROSIEMENS_PER_CM""" # Light units LIGHT_LUX: Final = "lx" @@ -1293,6 +1358,13 @@ class UnitOfPrecipitationDepth(StrEnum): CONCENTRATION_PARTS_PER_BILLION: Final = "ppb" +class UnitOfBloodGlucoseConcentration(StrEnum): + """Blood glucose concentration units.""" + + MILLIGRAMS_PER_DECILITER = "mg/dL" + MILLIMOLE_PER_LITER = "mmol/L" + + # Speed units class UnitOfSpeed(StrEnum): """Speed units.""" diff --git a/homeassistant/core.py 
b/homeassistant/core.py index 82ec4956a94d6b..cdfb5570b4437b 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -18,15 +18,12 @@ ValuesView, ) import concurrent.futures -from contextlib import suppress from dataclasses import dataclass import datetime import enum import functools import inspect import logging -import os -import pathlib import re import threading import time @@ -42,12 +39,10 @@ cast, overload, ) -from urllib.parse import urlparse from propcache import cached_property, under_cached_property from typing_extensions import TypeVar import voluptuous as vol -import yarl from . import util from .const import ( @@ -55,7 +50,6 @@ ATTR_FRIENDLY_NAME, ATTR_SERVICE, ATTR_SERVICE_DATA, - BASE_PLATFORMS, COMPRESSED_STATE_ATTRIBUTES, COMPRESSED_STATE_CONTEXT, COMPRESSED_STATE_LAST_CHANGED, @@ -77,7 +71,6 @@ MAX_EXPECTED_ENTITY_IDS, MAX_LENGTH_EVENT_EVENT_TYPE, MAX_LENGTH_STATE_STATE, - UnitOfLength, __version__, ) from .exceptions import ( @@ -90,14 +83,16 @@ Unauthorized, ) from .helpers.deprecation import ( + DeferredDeprecatedAlias, DeprecatedConstantEnum, + EnumWithDeprecatedMembers, all_with_deprecated_constants, check_if_deprecated_constant, dir_with_deprecated_constants, ) from .helpers.json import json_bytes, json_fragment -from .helpers.typing import UNDEFINED, UndefinedType, VolSchemaType -from .util import dt as dt_util, location +from .helpers.typing import VolSchemaType +from .util import dt as dt_util from .util.async_ import ( cancelling, create_eager_task, @@ -112,18 +107,11 @@ from .util.read_only_dict import ReadOnlyDict from .util.timeout import TimeoutManager from .util.ulid import ulid_at_time, ulid_now -from .util.unit_system import ( - _CONF_UNIT_SYSTEM_IMPERIAL, - _CONF_UNIT_SYSTEM_US_CUSTOMARY, - METRIC_SYSTEM, - UnitSystem, - get_unit_system, -) # Typing imports that create a circular dependency if TYPE_CHECKING: from .auth import AuthManager - from .components.http import ApiConfig, HomeAssistantHTTP + from .components.http import HomeAssistantHTTP from .config_entries import ConfigEntries from .helpers.entity import StateInfo @@ -137,10 +125,6 @@ _DataT = TypeVar("_DataT", bound=Mapping[str, Any], default=Mapping[str, Any]) type CALLBACK_TYPE = Callable[[], None] -CORE_STORAGE_KEY = "core.config" -CORE_STORAGE_VERSION = 1 -CORE_STORAGE_MINOR_VERSION = 4 - DOMAIN = "homeassistant" # How long to wait to log tasks that are blocking @@ -150,7 +134,16 @@ type EntityServiceResponse = dict[str, ServiceResponse] -class ConfigSource(enum.StrEnum): +class ConfigSource( + enum.StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "DEFAULT": ("core_config.ConfigSource.DEFAULT", "2025.11.0"), + "DISCOVERED": ("core_config.ConfigSource.DISCOVERED", "2025.11.0"), + "STORAGE": ("core_config.ConfigSource.STORAGE", "2025.11.0"), + "YAML": ("core_config.ConfigSource.YAML", "2025.11.0"), + }, +): """Source of core configuration.""" DEFAULT = "default" @@ -192,6 +185,19 @@ class EventStateReportedData(EventStateEventData): _DEPRECATED_SOURCE_YAML = DeprecatedConstantEnum(ConfigSource.YAML, "2025.1") +def _deprecated_core_config() -> Any: + # pylint: disable-next=import-outside-toplevel + from . import core_config + + return core_config.Config + + +# The Config class was moved to core_config in Home Assistant 2024.11 +_DEPRECATED_Config = DeferredDeprecatedAlias( + _deprecated_core_config, "homeassistant.core_config.Config", "2025.11" +) + + # How long to wait until things that run on startup have to finish. 
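# Illustrative sketch of how a custom integration would follow the
# Config/ConfigSource relocation shown above: the aliases left behind on
# homeassistant.core keep working until 2025.11 but log a deprecation, so new
# code imports from homeassistant.core_config instead. The helper name below
# is invented for the example.
from homeassistant.core_config import Config, ConfigSource


def core_config_is_ui_managed(config: Config) -> bool:
    """Return True when the core config was last written via storage (the UI)."""
    return config.config_source is ConfigSource.STORAGE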
TIMEOUT_EVENT_START = 15 @@ -431,6 +437,9 @@ def __init__(self, config_dir: str) -> None: # pylint: disable-next=import-outside-toplevel from . import loader + # pylint: disable-next=import-outside-toplevel + from .core_config import Config + # This is a dictionary that any component can store any data on. self.data = HassDict() self.loop = asyncio.get_running_loop() @@ -647,12 +656,12 @@ def async_add_job[_R, *_Ts]( # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( + frame.report_usage( "calls `async_add_job`, which is deprecated and will be removed in Home " "Assistant 2025.4; Please review " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, ) if target is None: @@ -703,12 +712,12 @@ def async_add_hass_job[_R]( # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( + frame.report_usage( "calls `async_add_hass_job`, which is deprecated and will be removed in Home " "Assistant 2025.5; Please review " "https://developers.home-assistant.io/blog/2024/04/07/deprecate_add_hass_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, ) return self._async_add_hass_job(hassjob, *args, background=background) @@ -977,12 +986,12 @@ def async_run_job[_R, *_Ts]( # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( + frame.report_usage( "calls `async_run_job`, which is deprecated and will be removed in Home " "Assistant 2025.4; Please review " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, ) if asyncio.iscoroutine(target): @@ -1626,10 +1635,10 @@ def async_listen( # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( + frame.report_usage( "calls `async_listen` with run_immediately, which is" " deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, ) if event_filter is not None and not is_callback_check_partial(event_filter): @@ -1696,10 +1705,10 @@ def async_listen_once( # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( + frame.report_usage( "calls `async_listen_once` with run_immediately, which is " "deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, ) one_time_listener: _OneTimeListener[_DataT] = _OneTimeListener( @@ -2843,452 +2852,6 @@ async def _execute_service( return await self._hass.async_add_executor_job(target, service_call) -class _ComponentSet(set[str]): - """Set of loaded components. - - This set contains both top level components and platforms. - - Examples: - `light`, `switch`, `hue`, `mjpeg.camera`, `universal.media_player`, - `homeassistant.scene` - - The top level components set only contains the top level components. - - The all components set contains all components, including platform - based components. 
- - """ - - def __init__( - self, top_level_components: set[str], all_components: set[str] - ) -> None: - """Initialize the component set.""" - self._top_level_components = top_level_components - self._all_components = all_components - - def add(self, component: str) -> None: - """Add a component to the store.""" - if "." not in component: - self._top_level_components.add(component) - self._all_components.add(component) - else: - platform, _, domain = component.partition(".") - if domain in BASE_PLATFORMS: - self._all_components.add(platform) - return super().add(component) - - def remove(self, component: str) -> None: - """Remove a component from the store.""" - if "." in component: - raise ValueError("_ComponentSet does not support removing sub-components") - self._top_level_components.remove(component) - return super().remove(component) - - def discard(self, component: str) -> None: - """Remove a component from the store.""" - raise NotImplementedError("_ComponentSet does not support discard, use remove") - - -class Config: - """Configuration settings for Home Assistant.""" - - _store: Config._ConfigStore - - def __init__(self, hass: HomeAssistant, config_dir: str) -> None: - """Initialize a new config object.""" - # pylint: disable-next=import-outside-toplevel - from .components.zone import DEFAULT_RADIUS - - self.hass = hass - - self.latitude: float = 0 - self.longitude: float = 0 - - self.elevation: int = 0 - """Elevation (always in meters regardless of the unit system).""" - - self.radius: int = DEFAULT_RADIUS - """Radius of the Home Zone (always in meters regardless of the unit system).""" - - self.debug: bool = False - self.location_name: str = "Home" - self.time_zone: str = "UTC" - self.units: UnitSystem = METRIC_SYSTEM - self.internal_url: str | None = None - self.external_url: str | None = None - self.currency: str = "EUR" - self.country: str | None = None - self.language: str = "en" - - self.config_source: ConfigSource = ConfigSource.DEFAULT - - # If True, pip install is skipped for requirements on startup - self.skip_pip: bool = False - - # List of packages to skip when installing requirements on startup - self.skip_pip_packages: list[str] = [] - - # Set of loaded top level components - # This set is updated by _ComponentSet - # and should not be modified directly - self.top_level_components: set[str] = set() - - # Set of all loaded components including platform - # based components - self.all_components: set[str] = set() - - # Set of loaded components - self.components: _ComponentSet = _ComponentSet( - self.top_level_components, self.all_components - ) - - # API (HTTP) server configuration - self.api: ApiConfig | None = None - - # Directory that holds the configuration - self.config_dir: str = config_dir - - # List of allowed external dirs to access - self.allowlist_external_dirs: set[str] = set() - - # List of allowed external URLs that integrations may use - self.allowlist_external_urls: set[str] = set() - - # Dictionary of Media folders that integrations may use - self.media_dirs: dict[str, str] = {} - - # If Home Assistant is running in recovery mode - self.recovery_mode: bool = False - - # Use legacy template behavior - self.legacy_templates: bool = False - - # If Home Assistant is running in safe mode - self.safe_mode: bool = False - - def async_initialize(self) -> None: - """Finish initializing a config object. - - This must be called before the config object is used. 
- """ - self._store = self._ConfigStore(self.hass) - - def distance(self, lat: float, lon: float) -> float | None: - """Calculate distance from Home Assistant. - - Async friendly. - """ - return self.units.length( - location.distance(self.latitude, self.longitude, lat, lon), - UnitOfLength.METERS, - ) - - def path(self, *path: str) -> str: - """Generate path to the file within the configuration directory. - - Async friendly. - """ - return os.path.join(self.config_dir, *path) - - def is_allowed_external_url(self, url: str) -> bool: - """Check if an external URL is allowed.""" - parsed_url = f"{yarl.URL(url)!s}/" - - return any( - allowed - for allowed in self.allowlist_external_urls - if parsed_url.startswith(allowed) - ) - - def is_allowed_path(self, path: str) -> bool: - """Check if the path is valid for access from outside. - - This function does blocking I/O and should not be called from the event loop. - Use hass.async_add_executor_job to schedule it on the executor. - """ - assert path is not None - - thepath = pathlib.Path(path) - try: - # The file path does not have to exist (it's parent should) - if thepath.exists(): - thepath = thepath.resolve() - else: - thepath = thepath.parent.resolve() - except (FileNotFoundError, RuntimeError, PermissionError): - return False - - for allowed_path in self.allowlist_external_dirs: - try: - thepath.relative_to(allowed_path) - except ValueError: - pass - else: - return True - - return False - - def as_dict(self) -> dict[str, Any]: - """Create a dictionary representation of the configuration. - - Async friendly. - """ - allowlist_external_dirs = list(self.allowlist_external_dirs) - return { - "latitude": self.latitude, - "longitude": self.longitude, - "elevation": self.elevation, - "unit_system": self.units.as_dict(), - "location_name": self.location_name, - "time_zone": self.time_zone, - "components": list(self.components), - "config_dir": self.config_dir, - # legacy, backwards compat - "whitelist_external_dirs": allowlist_external_dirs, - "allowlist_external_dirs": allowlist_external_dirs, - "allowlist_external_urls": list(self.allowlist_external_urls), - "version": __version__, - "config_source": self.config_source, - "recovery_mode": self.recovery_mode, - "state": self.hass.state.value, - "external_url": self.external_url, - "internal_url": self.internal_url, - "currency": self.currency, - "country": self.country, - "language": self.language, - "safe_mode": self.safe_mode, - "debug": self.debug, - "radius": self.radius, - } - - async def async_set_time_zone(self, time_zone_str: str) -> None: - """Help to set the time zone.""" - if time_zone := await dt_util.async_get_time_zone(time_zone_str): - self.time_zone = time_zone_str - dt_util.set_default_time_zone(time_zone) - else: - raise ValueError(f"Received invalid time zone {time_zone_str}") - - def set_time_zone(self, time_zone_str: str) -> None: - """Set the time zone. - - This is a legacy method that should not be used in new code. - Use async_set_time_zone instead. - - It will be removed in Home Assistant 2025.6. 
- """ - # report is imported here to avoid a circular import - from .helpers.frame import report # pylint: disable=import-outside-toplevel - - report( - "set the time zone using set_time_zone instead of async_set_time_zone" - " which will stop working in Home Assistant 2025.6", - error_if_core=True, - error_if_integration=True, - ) - if time_zone := dt_util.get_time_zone(time_zone_str): - self.time_zone = time_zone_str - dt_util.set_default_time_zone(time_zone) - else: - raise ValueError(f"Received invalid time zone {time_zone_str}") - - async def _async_update( - self, - *, - source: ConfigSource, - latitude: float | None = None, - longitude: float | None = None, - elevation: int | None = None, - unit_system: str | None = None, - location_name: str | None = None, - time_zone: str | None = None, - external_url: str | UndefinedType | None = UNDEFINED, - internal_url: str | UndefinedType | None = UNDEFINED, - currency: str | None = None, - country: str | UndefinedType | None = UNDEFINED, - language: str | None = None, - radius: int | None = None, - ) -> None: - """Update the configuration from a dictionary.""" - self.config_source = source - if latitude is not None: - self.latitude = latitude - if longitude is not None: - self.longitude = longitude - if elevation is not None: - self.elevation = elevation - if unit_system is not None: - try: - self.units = get_unit_system(unit_system) - except ValueError: - self.units = METRIC_SYSTEM - if location_name is not None: - self.location_name = location_name - if time_zone is not None: - await self.async_set_time_zone(time_zone) - if external_url is not UNDEFINED: - self.external_url = external_url - if internal_url is not UNDEFINED: - self.internal_url = internal_url - if currency is not None: - self.currency = currency - if country is not UNDEFINED: - self.country = country - if language is not None: - self.language = language - if radius is not None: - self.radius = radius - - async def async_update(self, **kwargs: Any) -> None: - """Update the configuration from a dictionary.""" - # pylint: disable-next=import-outside-toplevel - from .config import ( - _raise_issue_if_historic_currency, - _raise_issue_if_no_country, - ) - - await self._async_update(source=ConfigSource.STORAGE, **kwargs) - await self._async_store() - self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) - - _raise_issue_if_historic_currency(self.hass, self.currency) - _raise_issue_if_no_country(self.hass, self.country) - - async def async_load(self) -> None: - """Load [homeassistant] core config.""" - if not (data := await self._store.async_load()): - return - - # In 2021.9 we fixed validation to disallow a path (because that's never - # correct) but this data still lives in storage, so we print a warning. - if data.get("external_url") and urlparse(data["external_url"]).path not in ( - "", - "/", - ): - _LOGGER.warning("Invalid external_url set. It's not allowed to have a path") - - if data.get("internal_url") and urlparse(data["internal_url"]).path not in ( - "", - "/", - ): - _LOGGER.warning("Invalid internal_url set. 
It's not allowed to have a path") - - await self._async_update( - source=ConfigSource.STORAGE, - latitude=data.get("latitude"), - longitude=data.get("longitude"), - elevation=data.get("elevation"), - unit_system=data.get("unit_system_v2"), - location_name=data.get("location_name"), - time_zone=data.get("time_zone"), - external_url=data.get("external_url", UNDEFINED), - internal_url=data.get("internal_url", UNDEFINED), - currency=data.get("currency"), - country=data.get("country"), - language=data.get("language"), - radius=data["radius"], - ) - - async def _async_store(self) -> None: - """Store [homeassistant] core config.""" - data = { - "latitude": self.latitude, - "longitude": self.longitude, - "elevation": self.elevation, - # We don't want any integrations to use the name of the unit system - # so we are using the private attribute here - "unit_system_v2": self.units._name, # noqa: SLF001 - "location_name": self.location_name, - "time_zone": self.time_zone, - "external_url": self.external_url, - "internal_url": self.internal_url, - "currency": self.currency, - "country": self.country, - "language": self.language, - "radius": self.radius, - } - await self._store.async_save(data) - - # Circular dependency prevents us from generating the class at top level - # pylint: disable-next=import-outside-toplevel - from .helpers.storage import Store - - class _ConfigStore(Store[dict[str, Any]]): - """Class to help storing Config data.""" - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize storage class.""" - super().__init__( - hass, - CORE_STORAGE_VERSION, - CORE_STORAGE_KEY, - private=True, - atomic_writes=True, - minor_version=CORE_STORAGE_MINOR_VERSION, - ) - self._original_unit_system: str | None = None # from old store 1.1 - - async def _async_migrate_func( - self, - old_major_version: int, - old_minor_version: int, - old_data: dict[str, Any], - ) -> dict[str, Any]: - """Migrate to the new version.""" - - # pylint: disable-next=import-outside-toplevel - from .components.zone import DEFAULT_RADIUS - - data = old_data - if old_major_version == 1 and old_minor_version < 2: - # In 1.2, we remove support for "imperial", replaced by "us_customary" - # Using a new key to allow rollback - self._original_unit_system = data.get("unit_system") - data["unit_system_v2"] = self._original_unit_system - if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL: - data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY - if old_major_version == 1 and old_minor_version < 3: - # In 1.3, we add the key "language", initialize it from the - # owner account. - data["language"] = "en" - try: - owner = await self.hass.auth.async_get_owner() - if owner is not None: - # pylint: disable-next=import-outside-toplevel - from .components.frontend import storage as frontend_store - - # pylint: disable-next=import-outside-toplevel - from .helpers import config_validation as cv - - _, owner_data = await frontend_store.async_user_store( - self.hass, owner.id - ) - - if ( - "language" in owner_data - and "language" in owner_data["language"] - ): - with suppress(vol.InInvalid): - data["language"] = cv.language( - owner_data["language"]["language"] - ) - # pylint: disable-next=broad-except - except Exception: - _LOGGER.exception("Unexpected error during core config migration") - if old_major_version == 1 and old_minor_version < 4: - # In 1.4, we add the key "radius", initialize it with the default. 
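# Sketch of the frame.report -> frame.report_usage switch applied throughout
# the hunks above: instead of error_if_core/error_if_integration flags, the
# new API takes a ReportBehavior per caller type (ignore, log, or raise). The
# wrapper name and message here are illustrative only; the keyword arguments
# mirror the combinations used elsewhere in this diff.
from homeassistant.helpers.frame import ReportBehavior, report_usage


def warn_deprecated_helper_call() -> None:
    """Report a call to a deprecated helper using the new frame API."""
    report_usage(
        "calls a deprecated helper that will be removed in a future release",
        core_behavior=ReportBehavior.ERROR,
        core_integration_behavior=ReportBehavior.ERROR,
        custom_integration_behavior=ReportBehavior.LOG,
    )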
- data.setdefault("radius", DEFAULT_RADIUS) - - if old_major_version > 1: - raise NotImplementedError - return data - - async def async_save(self, data: dict[str, Any]) -> None: - if self._original_unit_system: - data["unit_system"] = self._original_unit_system - return await super().async_save(data) - - # These can be removed if no deprecated constant are in this module anymore __getattr__ = functools.partial(check_if_deprecated_constant, module_globals=globals()) __dir__ = functools.partial( diff --git a/homeassistant/core_config.py b/homeassistant/core_config.py new file mode 100644 index 00000000000000..5c773c57bc4399 --- /dev/null +++ b/homeassistant/core_config.py @@ -0,0 +1,891 @@ +"""Module to help with parsing and generating configuration files.""" + +from __future__ import annotations + +from collections import OrderedDict +from collections.abc import Sequence +from contextlib import suppress +import enum +import logging +import os +import pathlib +from typing import TYPE_CHECKING, Any, Final +from urllib.parse import urlparse + +import voluptuous as vol +from webrtc_models import RTCConfiguration, RTCIceServer +import yarl + +from . import auth +from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers +from .const import ( + ATTR_ASSUMED_STATE, + ATTR_FRIENDLY_NAME, + ATTR_HIDDEN, + BASE_PLATFORMS, + CONF_ALLOWLIST_EXTERNAL_DIRS, + CONF_ALLOWLIST_EXTERNAL_URLS, + CONF_AUTH_MFA_MODULES, + CONF_AUTH_PROVIDERS, + CONF_COUNTRY, + CONF_CURRENCY, + CONF_CUSTOMIZE, + CONF_CUSTOMIZE_DOMAIN, + CONF_CUSTOMIZE_GLOB, + CONF_DEBUG, + CONF_ELEVATION, + CONF_EXTERNAL_URL, + CONF_ID, + CONF_INTERNAL_URL, + CONF_LANGUAGE, + CONF_LATITUDE, + CONF_LEGACY_TEMPLATES, + CONF_LONGITUDE, + CONF_MEDIA_DIRS, + CONF_NAME, + CONF_PACKAGES, + CONF_RADIUS, + CONF_TEMPERATURE_UNIT, + CONF_TIME_ZONE, + CONF_TYPE, + CONF_UNIT_SYSTEM, + CONF_URL, + CONF_USERNAME, + EVENT_CORE_CONFIG_UPDATE, + LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, + UnitOfLength, + __version__, +) +from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from .generated.currencies import HISTORIC_CURRENCIES +from .helpers import config_validation as cv, issue_registry as ir +from .helpers.entity_values import EntityValues +from .helpers.frame import ReportBehavior, report_usage +from .helpers.storage import Store +from .helpers.typing import UNDEFINED, UndefinedType +from .util import dt as dt_util, location +from .util.hass_dict import HassKey +from .util.package import is_docker_env +from .util.unit_system import ( + _CONF_UNIT_SYSTEM_IMPERIAL, + _CONF_UNIT_SYSTEM_US_CUSTOMARY, + METRIC_SYSTEM, + UnitSystem, + get_unit_system, + validate_unit_system, +) + +# Typing imports that create a circular dependency +if TYPE_CHECKING: + from .components.http import ApiConfig + +_LOGGER = logging.getLogger(__name__) + +DATA_CUSTOMIZE: HassKey[EntityValues] = HassKey("hass_customize") + +CONF_CREDENTIAL: Final = "credential" +CONF_ICE_SERVERS: Final = "ice_servers" +CONF_WEBRTC: Final = "webrtc" + +CORE_STORAGE_KEY = "core.config" +CORE_STORAGE_VERSION = 1 +CORE_STORAGE_MINOR_VERSION = 4 + + +class ConfigSource(enum.StrEnum): + """Source of core configuration.""" + + DEFAULT = "default" + DISCOVERED = "discovered" + STORAGE = "storage" + YAML = "yaml" + + +def _no_duplicate_auth_provider( + configs: Sequence[dict[str, Any]], +) -> Sequence[dict[str, Any]]: + """No duplicate auth provider config allowed in a list. + + Each type of auth provider can only have one config without optional id. 
+ Unique id is required if same type of auth provider used multiple times. + """ + config_keys: set[tuple[str, str | None]] = set() + for config in configs: + key = (config[CONF_TYPE], config.get(CONF_ID)) + if key in config_keys: + raise vol.Invalid( + f"Duplicate auth provider {config[CONF_TYPE]} found. " + "Please add unique IDs " + "if you want to have the same auth provider twice" + ) + config_keys.add(key) + return configs + + +def _no_duplicate_auth_mfa_module( + configs: Sequence[dict[str, Any]], +) -> Sequence[dict[str, Any]]: + """No duplicate auth mfa module item allowed in a list. + + Each type of mfa module can only have one config without optional id. + A global unique id is required if same type of mfa module used multiple + times. + Note: this is different than auth provider + """ + config_keys: set[str] = set() + for config in configs: + key = config.get(CONF_ID, config[CONF_TYPE]) + if key in config_keys: + raise vol.Invalid( + f"Duplicate mfa module {config[CONF_TYPE]} found. " + "Please add unique IDs " + "if you want to have the same mfa module twice" + ) + config_keys.add(key) + return configs + + +def _filter_bad_internal_external_urls(conf: dict) -> dict: + """Filter internal/external URL with a path.""" + for key in CONF_INTERNAL_URL, CONF_EXTERNAL_URL: + if key in conf and urlparse(conf[key]).path not in ("", "/"): + # We warn but do not fix, because if this was incorrectly configured, + # adjusting this value might impact security. + _LOGGER.warning( + "Invalid %s set. It's not allowed to have a path (/bla)", key + ) + + return conf + + +# Schema for all packages element +_PACKAGES_CONFIG_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list)}) + +# Schema for individual package definition +_PACKAGE_DEFINITION_SCHEMA = vol.Schema({cv.string: vol.Any(dict, list, None)}) + +_CUSTOMIZE_DICT_SCHEMA = vol.Schema( + { + vol.Optional(ATTR_FRIENDLY_NAME): cv.string, + vol.Optional(ATTR_HIDDEN): cv.boolean, + vol.Optional(ATTR_ASSUMED_STATE): cv.boolean, + }, + extra=vol.ALLOW_EXTRA, +) + +_CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( + { + vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema( + {cv.entity_id: _CUSTOMIZE_DICT_SCHEMA} + ), + vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema( + {cv.string: _CUSTOMIZE_DICT_SCHEMA} + ), + vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema( + {cv.string: _CUSTOMIZE_DICT_SCHEMA} + ), + } +) + + +def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: + if currency not in HISTORIC_CURRENCIES: + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + "historic_currency", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="historic_currency", + translation_placeholders={"currency": currency}, + ) + + +def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None: + if country is not None: + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "country_not_configured") + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + "country_not_configured", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="country_not_configured", + ) + + +def _validate_currency(data: Any) -> Any: + try: + return cv.currency(data) + except vol.InInvalid: + with suppress(vol.InInvalid): + return cv.historic_currency(data) + raise + + +def 
_validate_stun_or_turn_url(value: Any) -> str: + """Validate an URL.""" + url_in = str(value) + url = urlparse(url_in) + + if url.scheme not in ("stun", "stuns", "turn", "turns"): + raise vol.Invalid("invalid url") + return url_in + + +CORE_CONFIG_SCHEMA = vol.All( + _CUSTOMIZE_CONFIG_SCHEMA.extend( + { + CONF_NAME: vol.Coerce(str), + CONF_LATITUDE: cv.latitude, + CONF_LONGITUDE: cv.longitude, + CONF_ELEVATION: vol.Coerce(int), + CONF_RADIUS: cv.positive_int, + vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, + CONF_UNIT_SYSTEM: validate_unit_system, + CONF_TIME_ZONE: cv.time_zone, + vol.Optional(CONF_INTERNAL_URL): cv.url, + vol.Optional(CONF_EXTERNAL_URL): cv.url, + vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All( + cv.ensure_list, [vol.IsDir()] + ), + vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All( + cv.ensure_list, [vol.IsDir()] + ), + vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All( + cv.ensure_list, [cv.url] + ), + vol.Optional(CONF_PACKAGES, default={}): _PACKAGES_CONFIG_SCHEMA, + vol.Optional(CONF_AUTH_PROVIDERS): vol.All( + cv.ensure_list, + [ + auth_providers.AUTH_PROVIDER_SCHEMA.extend( + { + CONF_TYPE: vol.NotIn( + ["insecure_example"], + ( + "The insecure_example auth provider" + " is for testing only." + ), + ) + } + ) + ], + _no_duplicate_auth_provider, + ), + vol.Optional(CONF_AUTH_MFA_MODULES): vol.All( + cv.ensure_list, + [ + auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend( + { + CONF_TYPE: vol.NotIn( + ["insecure_example"], + "The insecure_example mfa module is for testing only.", + ) + } + ) + ], + _no_duplicate_auth_mfa_module, + ), + vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()), + vol.Remove(CONF_LEGACY_TEMPLATES): cv.boolean, + vol.Optional(CONF_CURRENCY): _validate_currency, + vol.Optional(CONF_COUNTRY): cv.country, + vol.Optional(CONF_LANGUAGE): cv.language, + vol.Optional(CONF_DEBUG): cv.boolean, + vol.Optional(CONF_WEBRTC): vol.Schema( + { + vol.Required(CONF_ICE_SERVERS): vol.All( + cv.ensure_list, + [ + vol.Schema( + { + vol.Required(CONF_URL): vol.All( + cv.ensure_list, [_validate_stun_or_turn_url] + ), + vol.Optional(CONF_USERNAME): cv.string, + vol.Optional(CONF_CREDENTIAL): cv.string, + } + ) + ], + ) + } + ), + } + ), + _filter_bad_internal_external_urls, +) + + +async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None: + """Process the [homeassistant] section from the configuration. + + This method is a coroutine. + """ + # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir + # so we need to run it in an executor job. + config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) + + # Only load auth during startup. 
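# Sketch of a [homeassistant] configuration dict the new webrtc/ice_servers
# schema above is meant to accept; hostnames and credentials are invented
# placeholders. _validate_stun_or_turn_url only admits stun/stuns/turn/turns
# URLs, and async_process_ha_core_config later maps each entry to an
# RTCIceServer(url, username, credential).
from homeassistant.core_config import CORE_CONFIG_SCHEMA

EXAMPLE_CORE_CONFIG = {
    "webrtc": {
        "ice_servers": [
            {"url": "stun:stun.example.com:3478"},
            {
                "url": ["turn:turn.example.com:3478", "turns:turn.example.com:5349"],
                "username": "ha-user",
                "credential": "super-secret",
            },
        ]
    },
}

# Raises vol.Invalid for unsupported schemes (e.g. an https: URL); on success a
# single URL string is normalized into a one-element list by cv.ensure_list.
VALIDATED_CONFIG = CORE_CONFIG_SCHEMA(EXAMPLE_CORE_CONFIG)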
+ if not hasattr(hass, "auth"): + if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: + auth_conf = [{"type": "homeassistant"}] + + mfa_conf = config.get( + CONF_AUTH_MFA_MODULES, + [{"type": "totp", "id": "totp", "name": "Authenticator app"}], + ) + + setattr( + hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf) + ) + + await hass.config.async_load() + + hac = hass.config + + if any( + k in config + for k in ( + CONF_COUNTRY, + CONF_CURRENCY, + CONF_ELEVATION, + CONF_EXTERNAL_URL, + CONF_INTERNAL_URL, + CONF_LANGUAGE, + CONF_LATITUDE, + CONF_LONGITUDE, + CONF_NAME, + CONF_RADIUS, + CONF_TIME_ZONE, + CONF_UNIT_SYSTEM, + ) + ): + hac.config_source = ConfigSource.YAML + + for key, attr in ( + (CONF_COUNTRY, "country"), + (CONF_CURRENCY, "currency"), + (CONF_ELEVATION, "elevation"), + (CONF_EXTERNAL_URL, "external_url"), + (CONF_INTERNAL_URL, "internal_url"), + (CONF_LANGUAGE, "language"), + (CONF_LATITUDE, "latitude"), + (CONF_LONGITUDE, "longitude"), + (CONF_MEDIA_DIRS, "media_dirs"), + (CONF_NAME, "location_name"), + (CONF_RADIUS, "radius"), + ): + if key in config: + setattr(hac, attr, config[key]) + + if config.get(CONF_DEBUG): + hac.debug = True + + if CONF_WEBRTC in config: + hac.webrtc.ice_servers = [ + RTCIceServer( + server[CONF_URL], + server.get(CONF_USERNAME), + server.get(CONF_CREDENTIAL), + ) + for server in config[CONF_WEBRTC][CONF_ICE_SERVERS] + ] + + _raise_issue_if_historic_currency(hass, hass.config.currency) + _raise_issue_if_no_country(hass, hass.config.country) + + if CONF_TIME_ZONE in config: + await hac.async_set_time_zone(config[CONF_TIME_ZONE]) + + if CONF_MEDIA_DIRS not in config: + if is_docker_env(): + hac.media_dirs = {"local": "/media"} + else: + hac.media_dirs = {"local": hass.config.path("media")} + + # Init whitelist external dir + hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()} + if CONF_ALLOWLIST_EXTERNAL_DIRS in config: + hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])) + + elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config: + _LOGGER.warning( + "Key %s has been replaced with %s. Please update your config", + LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, + CONF_ALLOWLIST_EXTERNAL_DIRS, + ) + hac.allowlist_external_dirs.update( + set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS]) + ) + + # Init whitelist external URL list – make sure to add / to every URL that doesn't + # already have it so that we can properly test "path ownership" + if CONF_ALLOWLIST_EXTERNAL_URLS in config: + hac.allowlist_external_urls.update( + url if url.endswith("/") else f"{url}/" + for url in config[CONF_ALLOWLIST_EXTERNAL_URLS] + ) + + # Customize + cust_exact = dict(config[CONF_CUSTOMIZE]) + cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN]) + cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) + + for name, pkg in config[CONF_PACKAGES].items(): + if (pkg_cust := pkg.get(HOMEASSISTANT_DOMAIN)) is None: + continue + + try: + pkg_cust = _CUSTOMIZE_CONFIG_SCHEMA(pkg_cust) + except vol.Invalid: + _LOGGER.warning("Package %s contains invalid customize", name) + continue + + cust_exact.update(pkg_cust[CONF_CUSTOMIZE]) + cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN]) + cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB]) + + hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob) + + if CONF_UNIT_SYSTEM in config: + hac.units = get_unit_system(config[CONF_UNIT_SYSTEM]) + + +class _ComponentSet(set[str]): + """Set of loaded components. 
+ + This set contains both top level components and platforms. + + Examples: + `light`, `switch`, `hue`, `mjpeg.camera`, `universal.media_player`, + `homeassistant.scene` + + The top level components set only contains the top level components. + + The all components set contains all components, including platform + based components. + + """ + + def __init__( + self, top_level_components: set[str], all_components: set[str] + ) -> None: + """Initialize the component set.""" + self._top_level_components = top_level_components + self._all_components = all_components + + def add(self, component: str) -> None: + """Add a component to the store.""" + if "." not in component: + self._top_level_components.add(component) + self._all_components.add(component) + else: + platform, _, domain = component.partition(".") + if domain in BASE_PLATFORMS: + self._all_components.add(platform) + return super().add(component) + + def remove(self, component: str) -> None: + """Remove a component from the store.""" + if "." in component: + raise ValueError("_ComponentSet does not support removing sub-components") + self._top_level_components.remove(component) + return super().remove(component) + + def discard(self, component: str) -> None: + """Remove a component from the store.""" + raise NotImplementedError("_ComponentSet does not support discard, use remove") + + +class Config: + """Configuration settings for Home Assistant.""" + + _store: Config._ConfigStore + + def __init__(self, hass: HomeAssistant, config_dir: str) -> None: + """Initialize a new config object.""" + # pylint: disable-next=import-outside-toplevel + from .components.zone import DEFAULT_RADIUS + + self.hass = hass + + self.latitude: float = 0 + self.longitude: float = 0 + + self.elevation: int = 0 + """Elevation (always in meters regardless of the unit system).""" + + self.radius: int = DEFAULT_RADIUS + """Radius of the Home Zone (always in meters regardless of the unit system).""" + + self.debug: bool = False + self.location_name: str = "Home" + self.time_zone: str = "UTC" + self.units: UnitSystem = METRIC_SYSTEM + self.internal_url: str | None = None + self.external_url: str | None = None + self.currency: str = "EUR" + self.country: str | None = None + self.language: str = "en" + + self.config_source: ConfigSource = ConfigSource.DEFAULT + + # If True, pip install is skipped for requirements on startup + self.skip_pip: bool = False + + # List of packages to skip when installing requirements on startup + self.skip_pip_packages: list[str] = [] + + # Set of loaded top level components + # This set is updated by _ComponentSet + # and should not be modified directly + self.top_level_components: set[str] = set() + + # Set of all loaded components including platform + # based components + self.all_components: set[str] = set() + + # Set of loaded components + self.components: _ComponentSet = _ComponentSet( + self.top_level_components, self.all_components + ) + + # API (HTTP) server configuration + self.api: ApiConfig | None = None + + # Directory that holds the configuration + self.config_dir: str = config_dir + + # List of allowed external dirs to access + self.allowlist_external_dirs: set[str] = set() + + # List of allowed external URLs that integrations may use + self.allowlist_external_urls: set[str] = set() + + # Dictionary of Media folders that integrations may use + self.media_dirs: dict[str, str] = {} + + # If Home Assistant is running in recovery mode + self.recovery_mode: bool = False + + # Use legacy template behavior + 
self.legacy_templates: bool = False + + # If Home Assistant is running in safe mode + self.safe_mode: bool = False + + self.webrtc = RTCConfiguration() + + def async_initialize(self) -> None: + """Finish initializing a config object. + + This must be called before the config object is used. + """ + self._store = self._ConfigStore(self.hass) + + def distance(self, lat: float, lon: float) -> float | None: + """Calculate distance from Home Assistant. + + Async friendly. + """ + return self.units.length( + location.distance(self.latitude, self.longitude, lat, lon), + UnitOfLength.METERS, + ) + + def path(self, *path: str) -> str: + """Generate path to the file within the configuration directory. + + Async friendly. + """ + return os.path.join(self.config_dir, *path) + + def is_allowed_external_url(self, url: str) -> bool: + """Check if an external URL is allowed.""" + parsed_url = f"{yarl.URL(url)!s}/" + + return any( + allowed + for allowed in self.allowlist_external_urls + if parsed_url.startswith(allowed) + ) + + def is_allowed_path(self, path: str) -> bool: + """Check if the path is valid for access from outside. + + This function does blocking I/O and should not be called from the event loop. + Use hass.async_add_executor_job to schedule it on the executor. + """ + assert path is not None + + thepath = pathlib.Path(path) + try: + # The file path does not have to exist (it's parent should) + if thepath.exists(): + thepath = thepath.resolve() + else: + thepath = thepath.parent.resolve() + except (FileNotFoundError, RuntimeError, PermissionError): + return False + + for allowed_path in self.allowlist_external_dirs: + try: + thepath.relative_to(allowed_path) + except ValueError: + pass + else: + return True + + return False + + def as_dict(self) -> dict[str, Any]: + """Return a dictionary representation of the configuration. + + Async friendly. + """ + allowlist_external_dirs = list(self.allowlist_external_dirs) + return { + "allowlist_external_dirs": allowlist_external_dirs, + "allowlist_external_urls": list(self.allowlist_external_urls), + "components": list(self.components), + "config_dir": self.config_dir, + "config_source": self.config_source, + "country": self.country, + "currency": self.currency, + "debug": self.debug, + "elevation": self.elevation, + "external_url": self.external_url, + "internal_url": self.internal_url, + "language": self.language, + "latitude": self.latitude, + "location_name": self.location_name, + "longitude": self.longitude, + "radius": self.radius, + "recovery_mode": self.recovery_mode, + "safe_mode": self.safe_mode, + "state": self.hass.state.value, + "time_zone": self.time_zone, + "unit_system": self.units.as_dict(), + "version": __version__, + # legacy, backwards compat + "whitelist_external_dirs": allowlist_external_dirs, + } + + async def async_set_time_zone(self, time_zone_str: str) -> None: + """Help to set the time zone.""" + if time_zone := await dt_util.async_get_time_zone(time_zone_str): + self.time_zone = time_zone_str + dt_util.set_default_time_zone(time_zone) + else: + raise ValueError(f"Received invalid time zone {time_zone_str}") + + def set_time_zone(self, time_zone_str: str) -> None: + """Set the time zone. + + This is a legacy method that should not be used in new code. + Use async_set_time_zone instead. + + It will be removed in Home Assistant 2025.6. 
+ """ + report_usage( + "set the time zone using set_time_zone instead of async_set_time_zone" + " which will stop working in Home Assistant 2025.6", + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.ERROR, + ) + if time_zone := dt_util.get_time_zone(time_zone_str): + self.time_zone = time_zone_str + dt_util.set_default_time_zone(time_zone) + else: + raise ValueError(f"Received invalid time zone {time_zone_str}") + + async def _async_update( + self, + *, + country: str | UndefinedType | None = UNDEFINED, + currency: str | None = None, + elevation: int | None = None, + external_url: str | UndefinedType | None = UNDEFINED, + internal_url: str | UndefinedType | None = UNDEFINED, + language: str | None = None, + latitude: float | None = None, + location_name: str | None = None, + longitude: float | None = None, + radius: int | None = None, + source: ConfigSource, + time_zone: str | None = None, + unit_system: str | None = None, + ) -> None: + """Update the configuration from a dictionary.""" + self.config_source = source + if country is not UNDEFINED: + self.country = country + if currency is not None: + self.currency = currency + if elevation is not None: + self.elevation = elevation + if external_url is not UNDEFINED: + self.external_url = external_url + if internal_url is not UNDEFINED: + self.internal_url = internal_url + if language is not None: + self.language = language + if latitude is not None: + self.latitude = latitude + if location_name is not None: + self.location_name = location_name + if longitude is not None: + self.longitude = longitude + if radius is not None: + self.radius = radius + if time_zone is not None: + await self.async_set_time_zone(time_zone) + if unit_system is not None: + try: + self.units = get_unit_system(unit_system) + except ValueError: + self.units = METRIC_SYSTEM + + async def async_update(self, **kwargs: Any) -> None: + """Update the configuration from a dictionary.""" + await self._async_update(source=ConfigSource.STORAGE, **kwargs) + await self._async_store() + self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) + + _raise_issue_if_historic_currency(self.hass, self.currency) + _raise_issue_if_no_country(self.hass, self.country) + + async def async_load(self) -> None: + """Load [homeassistant] core config.""" + if not (data := await self._store.async_load()): + return + + # In 2021.9 we fixed validation to disallow a path (because that's never + # correct) but this data still lives in storage, so we print a warning. + if data.get("external_url") and urlparse(data["external_url"]).path not in ( + "", + "/", + ): + _LOGGER.warning("Invalid external_url set. It's not allowed to have a path") + + if data.get("internal_url") and urlparse(data["internal_url"]).path not in ( + "", + "/", + ): + _LOGGER.warning("Invalid internal_url set. 
It's not allowed to have a path") + + await self._async_update( + source=ConfigSource.STORAGE, + latitude=data.get("latitude"), + longitude=data.get("longitude"), + elevation=data.get("elevation"), + unit_system=data.get("unit_system_v2"), + location_name=data.get("location_name"), + time_zone=data.get("time_zone"), + external_url=data.get("external_url", UNDEFINED), + internal_url=data.get("internal_url", UNDEFINED), + currency=data.get("currency"), + country=data.get("country"), + language=data.get("language"), + radius=data["radius"], + ) + + async def _async_store(self) -> None: + """Store [homeassistant] core config.""" + data = { + "latitude": self.latitude, + "longitude": self.longitude, + "elevation": self.elevation, + # We don't want any integrations to use the name of the unit system + # so we are using the private attribute here + "unit_system_v2": self.units._name, # noqa: SLF001 + "location_name": self.location_name, + "time_zone": self.time_zone, + "external_url": self.external_url, + "internal_url": self.internal_url, + "currency": self.currency, + "country": self.country, + "language": self.language, + "radius": self.radius, + } + await self._store.async_save(data) + + class _ConfigStore(Store[dict[str, Any]]): + """Class to help storing Config data.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize storage class.""" + super().__init__( + hass, + CORE_STORAGE_VERSION, + CORE_STORAGE_KEY, + private=True, + atomic_writes=True, + minor_version=CORE_STORAGE_MINOR_VERSION, + ) + self._original_unit_system: str | None = None # from old store 1.1 + + async def _async_migrate_func( + self, + old_major_version: int, + old_minor_version: int, + old_data: dict[str, Any], + ) -> dict[str, Any]: + """Migrate to the new version.""" + + # pylint: disable-next=import-outside-toplevel + from .components.zone import DEFAULT_RADIUS + + data = old_data + if old_major_version == 1 and old_minor_version < 2: + # In 1.2, we remove support for "imperial", replaced by "us_customary" + # Using a new key to allow rollback + self._original_unit_system = data.get("unit_system") + data["unit_system_v2"] = self._original_unit_system + if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL: + data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY + if old_major_version == 1 and old_minor_version < 3: + # In 1.3, we add the key "language", initialize it from the + # owner account. + data["language"] = "en" + try: + owner = await self.hass.auth.async_get_owner() + if owner is not None: + # pylint: disable-next=import-outside-toplevel + from .components.frontend import storage as frontend_store + + _, owner_data = await frontend_store.async_user_store( + self.hass, owner.id + ) + + if ( + "language" in owner_data + and "language" in owner_data["language"] + ): + with suppress(vol.InInvalid): + data["language"] = cv.language( + owner_data["language"]["language"] + ) + # pylint: disable-next=broad-except + except Exception: + _LOGGER.exception("Unexpected error during core config migration") + if old_major_version == 1 and old_minor_version < 4: + # In 1.4, we add the key "radius", initialize it with the default. 
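# Minimal, self-contained sketch of the Store migration pattern used by
# _ConfigStore here: bump minor_version when a key is added and backfill it in
# _async_migrate_func so data saved by older releases keeps loading. The store
# key, version numbers and default value below are invented for the example.
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.storage import Store


class ExampleSettingsStore(Store[dict[str, Any]]):
    """Store with a single minor-version migration."""

    def __init__(self, hass: HomeAssistant) -> None:
        super().__init__(hass, 1, "example.settings", minor_version=2)

    async def _async_migrate_func(
        self,
        old_major_version: int,
        old_minor_version: int,
        old_data: dict[str, Any],
    ) -> dict[str, Any]:
        """Backfill keys added in newer minor versions."""
        data = old_data
        if old_major_version == 1 and old_minor_version < 2:
            # "radius" was introduced in 1.2; give old stores the default.
            data.setdefault("radius", 100)
        if old_major_version > 1:
            raise NotImplementedError
        return data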
+ data.setdefault("radius", DEFAULT_RADIUS) + + if old_major_version > 1: + raise NotImplementedError + return data + + async def async_save(self, data: dict[str, Any]) -> None: + if self._original_unit_system: + data["unit_system"] = self._original_unit_system + return await super().async_save(data) diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index de08a178a70ebe..9d041c9b8d3308 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -26,7 +26,7 @@ check_if_deprecated_constant, dir_with_deprecated_constants, ) -from .helpers.frame import report +from .helpers.frame import ReportBehavior, report_usage from .loader import async_suggest_report_issue from .util import uuid as uuid_util @@ -87,7 +87,10 @@ class FlowResultType(StrEnum): } -_FlowResultT = TypeVar("_FlowResultT", bound="FlowResult[Any]", default="FlowResult") +_FlowContextT = TypeVar("_FlowContextT", bound="FlowContext", default="FlowContext") +_FlowResultT = TypeVar( + "_FlowResultT", bound="FlowResult[Any, Any]", default="FlowResult" +) _HandlerT = TypeVar("_HandlerT", default=str) @@ -139,10 +142,17 @@ def __init__( self.description_placeholders = description_placeholders -class FlowResult(TypedDict, Generic[_HandlerT], total=False): +class FlowContext(TypedDict, total=False): + """Typed context dict.""" + + show_advanced_options: bool + source: str + + +class FlowResult(TypedDict, Generic[_FlowContextT, _HandlerT], total=False): """Typed result dict.""" - context: dict[str, Any] + context: _FlowContextT data_schema: vol.Schema | None data: Mapping[str, Any] description_placeholders: Mapping[str, str | None] | None @@ -189,7 +199,7 @@ def _map_error_to_schema_errors( schema_errors[path_part_str] = error.error_message -class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): +class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): """Manage all the flows that are in progress.""" _flow_result: type[_FlowResultT] = FlowResult # type: ignore[assignment] @@ -201,12 +211,14 @@ def __init__( """Initialize the flow manager.""" self.hass = hass self._preview: set[_HandlerT] = set() - self._progress: dict[str, FlowHandler[_FlowResultT, _HandlerT]] = {} + self._progress: dict[ + str, FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] + ] = {} self._handler_progress_index: defaultdict[ - _HandlerT, set[FlowHandler[_FlowResultT, _HandlerT]] + _HandlerT, set[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]] ] = defaultdict(set) self._init_data_process_index: defaultdict[ - type, set[FlowHandler[_FlowResultT, _HandlerT]] + type, set[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]] ] = defaultdict(set) @abc.abstractmethod @@ -214,9 +226,9 @@ async def async_create_flow( self, handler_key: _HandlerT, *, - context: dict[str, Any] | None = None, + context: _FlowContextT | None = None, data: dict[str, Any] | None = None, - ) -> FlowHandler[_FlowResultT, _HandlerT]: + ) -> FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]: """Create a flow for specified handler. Handler key is the domain of the component that we want to set up. @@ -224,7 +236,9 @@ async def async_create_flow( @abc.abstractmethod async def async_finish_flow( - self, flow: FlowHandler[_FlowResultT, _HandlerT], result: _FlowResultT + self, + flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], + result: _FlowResultT, ) -> _FlowResultT: """Finish a data entry flow. 
@@ -233,7 +247,9 @@ async def async_finish_flow( """ async def async_post_init( - self, flow: FlowHandler[_FlowResultT, _HandlerT], result: _FlowResultT + self, + flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], + result: _FlowResultT, ) -> None: """Entry has finished executing its first step asynchronously.""" @@ -288,7 +304,7 @@ def async_progress_by_init_data_type( @callback def _async_progress_by_handler( self, handler: _HandlerT, match_context: dict[str, Any] | None - ) -> list[FlowHandler[_FlowResultT, _HandlerT]]: + ) -> list[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]]: """Return the flows in progress by handler. If match_context is specified, only return flows with a context that @@ -307,12 +323,12 @@ async def async_init( self, handler: _HandlerT, *, - context: dict[str, Any] | None = None, + context: _FlowContextT | None = None, data: Any = None, ) -> _FlowResultT: """Start a data entry flow.""" if context is None: - context = {} + context = cast(_FlowContextT, {}) flow = await self.async_create_flow(handler, context=context, data=data) if not flow: raise UnknownFlow("Flow was not created") @@ -452,7 +468,7 @@ def async_abort(self, flow_id: str) -> None: @callback def _async_add_flow_progress( - self, flow: FlowHandler[_FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] ) -> None: """Add a flow to in progress.""" if flow.init_data is not None: @@ -462,7 +478,7 @@ def _async_add_flow_progress( @callback def _async_remove_flow_from_index( - self, flow: FlowHandler[_FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] ) -> None: """Remove a flow from in progress.""" if flow.init_data is not None: @@ -489,7 +505,7 @@ def _async_remove_flow_progress(self, flow_id: str) -> None: async def _async_handle_step( self, - flow: FlowHandler[_FlowResultT, _HandlerT], + flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], step_id: str, user_input: dict | BaseServiceInfo | None, ) -> _FlowResultT: @@ -514,12 +530,12 @@ async def _async_handle_step( if not isinstance(result["type"], FlowResultType): result["type"] = FlowResultType(result["type"]) # type: ignore[unreachable] - report( + report_usage( ( "does not use FlowResultType enum for data entry flow result type. 
" "This is deprecated and will stop working in Home Assistant 2025.1" ), - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) if ( @@ -566,7 +582,7 @@ def schedule_configure(_: asyncio.Task) -> None: return result def _raise_if_step_does_not_exist( - self, flow: FlowHandler[_FlowResultT, _HandlerT], step_id: str + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT], step_id: str ) -> None: """Raise if the step does not exist.""" method = f"async_step_{step_id}" @@ -578,7 +594,7 @@ def _raise_if_step_does_not_exist( ) async def _async_setup_preview( - self, flow: FlowHandler[_FlowResultT, _HandlerT] + self, flow: FlowHandler[_FlowContextT, _FlowResultT, _HandlerT] ) -> None: """Set up preview for a flow handler.""" if flow.handler not in self._preview: @@ -588,7 +604,7 @@ async def _async_setup_preview( @callback def _async_flow_handler_to_flow_result( self, - flows: Iterable[FlowHandler[_FlowResultT, _HandlerT]], + flows: Iterable[FlowHandler[_FlowContextT, _FlowResultT, _HandlerT]], include_uninitialized: bool, ) -> list[_FlowResultT]: """Convert a list of FlowHandler to a partial FlowResult that can be serialized.""" @@ -610,7 +626,7 @@ def _async_flow_handler_to_flow_result( ] -class FlowHandler(Generic[_FlowResultT, _HandlerT]): +class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): """Handle a data entry flow.""" _flow_result: type[_FlowResultT] = FlowResult # type: ignore[assignment] @@ -624,7 +640,7 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]): hass: HomeAssistant = None # type: ignore[assignment] handler: _HandlerT = None # type: ignore[assignment] # Ensure the attribute has a subscriptable, but immutable, default value. - context: dict[str, Any] = MappingProxyType({}) # type: ignore[assignment] + context: _FlowContextT = MappingProxyType({}) # type: ignore[assignment] # Set by _async_create_flow callback init_step = "init" @@ -643,12 +659,12 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]): @property def source(self) -> str | None: """Source that initialized the flow.""" - return self.context.get("source", None) # type: ignore[no-any-return] + return self.context.get("source", None) # type: ignore[return-value] @property def show_advanced_options(self) -> bool: """If we should show advanced options.""" - return self.context.get("show_advanced_options", False) # type: ignore[no-any-return] + return self.context.get("show_advanced_options", False) # type: ignore[return-value] def add_suggested_values_to_schema( self, data_schema: vol.Schema, suggested_values: Mapping[str, Any] | None diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index 2ea604a91a2a28..c4612898cb2a84 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -279,6 +279,11 @@ ], "manufacturer_id": 76, }, + { + "connectable": True, + "domain": "husqvarna_automower_ble", + "service_uuid": "98bd0001-0b0e-421a-84e5-ddbf75dc6de4", + }, { "domain": "ibeacon", "manufacturer_data_start": [ diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index f399b0922f13ca..cbd30b560ce755 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -221,7 +221,6 @@ "gios", "github", "glances", - "go2rtc", "goalzero", "gogogate2", "goodwe", @@ -265,6 +264,7 @@ "huisbaasje", "hunterdouglas_powerview", "husqvarna_automower", + "husqvarna_automower_ble", "huum", "hvv_departures", "hydrawise", @@ -327,6 +327,7 @@ "lektrico", 
"lg_netcast", "lg_soundbar", + "lg_thinq", "lidarr", "lifx", "linear_garage_door", @@ -335,6 +336,7 @@ "litterrobot", "livisi", "local_calendar", + "local_file", "local_ip", "local_todo", "locative", @@ -382,12 +384,14 @@ "mpd", "mqtt", "mullvad", + "music_assistant", "mutesync", "mysensors", "mystrom", "myuplink", "nam", "nanoleaf", + "nasweb", "neato", "nest", "netatmo", @@ -404,6 +408,7 @@ "nina", "nmap_tracker", "nobo_hub", + "nordpool", "notion", "nuheat", "nuki", @@ -418,6 +423,7 @@ "oncue", "ondilo_ico", "onewire", + "onkyo", "onvif", "open_meteo", "openai_conversation", @@ -438,6 +444,7 @@ "ovo_energy", "owntracks", "p1_monitor", + "palazzetti", "panasonic_viera", "peco", "pegel_online", @@ -539,6 +546,7 @@ "smart_meter_texas", "smartthings", "smarttub", + "smarty", "smhi", "smlight", "sms", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 154ca93545cb59..7dacf9a0bcabba 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -37,10 +37,6 @@ "hostname": "august*", "macaddress": "E076D0*", }, - { - "domain": "awair", - "macaddress": "70886B1*", - }, { "domain": "axis", "registered_devices": True, @@ -280,6 +276,18 @@ "hostname": "polisy*", "macaddress": "000DB9*", }, + { + "domain": "lamarzocco", + "hostname": "gs[0-9][0-9][0-9][0-9][0-9][0-9]", + }, + { + "domain": "lamarzocco", + "hostname": "lm[0-9][0-9][0-9][0-9][0-9][0-9]", + }, + { + "domain": "lamarzocco", + "hostname": "mr[0-9][0-9][0-9][0-9][0-9][0-9]", + }, { "domain": "lametric", "registered_devices": True, @@ -371,6 +379,15 @@ "hostname": "gateway*", "macaddress": "F8811A*", }, + { + "domain": "palazzetti", + "hostname": "connbox*", + "macaddress": "40F3857*", + }, + { + "domain": "palazzetti", + "registered_devices": True, + }, { "domain": "powerwall", "hostname": "1118431-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 3243d1677ae29d..a1fdb9478f3ab6 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -698,12 +698,6 @@ "config_flow": false, "iot_class": "cloud_polling" }, - "bloomsky": { - "name": "BloomSky", - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_polling" - }, "blue_current": { "name": "Blue Current", "integration_type": "hub", @@ -958,7 +952,8 @@ "name": "Cloudflare", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "single_config_entry": true }, "cmus": { "name": "cmus", @@ -1160,7 +1155,8 @@ "demo": { "integration_type": "hub", "config_flow": false, - "iot_class": "calculated" + "iot_class": "calculated", + "single_config_entry": true }, "denon": { "name": "Denon", @@ -1403,7 +1399,8 @@ "name": "Duotecno", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "duquesne_light": { "name": "Duquesne Light", @@ -1461,7 +1458,8 @@ "name": "ecobee", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ecoforest": { "name": "Ecoforest", @@ -1659,7 +1657,8 @@ "name": "EnOcean", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "enphase_envoy": { "name": "Enphase Envoy", @@ -2247,13 +2246,6 @@ } } }, - "go2rtc": { - "name": "go2rtc", - "integration_type": "hub", - "config_flow": true, - 
"iot_class": "local_polling", - "single_config_entry": true - }, "goalzero": { "name": "Goal Zero Yeti", "integration_type": "device", @@ -2470,7 +2462,8 @@ "name": "Home Assistant Supervisor", "integration_type": "hub", "config_flow": false, - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true }, "havana_shade": { "name": "Havana Shade", @@ -2685,11 +2678,22 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, - "husqvarna_automower": { - "name": "Husqvarna Automower", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push" + "husqvarna": { + "name": "Husqvarna", + "integrations": { + "husqvarna_automower": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "Husqvarna Automower" + }, + "husqvarna_automower_ble": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling", + "name": "Husqvarna Automower BLE" + } + } }, "huum": { "name": "Huum", @@ -2731,7 +2735,8 @@ "name": "Jandy iAqualink", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ibm": { "name": "IBM", @@ -2860,7 +2865,8 @@ "name": "Insteon", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "intellifire": { "name": "IntelliFire", @@ -2959,7 +2965,8 @@ "name": "International Space Station (ISS)", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ista_ecotrend": { "name": "ista EcoTrend", @@ -3098,7 +3105,8 @@ "name": "Everything but the Kitchen Sink", "integration_type": "hub", "config_flow": false, - "iot_class": "calculated" + "iot_class": "calculated", + "single_config_entry": true }, "kiwi": { "name": "KIWI", @@ -3212,7 +3220,8 @@ "name": "Launch Library", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "laundrify": { "name": "laundrify", @@ -3276,6 +3285,12 @@ "iot_class": "local_polling", "name": "LG Soundbars" }, + "lg_thinq": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "LG ThinQ" + }, "webostv": { "integration_type": "hub", "config_flow": true, @@ -3354,7 +3369,8 @@ "name": "LiteJet", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "litterrobot": { "name": "Litter-Robot", @@ -3382,13 +3398,14 @@ "local_file": { "name": "Local File", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "local_ip": { "integration_type": "hub", "config_flow": true, - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true }, "local_todo": { "integration_type": "hub", @@ -3927,6 +3944,12 @@ "iot_class": "cloud_polling", "single_config_entry": true }, + "music_assistant": { + "name": "Music Assistant", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "mutesync": { "name": "mutesync", "integration_type": "hub", @@ -3993,6 +4016,12 @@ "config_flow": true, "iot_class": "local_push" }, + "nasweb": { + "name": "NASweb", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "neato": { "name": "Neato Botvac", "integration_type": "hub", @@ 
-4158,6 +4187,13 @@ "config_flow": true, "iot_class": "local_push" }, + "nordpool": { + "name": "Nord Pool", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "single_config_entry": true + }, "norway_air": { "name": "Om Luftkvalitet i Norge (Norway Air)", "integration_type": "hub", @@ -4239,7 +4275,8 @@ "name": "NZBGet", "integration_type": "hub", "config_flow": true, - "iot_class": "local_polling" + "iot_class": "local_polling", + "single_config_entry": true }, "oasa_telematics": { "name": "OASA Telematics", @@ -4287,7 +4324,8 @@ "name": "Hayward Omnilogic", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "oncue": { "name": "Oncue by Kohler", @@ -4299,7 +4337,8 @@ "name": "Ondilo ICO", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "onewire": { "name": "1-Wire", @@ -4309,8 +4348,8 @@ }, "onkyo": { "name": "Onkyo", - "integration_type": "hub", - "config_flow": false, + "integration_type": "device", + "config_flow": true, "iot_class": "local_push" }, "onvif": { @@ -4507,7 +4546,8 @@ "name": "OwnTracks", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "p1_monitor": { "name": "P1 Monitor", @@ -4515,6 +4555,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "palazzetti": { + "name": "Palazzetti", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "panasonic": { "name": "Panasonic", "integrations": { @@ -4538,11 +4584,6 @@ "config_flow": false, "iot_class": "local_polling" }, - "panel_iframe": { - "name": "iframe Panel", - "integration_type": "hub", - "config_flow": false - }, "pcs_lighting": { "name": "PCS Lighting", "integration_type": "virtual", @@ -4722,7 +4763,8 @@ "profiler": { "name": "Profiler", "integration_type": "hub", - "config_flow": true + "config_flow": true, + "single_config_entry": true }, "progettihwsw": { "name": "ProgettiHWSW Automation", @@ -4937,7 +4979,8 @@ "name": "Radio Browser", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "radiotherm": { "name": "Radio Thermostat", @@ -5112,7 +5155,8 @@ "name": "Rhasspy", "integration_type": "hub", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "ridwell": { "name": "Ridwell", @@ -5654,7 +5698,7 @@ "smarty": { "name": "Salda Smarty", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "smhi": { @@ -7288,7 +7332,6 @@ "iot_class": "calculated" }, "history_stats": { - "name": "History Stats", "integration_type": "helper", "config_flow": true, "iot_class": "local_polling" @@ -7334,13 +7377,11 @@ "iot_class": "calculated" }, "mold_indicator": { - "name": "Mold Indicator", "integration_type": "helper", "config_flow": true, "iot_class": "calculated" }, "random": { - "name": "Random", "integration_type": "helper", "config_flow": true, "iot_class": "calculated" @@ -7350,7 +7391,6 @@ "config_flow": false }, "statistics": { - "name": "Statistics", "integration_type": "helper", "config_flow": true, "iot_class": "local_polling" @@ -7372,7 +7412,6 @@ "iot_class": "local_polling" }, "timer": { - "name": "Timer", "integration_type": "helper", "config_flow": false }, @@ 
-7382,7 +7421,6 @@ "iot_class": "calculated" }, "trend": { - "name": "Trend", "integration_type": "helper", "config_flow": true, "iot_class": "calculated" @@ -7411,6 +7449,7 @@ "google_travel_time", "group", "growatt_server", + "history_stats", "holiday", "homekit_controller", "input_boolean", @@ -7427,20 +7466,25 @@ "min_max", "mobile_app", "moehlenhoff_alpha2", + "mold_indicator", "moon", "nextbus", "nmap_tracker", "plant", "proximity", + "random", "rpi_power", "schedule", "season", "shopping_list", + "statistics", "sun", "switch_as_x", "threshold", "time_date", + "timer", "tod", + "trend", "uptime", "utility_meter", "version", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index f627f1f0f47dd1..1fbd6337fdb0fb 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -267,6 +267,11 @@ } ZEROCONF = { + "_PowerView-G3._tcp.local.": [ + { + "domain": "hunterdouglas_powerview", + }, + ], "_Volumio._tcp.local.": [ { "domain": "volumio", @@ -609,6 +614,12 @@ }, ], "_lutron._tcp.local.": [ + { + "domain": "lutron_caseta", + "properties": { + "SYSTYPE": "hwqs*", + }, + }, { "domain": "lutron_caseta", "properties": { @@ -628,6 +639,11 @@ }, }, ], + "_mass._tcp.local.": [ + { + "domain": "music_assistant", + }, + ], "_matter._tcp.local.": [ { "domain": "matter", @@ -695,11 +711,6 @@ "domain": "plugwise", }, ], - "_powerview-g3._tcp.local.": [ - { - "domain": "hunterdouglas_powerview", - }, - ], "_powerview._tcp.local.": [ { "domain": "hunterdouglas_powerview", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index 2f4c1980468b8c..f01ae325875ddf 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -44,11 +44,13 @@ f"aiohttp/{aiohttp.__version__} Python/{sys.version_info[0]}.{sys.version_info[1]}" ) -ENABLE_CLEANUP_CLOSED = not (3, 11, 1) <= sys.version_info < (3, 11, 4) -# Enabling cleanup closed on python 3.11.1+ leaks memory relatively quickly -# see https://github.com/aio-libs/aiohttp/issues/7252 -# aiohttp interacts poorly with https://github.com/python/cpython/pull/98540 -# The issue was fixed in 3.11.4 via https://github.com/python/cpython/pull/104485 +ENABLE_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < ( + 3, + 13, + 1, +) or sys.version_info < (3, 12, 7) +# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 +# which first appeared in Python 3.12.7 and 3.13.1 WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session" diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 43021fffac50f8..4b5e2f277a020a 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -13,7 +13,6 @@ from homeassistant import loader from homeassistant.config import ( # type: ignore[attr-defined] CONF_PACKAGES, - CORE_CONFIG_SCHEMA, YAML_CONFIG_FILE, config_per_platform, extract_domain_configs, @@ -23,6 +22,7 @@ merge_packages_config, ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core_config import CORE_CONFIG_SCHEMA from homeassistant.exceptions import HomeAssistantError from homeassistant.requirements import ( RequirementsNotFound, diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 98a2cd719314a0..81ac10f86cc838 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ 
-874,7 +874,7 @@ def url_no_path(value: Any) -> str: url_in = url(value) if urlparse(url_in).path not in ("", "/"): - raise vol.Invalid("url it not allowed to have a path component") + raise vol.Invalid("url is not allowed to have a path component") return url_in diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index b2cad292e3d631..adb2062a8ea58e 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -18,7 +18,7 @@ _FlowManagerT = TypeVar( "_FlowManagerT", - bound=data_entry_flow.FlowManager[Any], + bound=data_entry_flow.FlowManager[Any, Any], default=data_entry_flow.FlowManager, ) diff --git a/homeassistant/helpers/deprecation.py b/homeassistant/helpers/deprecation.py index 65e8f4ef97e72a..81f7821ec79253 100644 --- a/homeassistant/helpers/deprecation.py +++ b/homeassistant/helpers/deprecation.py @@ -3,7 +3,8 @@ from __future__ import annotations from collections.abc import Callable -from enum import Enum +from contextlib import suppress +from enum import Enum, EnumType, _EnumDict import functools import inspect import logging @@ -164,6 +165,30 @@ def _print_deprecation_warning_internal( breaks_in_ha_version: str | None, *, log_when_no_integration_is_found: bool, +) -> None: + # Suppress ImportError due to use of deprecated enum in core.py + # Can be removed in HA Core 2025.1 + with suppress(ImportError): + _print_deprecation_warning_internal_impl( + obj_name, + module_name, + replacement, + description, + verb, + breaks_in_ha_version, + log_when_no_integration_is_found=log_when_no_integration_is_found, + ) + + +def _print_deprecation_warning_internal_impl( + obj_name: str, + module_name: str, + replacement: str, + description: str, + verb: str, + breaks_in_ha_version: str | None, + *, + log_when_no_integration_is_found: bool, ) -> None: # pylint: disable=import-outside-toplevel from homeassistant.core import async_get_hass_or_none @@ -338,3 +363,35 @@ def all_with_deprecated_constants(module_globals: dict[str, Any]) -> list[str]: for name in module_globals_keys if name.startswith(_PREFIX_DEPRECATED) ] + + +class EnumWithDeprecatedMembers(EnumType): + """Enum with deprecated members.""" + + def __new__( + mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + cls: str, + bases: tuple[type, ...], + classdict: _EnumDict, + *, + deprecated: dict[str, tuple[str, str]], + **kwds: Any, + ) -> Any: + """Create a new class.""" + classdict["__deprecated__"] = deprecated + return super().__new__(mcs, cls, bases, classdict, **kwds) + + def __getattribute__(cls, name: str) -> Any: + """Warn if accessing a deprecated member.""" + deprecated = super().__getattribute__("__deprecated__") + if name in deprecated: + _print_deprecation_warning_internal( + f"{cls.__name__}.{name}", + cls.__module__, + f"{deprecated[name][0]}", + "enum member", + "used", + deprecated[name][1], + log_when_no_integration_is_found=False, + ) + return super().__getattribute__(name) diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index f05179ccf0aef1..faf4257577d351 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -842,7 +842,6 @@ def async_get_or_create( device.id, allow_collisions=True, add_config_entry_id=config_entry_id, - add_config_entry=config_entry, configuration_url=configuration_url, device_info_type=device_info_type, disabled_by=disabled_by, @@ -870,7 +869,6 @@ def async_update_device( # noqa: C901 
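The EnumWithDeprecatedMembers metaclass added to helpers/deprecation.py above is meant to be used roughly as in the following sketch; the FanSpeed enum and the 2025.6 removal target are purely illustrative:

from enum import Enum

from homeassistant.helpers.deprecation import EnumWithDeprecatedMembers


class FanSpeed(
    Enum,
    metaclass=EnumWithDeprecatedMembers,
    deprecated={"HI": ("FanSpeed.HIGH", "2025.6")},
):
    """Illustrative enum with one deprecated alias."""

    LOW = "low"
    HIGH = "high"
    HI = "high"  # alias kept for backwards compatibility


speed = FanSpeed.HI  # resolves to FanSpeed.HIGH; warns when accessed from integration code
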
self, device_id: str, *, - add_config_entry: ConfigEntry | UndefinedType = UNDEFINED, add_config_entry_id: str | UndefinedType = UNDEFINED, # Temporary flag so we don't blow up when collisions are implicitly introduced # by calls to async_get_or_create. Must not be set by integrations. @@ -905,13 +903,11 @@ def async_update_device( # noqa: C901 config_entries = old.config_entries - if add_config_entry_id is not UNDEFINED and add_config_entry is UNDEFINED: - config_entry = self.hass.config_entries.async_get_entry(add_config_entry_id) - if config_entry is None: + if add_config_entry_id is not UNDEFINED: + if self.hass.config_entries.async_get_entry(add_config_entry_id) is None: raise HomeAssistantError( f"Can't link device to unknown config entry {add_config_entry_id}" ) - add_config_entry = config_entry if not new_connections and not new_identifiers: raise HomeAssistantError( @@ -955,11 +951,11 @@ def async_update_device( # noqa: C901 area = ar.async_get(self.hass).async_get_or_create(suggested_area) area_id = area.id - if add_config_entry is not UNDEFINED: + if add_config_entry_id is not UNDEFINED: primary_entry_id = old.primary_config_entry if ( device_info_type == "primary" - and add_config_entry.entry_id != primary_entry_id + and add_config_entry_id != primary_entry_id ): if ( primary_entry_id is None @@ -970,11 +966,11 @@ def async_update_device( # noqa: C901 ) or primary_entry.domain in LOW_PRIO_CONFIG_ENTRY_DOMAINS ): - new_values["primary_config_entry"] = add_config_entry.entry_id - old_values["primary_config_entry"] = old.primary_config_entry + new_values["primary_config_entry"] = add_config_entry_id + old_values["primary_config_entry"] = primary_entry_id - if add_config_entry.entry_id not in old.config_entries: - config_entries = old.config_entries | {add_config_entry.entry_id} + if add_config_entry_id not in old.config_entries: + config_entries = old.config_entries | {add_config_entry_id} if ( remove_config_entry_id is not UNDEFINED diff --git a/homeassistant/helpers/discovery_flow.py b/homeassistant/helpers/discovery_flow.py index e6596a496e05db..fd41c7ffb44810 100644 --- a/homeassistant/helpers/discovery_flow.py +++ b/homeassistant/helpers/discovery_flow.py @@ -13,7 +13,7 @@ from homeassistant.util.hass_dict import HassKey if TYPE_CHECKING: - from homeassistant.config_entries import ConfigFlowResult + from homeassistant.config_entries import ConfigFlowContext, ConfigFlowResult FLOW_INIT_LIMIT = 20 DISCOVERY_FLOW_DISPATCHER: HassKey[FlowDispatcher] = HassKey( @@ -42,7 +42,7 @@ def from_json_dict(cls, json_dict: dict[str, Any]) -> Self: def async_create_flow( hass: HomeAssistant, domain: str, - context: dict[str, Any], + context: ConfigFlowContext, data: Any, *, discovery_key: DiscoveryKey | None = None, @@ -70,7 +70,7 @@ def async_create_flow( @callback def _async_init_flow( - hass: HomeAssistant, domain: str, context: dict[str, Any], data: Any + hass: HomeAssistant, domain: str, context: ConfigFlowContext, data: Any ) -> Coroutine[None, None, ConfigFlowResult] | None: """Create a discovery flow.""" # Avoid spawning flows that have the same initial discovery data @@ -98,7 +98,7 @@ class PendingFlowKey(NamedTuple): class PendingFlowValue(NamedTuple): """Value for pending flows.""" - context: dict[str, Any] + context: ConfigFlowContext data: Any @@ -137,7 +137,7 @@ async def _async_start(self, event: Event) -> None: await gather_with_limited_concurrency(FLOW_INIT_LIMIT, *init_coros) @callback - def async_create(self, domain: str, context: dict[str, Any], data: Any) -> None: + def 
async_create(self, domain: str, context: ConfigFlowContext, data: Any) -> None: """Create and add or queue a flow.""" key = PendingFlowKey(domain, context["source"]) values = PendingFlowValue(context, data) diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index cc843b6d9b14e0..1f77dd3f95cd7e 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -21,7 +21,6 @@ from propcache import cached_property import voluptuous as vol -from homeassistant.config import DATA_CUSTOMIZE from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_ATTRIBUTION, @@ -49,6 +48,7 @@ get_hassjob_callable_job_type, get_release_channel, ) +from homeassistant.core_config import DATA_CUSTOMIZE from homeassistant.exceptions import ( HomeAssistantError, InvalidStateError, @@ -337,7 +337,9 @@ def _setter(o: Any, val: Any) -> None: Also invalidates the corresponding cached_property by calling delattr on it. """ - if getattr(o, private_attr_name, _SENTINEL) == val: + if ( + old_val := getattr(o, private_attr_name, _SENTINEL) + ) == val and type(old_val) is type(val): return setattr(o, private_attr_name, val) # Invalidate the cache of the cached property diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index 76abb3020d1c85..1be7289401ccfa 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -65,10 +65,13 @@ async def async_update_entity(hass: HomeAssistant, entity_id: str) -> None: class EntityComponent(Generic[_EntityT]): - """The EntityComponent manages platforms that manages entities. + """The EntityComponent manages platforms that manage entities. + + An example of an entity component is 'light', which manages platforms such + as 'hue.light'. This class has the following responsibilities: - - Process the configuration and set up a platform based component. + - Process the configuration and set up a platform based component, for example light. - Manage the platforms and their entities. - Help extract the entities from a service call. - Listen for discovery events for platforms related to the domain. diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index fe852e2114b432..62eed213b2a82f 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -111,7 +111,11 @@ async def async_setup_entry( class EntityPlatform: - """Manage the entities for a single platform.""" + """Manage the entities for a single platform. + + An example of an entity platform is 'hue.light', which is managed by + the entity component 'light'. + """ def __init__( self, diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 97a85fdde89251..02ea81031926c8 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -322,6 +322,10 @@ def async_track_state_change_event( for each one, we keep a dict of entity ids that care about the state change events so we can do a fast dict lookup to route events. + The passed in entity_ids will be automatically lower cased. + + EVENT_STATE_CHANGED is fired on each occasion the state is updated + and changed, opposite of EVENT_STATE_REPORTED. 
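A minimal usage sketch of the two state trackers documented above; hass is a running HomeAssistant instance and the sensor entity id is illustrative:

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import (
    async_track_state_change_event,
    async_track_state_report_event,
)


def track_outdoor_temperature(hass: HomeAssistant) -> None:
    """Sketch: react to changed states and to merely re-reported states."""

    @callback
    def _on_change(event) -> None:
        # EVENT_STATE_CHANGED: the state value actually changed.
        # event.data["new_state"] holds the new State object.
        ...

    @callback
    def _on_report(event) -> None:
        # EVENT_STATE_REPORTED: the state was written again without changing.
        ...

    # Both helpers return an unsubscribe callback.
    async_track_state_change_event(hass, ["sensor.outdoor_temperature"], _on_change)
    async_track_state_report_event(hass, ["sensor.outdoor_temperature"], _on_report)
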
""" if not (entity_ids := _async_string_to_lower_list(entity_ids)): return _remove_empty_listener @@ -383,7 +387,10 @@ def _async_track_state_change_event( action: Callable[[Event[EventStateChangedData]], Any], job_type: HassJobType | None, ) -> CALLBACK_TYPE: - """async_track_state_change_event without lowercasing.""" + """Faster version of async_track_state_change_event. + + The passed in entity_ids will not be automatically lower cased. + """ return _async_track_event( _KEYED_TRACK_STATE_CHANGE, hass, entity_ids, action, job_type ) @@ -403,7 +410,11 @@ def async_track_state_report_event( action: Callable[[Event[EventStateReportedData]], Any], job_type: HassJobType | None = None, ) -> CALLBACK_TYPE: - """Track EVENT_STATE_REPORTED by entity_id without lowercasing.""" + """Track EVENT_STATE_REPORTED by entity_ids. + + EVENT_STATE_REPORTED is fired on each occasion the state is updated + but not changed, opposite of EVENT_STATE_CHANGED. + """ return _async_track_event( _KEYED_TRACK_STATE_REPORT, hass, entity_ids, action, job_type ) diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index fd7e014b2ffed8..eda980997139d3 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -5,6 +5,7 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass +import enum import functools import linecache import logging @@ -144,24 +145,72 @@ def report( If error_if_integration is True, raise instead of log if an integration is found when unwinding the stack frame. """ + core_behavior = ReportBehavior.ERROR if error_if_core else ReportBehavior.LOG + core_integration_behavior = ( + ReportBehavior.ERROR if error_if_integration else ReportBehavior.LOG + ) + custom_integration_behavior = core_integration_behavior + + if log_custom_component_only: + if core_behavior is ReportBehavior.LOG: + core_behavior = ReportBehavior.IGNORE + if core_integration_behavior is ReportBehavior.LOG: + core_integration_behavior = ReportBehavior.IGNORE + + report_usage( + what, + core_behavior=core_behavior, + core_integration_behavior=core_integration_behavior, + custom_integration_behavior=custom_integration_behavior, + exclude_integrations=exclude_integrations, + level=level, + ) + + +class ReportBehavior(enum.Enum): + """Enum for behavior on code usage.""" + + IGNORE = enum.auto() + """Ignore the code usage.""" + LOG = enum.auto() + """Log the code usage.""" + ERROR = enum.auto() + """Raise an error on code usage.""" + + +def report_usage( + what: str, + *, + core_behavior: ReportBehavior = ReportBehavior.ERROR, + core_integration_behavior: ReportBehavior = ReportBehavior.LOG, + custom_integration_behavior: ReportBehavior = ReportBehavior.LOG, + exclude_integrations: set[str] | None = None, + level: int = logging.WARNING, +) -> None: + """Report incorrect code usage. + + Similar to `report` but allows more fine-grained reporting. + """ try: integration_frame = get_integration_frame( exclude_integrations=exclude_integrations ) except MissingIntegrationFrame as err: msg = f"Detected code that {what}. Please report this issue." 
- if error_if_core: + if core_behavior is ReportBehavior.ERROR: raise RuntimeError(msg) from err - if not log_custom_component_only: + if core_behavior is ReportBehavior.LOG: _LOGGER.warning(msg, stack_info=True) return - if ( - error_if_integration - or not log_custom_component_only - or integration_frame.custom_integration - ): - _report_integration(what, integration_frame, level, error_if_integration) + integration_behavior = core_integration_behavior + if integration_frame.custom_integration: + integration_behavior = custom_integration_behavior + + if integration_behavior is not ReportBehavior.IGNORE: + _report_integration( + what, integration_frame, level, integration_behavior is ReportBehavior.ERROR + ) def _report_integration( diff --git a/homeassistant/helpers/hassio.py b/homeassistant/helpers/hassio.py new file mode 100644 index 00000000000000..51503f709d60b1 --- /dev/null +++ b/homeassistant/helpers/hassio.py @@ -0,0 +1,22 @@ +"""Hass.io helper.""" + +import os + +from homeassistant.core import HomeAssistant, callback + + +@callback +def is_hassio(hass: HomeAssistant) -> bool: + """Return true if Hass.io is loaded. + + Async friendly. + """ + return "hassio" in hass.config.components + + +@callback +def get_supervisor_ip() -> str | None: + """Return the supervisor ip address.""" + if "SUPERVISOR" not in os.environ: + return None + return os.environ["SUPERVISOR"].partition(":")[0] diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index 15e38d39dda2c1..b38f769b302d86 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -56,6 +56,7 @@ INTENT_TIMER_STATUS = "HassTimerStatus" INTENT_GET_CURRENT_DATE = "HassGetCurrentDate" INTENT_GET_CURRENT_TIME = "HassGetCurrentTime" +INTENT_RESPOND = "HassRespond" SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA) @@ -351,6 +352,7 @@ class MatchTargetsCandidate: """Candidate for async_match_targets.""" state: State + is_exposed: bool entity: entity_registry.RegistryEntry | None = None area: area_registry.AreaEntry | None = None floor: floor_registry.FloorEntry | None = None @@ -514,29 +516,31 @@ def async_match_targets( # noqa: C901 if not states: return MatchTargetsResult(False, MatchFailedReason.DOMAIN) - if constraints.assistant: - # Filter by exposure - states = [ - s - for s in states - if async_should_expose(hass, constraints.assistant, s.entity_id) - ] - if not states: - return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) + candidates = [ + MatchTargetsCandidate( + state=state, + is_exposed=( + async_should_expose(hass, constraints.assistant, state.entity_id) + if constraints.assistant + else True + ), + ) + for state in states + ] if constraints.domains and (not filtered_by_domain): # Filter by domain (if we didn't already do it) - states = [s for s in states if s.domain in constraints.domains] - if not states: + candidates = [c for c in candidates if c.state.domain in constraints.domains] + if not candidates: return MatchTargetsResult(False, MatchFailedReason.DOMAIN) if constraints.states: # Filter by state - states = [s for s in states if s.state in constraints.states] - if not states: + candidates = [c for c in candidates if c.state.state in constraints.states] + if not candidates: return MatchTargetsResult(False, MatchFailedReason.STATE) - # Exit early so we can avoid registry lookups + # Try to exit early so we can avoid registry lookups if not ( constraints.name or constraints.features @@ -544,11 +548,18 @@ def async_match_targets( # noqa: C901 or 
constraints.area_name or constraints.floor_name ): - return MatchTargetsResult(True, states=states) + if constraints.assistant: + # Check exposure + candidates = [c for c in candidates if c.is_exposed] + if not candidates: + return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) + + return MatchTargetsResult(True, states=[c.state for c in candidates]) # We need entity registry entries now er = entity_registry.async_get(hass) - candidates = [MatchTargetsCandidate(s, er.async_get(s.entity_id)) for s in states] + for candidate in candidates: + candidate.entity = er.async_get(candidate.state.entity_id) if constraints.name: # Filter by entity name or alias @@ -637,6 +648,12 @@ def async_match_targets( # noqa: C901 False, MatchFailedReason.AREA, areas=targeted_areas ) + if constraints.assistant: + # Check exposure + candidates = [c for c in candidates if c.is_exposed] + if not candidates: + return MatchTargetsResult(False, MatchFailedReason.ASSISTANT) + if constraints.name and (not constraints.allow_duplicate_names): # Check for duplicates if not areas_added: diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 8b2e066068713d..d322810b0ef8e5 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -177,11 +177,6 @@ async def async_call_tool(self, tool_input: ToolInput) -> JsonObjectType: else: raise HomeAssistantError(f'Tool "{tool_input.tool_name}" not found') - tool_input = ToolInput( - tool_name=tool_input.tool_name, - tool_args=tool.parameters(tool_input.tool_args), - ) - return await tool.async_call(self.api.hass, tool_input, self.llm_context) @@ -284,6 +279,7 @@ class AssistAPI(API): intent.INTENT_TOGGLE, intent.INTENT_GET_CURRENT_DATE, intent.INTENT_GET_CURRENT_TIME, + intent.INTENT_RESPOND, } def __init__(self, hass: HomeAssistant) -> None: @@ -420,7 +416,9 @@ def _async_get_tools( ): continue - tools.append(ScriptTool(self.hass, state.entity_id)) + script_tool = ScriptTool(self.hass, state.entity_id) + if script_tool.parameters.schema: + tools.append(script_tool) return tools @@ -451,12 +449,17 @@ def _get_exposed_entities( entities = {} for state in hass.states.async_all(): - if state.domain == SCRIPT_DOMAIN: - continue - if not async_should_expose(hass, assistant, state.entity_id): continue + description: str | None = None + if state.domain == SCRIPT_DOMAIN: + description, parameters = _get_cached_script_parameters( + hass, state.entity_id + ) + if parameters.schema: # Only list scripts without input fields here + continue + entity_entry = entity_registry.async_get(state.entity_id) names = [state.name] area_names = [] @@ -485,6 +488,9 @@ def _get_exposed_entities( "state": state.state, } + if description: + info["description"] = description + if area_names: info["areas"] = ", ".join(area_names) @@ -610,6 +616,83 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return {"type": "string"} +def _get_cached_script_parameters( + hass: HomeAssistant, entity_id: str +) -> tuple[str | None, vol.Schema]: + """Get script description and schema.""" + entity_registry = er.async_get(hass) + + description = None + parameters = vol.Schema({}) + entity_entry = entity_registry.async_get(entity_id) + if entity_entry and entity_entry.unique_id: + parameters_cache = hass.data.get(SCRIPT_PARAMETERS_CACHE) + + if parameters_cache is None: + parameters_cache = hass.data[SCRIPT_PARAMETERS_CACHE] = {} + + @callback + def clear_cache(event: Event) -> None: + """Clear script parameter cache on script reload or delete.""" + if ( + 
event.data[ATTR_DOMAIN] == SCRIPT_DOMAIN + and event.data[ATTR_SERVICE] in parameters_cache + ): + parameters_cache.pop(event.data[ATTR_SERVICE]) + + cancel = hass.bus.async_listen(EVENT_SERVICE_REMOVED, clear_cache) + + @callback + def on_homeassistant_close(event: Event) -> None: + """Cleanup.""" + cancel() + + hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_CLOSE, on_homeassistant_close + ) + + if entity_entry.unique_id in parameters_cache: + return parameters_cache[entity_entry.unique_id] + + if service_desc := service.async_get_cached_service_description( + hass, SCRIPT_DOMAIN, entity_entry.unique_id + ): + description = service_desc.get("description") + schema: dict[vol.Marker, Any] = {} + fields = service_desc.get("fields", {}) + + for field, config in fields.items(): + field_description = config.get("description") + if not field_description: + field_description = config.get("name") + key: vol.Marker + if config.get("required"): + key = vol.Required(field, description=field_description) + else: + key = vol.Optional(field, description=field_description) + if "selector" in config: + schema[key] = selector.selector(config["selector"]) + else: + schema[key] = cv.string + + parameters = vol.Schema(schema) + + aliases: list[str] = [] + if entity_entry.name: + aliases.append(entity_entry.name) + if entity_entry.aliases: + aliases.extend(entity_entry.aliases) + if aliases: + if description: + description = description + ". Aliases: " + str(list(aliases)) + else: + description = "Aliases: " + str(list(aliases)) + + parameters_cache[entity_entry.unique_id] = (description, parameters) + + return description, parameters + + class ScriptTool(Tool): """LLM Tool representing a Script.""" @@ -619,86 +702,14 @@ def __init__( script_entity_id: str, ) -> None: """Init the class.""" - entity_registry = er.async_get(hass) - self.name = split_entity_id(script_entity_id)[1] if self.name[0].isdigit(): self.name = "_" + self.name self._entity_id = script_entity_id - self.parameters = vol.Schema({}) - entity_entry = entity_registry.async_get(script_entity_id) - if entity_entry and entity_entry.unique_id: - parameters_cache = hass.data.get(SCRIPT_PARAMETERS_CACHE) - - if parameters_cache is None: - parameters_cache = hass.data[SCRIPT_PARAMETERS_CACHE] = {} - - @callback - def clear_cache(event: Event) -> None: - """Clear script parameter cache on script reload or delete.""" - if ( - event.data[ATTR_DOMAIN] == SCRIPT_DOMAIN - and event.data[ATTR_SERVICE] in parameters_cache - ): - parameters_cache.pop(event.data[ATTR_SERVICE]) - - cancel = hass.bus.async_listen(EVENT_SERVICE_REMOVED, clear_cache) - - @callback - def on_homeassistant_close(event: Event) -> None: - """Cleanup.""" - cancel() - - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_CLOSE, on_homeassistant_close - ) - - if entity_entry.unique_id in parameters_cache: - self.description, self.parameters = parameters_cache[ - entity_entry.unique_id - ] - return - - if service_desc := service.async_get_cached_service_description( - hass, SCRIPT_DOMAIN, entity_entry.unique_id - ): - self.description = service_desc.get("description") - schema: dict[vol.Marker, Any] = {} - fields = service_desc.get("fields", {}) - - for field, config in fields.items(): - description = config.get("description") - if not description: - description = config.get("name") - key: vol.Marker - if config.get("required"): - key = vol.Required(field, description=description) - else: - key = vol.Optional(field, description=description) - if "selector" in config: - schema[key] = 
selector.selector(config["selector"]) - else: - schema[key] = cv.string - - self.parameters = vol.Schema(schema) - - aliases: list[str] = [] - if entity_entry.name: - aliases.append(entity_entry.name) - if entity_entry.aliases: - aliases.extend(entity_entry.aliases) - if aliases: - if self.description: - self.description = ( - self.description + ". Aliases: " + str(list(aliases)) - ) - else: - self.description = "Aliases: " + str(list(aliases)) - - parameters_cache[entity_entry.unique_id] = ( - self.description, - self.parameters, - ) + + self.description, self.parameters = _get_cached_script_parameters( + hass, script_entity_id + ) async def async_call( self, hass: HomeAssistant, tool_input: ToolInput, llm_context: LLMContext diff --git a/homeassistant/helpers/network.py b/homeassistant/helpers/network.py index fa7fec9faead2d..e39cc2de54735e 100644 --- a/homeassistant/helpers/network.py +++ b/homeassistant/helpers/network.py @@ -16,6 +16,8 @@ from homeassistant.loader import bind_hass from homeassistant.util.network import is_ip_address, is_loopback, normalize_url +from .hassio import is_hassio + TYPE_URL_INTERNAL = "internal_url" TYPE_URL_EXTERNAL = "external_url" SUPERVISOR_NETWORK_HOST = "homeassistant" @@ -42,10 +44,6 @@ def get_supervisor_network_url( hass: HomeAssistant, *, allow_ssl: bool = False ) -> str | None: """Get URL for home assistant within supervisor network.""" - # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.hassio import is_hassio - if hass.config.api is None or not is_hassio(hass): return None @@ -180,20 +178,21 @@ def get_url( and request_host is not None and hass.config.api is not None ): - # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.hassio import get_host_info, is_hassio - scheme = "https" if hass.config.api.use_ssl else "http" current_url = yarl.URL.build( scheme=scheme, host=request_host, port=hass.config.api.port ) known_hostnames = ["localhost"] - if is_hassio(hass) and (host_info := get_host_info(hass)): - known_hostnames.extend( - [host_info["hostname"], f"{host_info['hostname']}.local"] - ) + if is_hassio(hass): + # Local import to avoid circular dependencies + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.hassio import get_host_info + + if host_info := get_host_info(hass): + known_hostnames.extend( + [host_info["hostname"], f"{host_info['hostname']}.local"] + ) if ( ( diff --git a/homeassistant/helpers/schema_config_entry_flow.py b/homeassistant/helpers/schema_config_entry_flow.py index 7463c9945b24b6..af8c4c6402df5f 100644 --- a/homeassistant/helpers/schema_config_entry_flow.py +++ b/homeassistant/helpers/schema_config_entry_flow.py @@ -16,7 +16,6 @@ ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import HomeAssistant, callback, split_entity_id from homeassistant.data_entry_flow import UnknownHandler @@ -403,7 +402,7 @@ def async_create_entry( ) -class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): +class SchemaOptionsFlowHandler(OptionsFlow): """Handle a schema based options flow.""" def __init__( @@ -422,10 +421,8 @@ def __init__( options, which is the union of stored options and user input from the options flow steps. 
""" - super().__init__(config_entry) - self._common_handler = SchemaCommonFlowHandler( - self, options_flow, self._options - ) + self._options = copy.deepcopy(dict(config_entry.options)) + self._common_handler = SchemaCommonFlowHandler(self, options_flow, self.options) self._async_options_flow_finished = async_options_flow_finished for step in options_flow: @@ -438,6 +435,11 @@ def __init__( if async_setup_preview: setattr(self, "async_setup_preview", async_setup_preview) + @property + def options(self) -> dict[str, Any]: + """Return a mutable copy of the config entry options.""" + return self._options + @staticmethod def _async_step( step_id: str, diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index ee2c4c647732e3..86dcd858c1bc45 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -1133,7 +1133,11 @@ async def _async_wait_for_trigger_step(self) -> None: self._step_log("wait for trigger", timeout) variables = {**self._variables} - self._variables["wait"] = {"remaining": timeout, "trigger": None} + self._variables["wait"] = { + "remaining": timeout, + "completed": False, + "trigger": None, + } trace_set_result(wait=self._variables["wait"]) if timeout == 0: @@ -1151,6 +1155,7 @@ async def async_done( variables: dict[str, Any], context: Context | None = None ) -> None: self._async_set_remaining_time_var(timeout_handle) + self._variables["wait"]["completed"] = True self._variables["wait"]["trigger"] = variables["trigger"] _set_result_unless_done(done) diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index ac21f1da3fcda8..33e8f3d3d6e3d8 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -571,19 +571,31 @@ def async_extract_referenced_entity_ids( # noqa: C901 for area_entry in area_reg.areas.get_areas_for_floor(floor_id) ) - # Find devices for targeted areas + selected.referenced_areas.update(selector.area_ids) selected.referenced_devices.update(selector.device_ids) - selected.referenced_areas.update(selector.area_ids) + if not selected.referenced_areas and not selected.referenced_devices: + return selected + + # Add indirectly referenced by device + selected.indirectly_referenced.update( + entry.entity_id + for device_id in selected.referenced_devices + for entry in entities.get_entries_for_device_id(device_id) + # Do not add entities which are hidden or which are config + # or diagnostic entities. + if (entry.entity_category is None and entry.hidden_by is None) + ) + + # Find devices for targeted areas + referenced_devices_by_area: set[str] = set() if selected.referenced_areas: for area_id in selected.referenced_areas: - selected.referenced_devices.update( + referenced_devices_by_area.update( device_entry.id for device_entry in dev_reg.devices.get_devices_for_area_id(area_id) ) - - if not selected.referenced_areas and not selected.referenced_devices: - return selected + selected.referenced_devices.update(referenced_devices_by_area) # Add indirectly referenced by area selected.indirectly_referenced.update( @@ -595,10 +607,10 @@ def async_extract_referenced_entity_ids( # noqa: C901 # or diagnostic entities. 
if entry.entity_category is None and entry.hidden_by is None ) - # Add indirectly referenced by device + # Add indirectly referenced by area through device selected.indirectly_referenced.update( entry.entity_id - for device_id in selected.referenced_devices + for device_id in referenced_devices_by_area for entry in entities.get_entries_for_device_id(device_id) # Do not add entities which are hidden or which are config # or diagnostic entities. @@ -610,11 +622,10 @@ def async_extract_referenced_entity_ids( # noqa: C901 # by an area and the entity # has no explicitly set area not entry.area_id - # The entity's device matches a targeted device - or device_id in selector.device_ids ) ) ) + return selected diff --git a/homeassistant/helpers/service_info/hassio.py b/homeassistant/helpers/service_info/hassio.py new file mode 100644 index 00000000000000..0125fef3017328 --- /dev/null +++ b/homeassistant/helpers/service_info/hassio.py @@ -0,0 +1,16 @@ +"""Hassio Discovery data.""" + +from dataclasses import dataclass +from typing import Any + +from homeassistant.data_entry_flow import BaseServiceInfo + + +@dataclass(slots=True) +class HassioServiceInfo(BaseServiceInfo): + """Prepared info from hassio entries.""" + + config: dict[str, Any] + name: str + slug: str + uuid: str diff --git a/homeassistant/helpers/system_info.py b/homeassistant/helpers/system_info.py index 69e03904caaafb..df4c45cd5ed477 100644 --- a/homeassistant/helpers/system_info.py +++ b/homeassistant/helpers/system_info.py @@ -14,6 +14,7 @@ from homeassistant.loader import bind_hass from homeassistant.util.package import is_docker_env, is_virtual_env +from .hassio import is_hassio from .importlib import async_import_module from .singleton import singleton @@ -52,13 +53,13 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: else: hassio = await async_import_module(hass, "homeassistant.components.hassio") - is_hassio = hassio.is_hassio(hass) + is_hassio_ = is_hassio(hass) info_object = { "installation_type": "Unknown", "version": current_version, "dev": "dev" in current_version, - "hassio": is_hassio, + "hassio": is_hassio_, "virtualenv": is_virtual_env(), "python_version": platform.python_version(), "docker": False, @@ -89,7 +90,7 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: info_object["installation_type"] = "Home Assistant Core" # Enrich with Supervisor information - if is_hassio: + if is_hassio_: if not (info := hassio.get_info(hass)): _LOGGER.warning("No Home Assistant Supervisor info available") info = {} diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 5d5fd3df39ac62..753464c35d5216 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -9,6 +9,7 @@ from collections.abc import Callable, Generator, Iterable from contextlib import AbstractContextManager from contextvars import ContextVar +from copy import deepcopy from datetime import date, datetime, time, timedelta from functools import cache, lru_cache, partial, wraps import json @@ -1280,7 +1281,7 @@ def result_as_boolean(template_result: Any | None) -> bool: True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy - + All other values are falsy """ if template_result is None: return False @@ -2172,7 +2173,8 @@ def merge_response(value: ServiceResponse) -> list[Any]: is_single_list = False response_items: list = [] - for entity_id, entity_response in value.items(): # pylint: 
disable=too-many-nested-blocks + input_service_response = deepcopy(value) + for entity_id, entity_response in input_service_response.items(): # pylint: disable=too-many-nested-blocks if not isinstance(entity_response, dict): raise TypeError("Response is not a dictionary") for value_key, type_response in entity_response.items(): diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 25cd4bc4d906ec..f5c2a2a1288efe 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -29,6 +29,8 @@ from . import entity, event from .debounce import Debouncer +from .frame import report +from .typing import UNDEFINED, UndefinedType REQUEST_REFRESH_DEFAULT_COOLDOWN = 10 REQUEST_REFRESH_DEFAULT_IMMEDIATE = True @@ -68,6 +70,7 @@ def __init__( hass: HomeAssistant, logger: logging.Logger, *, + config_entry: config_entries.ConfigEntry | None | UndefinedType = UNDEFINED, name: str, update_interval: timedelta | None = None, update_method: Callable[[], Awaitable[_DataT]] | None = None, @@ -84,7 +87,12 @@ def __init__( self._update_interval_seconds: float | None = None self.update_interval = update_interval self._shutdown_requested = False - self.config_entry = config_entries.current_entry.get() + if config_entry is UNDEFINED: + self.config_entry = config_entries.current_entry.get() + # This should be deprecated once all core integrations are updated + # to pass in the config entry explicitly. + else: + self.config_entry = config_entry self.always_update = always_update # It's None before the first successful update. @@ -277,6 +285,26 @@ async def async_config_entry_first_refresh(self) -> None: fails. Additionally logging is handled by config entry setup to ensure that multiple retries do not cause log spam. 
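To go with the new keyword argument above, an integration's entry setup can now pass the config entry to the coordinator explicitly instead of relying on the current_entry context variable; the integration and helper names below are illustrative:

from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


async def _async_fetch() -> dict:
    """Illustrative update method."""
    return {}


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Sketch of entry setup using the explicit config_entry kwarg."""
    coordinator: DataUpdateCoordinator[dict] = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,  # no longer inferred from config_entries.current_entry
        name="example",
        update_interval=timedelta(minutes=5),
        update_method=_async_fetch,
    )
    # Permitted here because the entry is still SETUP_IN_PROGRESS during setup.
    await coordinator.async_config_entry_first_refresh()
    return True
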
""" + if self.config_entry is None: + report( + "uses `async_config_entry_first_refresh`, which is only supported " + "for coordinators with a config entry and will stop working in " + "Home Assistant 2025.11", + error_if_core=True, + error_if_integration=False, + ) + elif ( + self.config_entry.state + is not config_entries.ConfigEntryState.SETUP_IN_PROGRESS + ): + report( + "uses `async_config_entry_first_refresh`, which is only supported " + f"when entry state is {config_entries.ConfigEntryState.SETUP_IN_PROGRESS}, " + f"but it is in state {self.config_entry.state}, " + "This will stop working in Home Assistant 2025.11", + error_if_core=True, + error_if_integration=False, + ) if await self.__wrap_async_setup(): await self._async_refresh( log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True diff --git a/homeassistant/loader.py b/homeassistant/loader.py index dd38271070d9d1..d2e04df04c4b00 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -255,6 +255,7 @@ class Manifest(TypedDict, total=False): usb: list[dict[str, str]] homekit: dict[str, list[str]] is_built_in: bool + overwrites_built_in: bool version: str codeowners: list[str] loggers: list[str] @@ -282,9 +283,7 @@ def manifest_from_legacy_module(domain: str, module: ModuleType) -> Manifest: } -async def _async_get_custom_components( - hass: HomeAssistant, -) -> dict[str, Integration]: +def _get_custom_components(hass: HomeAssistant) -> dict[str, Integration]: """Return list of custom integrations.""" if hass.config.recovery_mode or hass.config.safe_mode: return {} @@ -294,21 +293,14 @@ async def _async_get_custom_components( except ImportError: return {} - def get_sub_directories(paths: list[str]) -> list[pathlib.Path]: - """Return all sub directories in a set of paths.""" - return [ - entry - for path in paths - for entry in pathlib.Path(path).iterdir() - if entry.is_dir() - ] - - dirs = await hass.async_add_executor_job( - get_sub_directories, custom_components.__path__ - ) + dirs = [ + entry + for path in custom_components.__path__ + for entry in pathlib.Path(path).iterdir() + if entry.is_dir() + ] - integrations = await hass.async_add_executor_job( - _resolve_integrations_from_root, + integrations = _resolve_integrations_from_root( hass, custom_components, [comp.name for comp in dirs], @@ -329,7 +321,7 @@ async def async_get_custom_components( if comps_or_future is None: future = hass.data[DATA_CUSTOM_COMPONENTS] = hass.loop.create_future() - comps = await _async_get_custom_components(hass) + comps = await hass.async_add_executor_job(_get_custom_components, hass) hass.data[DATA_CUSTOM_COMPONENTS] = comps future.set_result(comps) @@ -451,6 +443,7 @@ async def async_get_integration_descriptions( "single_config_entry": integration.manifest.get( "single_config_entry", False ), + "overwrites_built_in": integration.overwrites_built_in, } custom_flows[integration_key][integration.domain] = metadata @@ -762,6 +755,7 @@ def __init__( self.file_path = file_path self.manifest = manifest manifest["is_built_in"] = self.is_built_in + manifest["overwrites_built_in"] = self.overwrites_built_in if self.dependencies: self._all_dependencies_resolved: bool | None = None @@ -909,6 +903,16 @@ def is_built_in(self) -> bool: """Test if package is a built-in integration.""" return self.pkg_path.startswith(PACKAGE_BUILTIN) + @property + def overwrites_built_in(self) -> bool: + """Return if package overwrites a built-in integration.""" + if self.is_built_in: + return False + core_comp_path = ( + 
pathlib.Path(__file__).parent / "components" / self.domain / "manifest.json" + ) + return core_comp_path.is_file() + @property def version(self) -> AwesomeVersion | None: """Return the version of the integration.""" @@ -1552,16 +1556,18 @@ def __getattr__(self, comp_name: str) -> ModuleWrapper: raise ImportError(f"Unable to load {comp_name}") # Local import to avoid circular dependencies - from .helpers.frame import report # pylint: disable=import-outside-toplevel + # pylint: disable-next=import-outside-toplevel + from .helpers.frame import ReportBehavior, report_usage - report( + report_usage( ( f"accesses hass.components.{comp_name}." " This is deprecated and will stop working in Home Assistant 2025.3, it" f" should be updated to import functions used from {comp_name} directly" ), - error_if_core=False, - log_custom_component_only=True, + core_behavior=ReportBehavior.IGNORE, + core_integration_behavior=ReportBehavior.IGNORE, + custom_integration_behavior=ReportBehavior.LOG, ) wrapped = ModuleWrapper(self._hass, component) @@ -1581,16 +1587,18 @@ def __getattr__(self, helper_name: str) -> ModuleWrapper: helper = importlib.import_module(f"homeassistant.helpers.{helper_name}") # Local import to avoid circular dependencies - from .helpers.frame import report # pylint: disable=import-outside-toplevel + # pylint: disable-next=import-outside-toplevel + from .helpers.frame import ReportBehavior, report_usage - report( + report_usage( ( f"accesses hass.helpers.{helper_name}." - " This is deprecated and will stop working in Home Assistant 2024.11, it" + " This is deprecated and will stop working in Home Assistant 2025.5, it" f" should be updated to import functions used from {helper_name} directly" ), - error_if_core=False, - log_custom_component_only=True, + core_behavior=ReportBehavior.IGNORE, + core_integration_behavior=ReportBehavior.IGNORE, + custom_integration_behavior=ReportBehavior.LOG, ) wrapped = ModuleWrapper(self._hass, helper) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 26e5601cda50ec..ec2dc977989d28 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,16 +3,18 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.1.0 +aiohasupervisor==0.2.1 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.9 +aiohttp==3.11.0rc0 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.2.0 async-upnp-client==0.41.0 atomicwrites-homeassistant==1.4.1 -attrs==23.2.0 +attrs==24.2.0 +audioop-lts==0.2.1;python_version>='3.13' +av==13.1.0 awesomeversion==24.6.0 bcrypt==4.2.0 bleak-retry-connector==3.6.0 @@ -26,25 +28,24 @@ ciso8601==2.3.1 cryptography==43.0.1 dbus-fast==2.24.3 fnv-hash-fast==1.0.2 -ha-av==10.1.1 -ha-ffmpeg==3.2.0 -habluetooth==3.5.0 -hass-nabucasa==0.81.1 +go2rtc-client==0.1.0 +ha-ffmpeg==3.2.2 +habluetooth==3.6.0 +hass-nabucasa==0.84.0 hassil==1.7.4 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241002.2 -home-assistant-intents==2024.10.2 +home-assistant-frontend==20241106.2 +home-assistant-intents==2024.11.6 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -mashumaro==3.13.1 mutagen==1.47.0 -orjson==3.10.7 +orjson==3.10.11 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==10.4.0 -propcache==0.1.0 +Pillow==11.0.0 +propcache==0.2.0 psutil-home-assistant==0.0.1 PyJWT==2.9.0 pymicro-vad==1.0.1 @@ -57,16 +58,20 @@ PyTurboJPEG==1.7.5 pyudev==0.24.1 PyYAML==6.0.2 requests==2.32.3 +securetar==2024.2.1 SQLAlchemy==2.0.31 
+standard-aifc==3.13.0;python_version>='3.13' +standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.4.17 +uv==0.5.0 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.13.1 -zeroconf==0.135.0 +webrtc-models==0.2.0 +yarl==1.17.1 +zeroconf==0.136.0 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -79,9 +84,9 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.66.2 -grpcio-status==1.66.2 -grpcio-reflection==1.66.2 +grpcio==1.67.1 +grpcio-status==1.67.1 +grpcio-reflection==1.67.1 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -101,7 +106,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.6.0 +anyio==4.6.2.post1 h11==0.14.0 httpcore==1.0.5 @@ -110,7 +115,8 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==1.26.4 +numpy==2.1.3 +pandas~=2.2.3 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -121,7 +127,10 @@ backoff>=2.0 # Required to avoid breaking (#101042). # v2 has breaking changes (#99218). -pydantic==1.10.18 +pydantic==1.10.19 + +# Required for Python 3.12.4 compatibility (#119223). +mashumaro>=3.13.1 # Breaks asyncio # https://github.com/pubnub/python/issues/130 @@ -137,7 +146,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==5.28.2 +protobuf==5.28.3 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -159,15 +168,12 @@ get-mac==1000000000.0.0 # We want to skip the binary wheels for the 'charset-normalizer' packages. # They are build with mypyc, but causes issues with our wheel builder. # In order to do so, we need to constrain the version. -charset-normalizer==3.2.0 +charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for -# Roborock, NAM, Brother, and GIOS. +# NAM, Brother, and GIOS. dacite>=1.7.0 -# Musle wheels for pandas 2.2.0 cannot be build for any architecture. -pandas==2.1.4 - # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x chacha20poly1305-reuseable>=0.13.0 @@ -185,3 +191,7 @@ tuf>=4.0.0 # https://github.com/jd/tenacity/issues/471 tenacity!=8.4.0 + +# 5.0.0 breaks Timeout as a context manager +# TypeError: 'Timeout' object does not support the context manager protocol +async-timeout==4.0.3 diff --git a/homeassistant/runner.py b/homeassistant/runner.py index 102dbafe147f9b..59775655854504 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -3,10 +3,8 @@ from __future__ import annotations import asyncio -from asyncio import events import dataclasses import logging -import os import subprocess import threading from time import monotonic @@ -58,22 +56,6 @@ class RuntimeConfig: safe_mode: bool = False -def can_use_pidfd() -> bool: - """Check if pidfd_open is available. 
- - Back ported from cpython 3.12 - """ - if not hasattr(os, "pidfd_open"): - return False - try: - pid = os.getpid() - os.close(os.pidfd_open(pid, 0)) - except OSError: - # blocked by security policy like SECCOMP - return False - return True - - class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): """Event loop policy for Home Assistant.""" @@ -81,23 +63,6 @@ def __init__(self, debug: bool) -> None: """Init the event loop policy.""" super().__init__() self.debug = debug - self._watcher: asyncio.AbstractChildWatcher | None = None - - def _init_watcher(self) -> None: - """Initialize the watcher for child processes. - - Back ported from cpython 3.12 - """ - with events._lock: # type: ignore[attr-defined] # noqa: SLF001 - if self._watcher is None: # pragma: no branch - if can_use_pidfd(): - self._watcher = asyncio.PidfdChildWatcher() - else: - self._watcher = asyncio.ThreadedChildWatcher() - if threading.current_thread() is threading.main_thread(): - self._watcher.attach_loop( - self._local._loop # type: ignore[attr-defined] # noqa: SLF001 - ) @property def loop_name(self) -> str: diff --git a/homeassistant/util/dt.py b/homeassistant/util/dt.py index 30cf7222f3a936..ee2b6c762d8cbe 100644 --- a/homeassistant/util/dt.py +++ b/homeassistant/util/dt.py @@ -95,7 +95,7 @@ def set_default_time_zone(time_zone: dt.tzinfo) -> None: get_default_time_zone.cache_clear() -def get_time_zone(time_zone_str: str) -> dt.tzinfo | None: +def get_time_zone(time_zone_str: str) -> zoneinfo.ZoneInfo | None: """Get time zone from string. Return None if unable to determine. Must be run in the executor if the ZoneInfo is not already @@ -107,7 +107,7 @@ def get_time_zone(time_zone_str: str) -> dt.tzinfo | None: return None -async def async_get_time_zone(time_zone_str: str) -> dt.tzinfo | None: +async def async_get_time_zone(time_zone_str: str) -> zoneinfo.ZoneInfo | None: """Get time zone from string. Return None if unable to determine. Async friendly. 
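
Aside on the hass.components / hass.helpers deprecation hunks above: the replacement that the new report_usage() message asks for is a plain module import instead of the dynamic accessor. A minimal sketch of that migration, assuming a config-entry setup function and using persistent_notification purely as an example component (neither is taken from this diff):

    from homeassistant.components import persistent_notification
    from homeassistant.config_entries import ConfigEntry
    from homeassistant.core import HomeAssistant


    async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        # Before (deprecated; with the change above this is ignored for core code
        # and logged for custom integrations), roughly:
        #     hass.components.persistent_notification.async_create("Backup done")
        # After: import the component module directly and pass hass explicitly.
        persistent_notification.async_create(hass, "Backup done", title="Backup")
        return True
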
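
Aside on the homeassistant/util/timeout.py hunks further below: both __aexit__ changes rely on asyncio's cancellation counting (Task.cancelling() / Task.uncancel(), Python 3.11+) so that a cancellation injected by the timeout itself is converted into TimeoutError, while a cancellation requested by an outside caller keeps propagating. A self-contained sketch of that pattern, not the Home Assistant class itself:

    import asyncio


    class SoftTimeout:
        """Minimal illustration of the cancelling()/uncancel() bookkeeping."""

        def __init__(self, delay: float) -> None:
            self._delay = delay
            self._timed_out = False

        async def __aenter__(self) -> "SoftTimeout":
            task = asyncio.current_task()
            assert task is not None
            self._task = task
            # Baseline: how many cancellation requests were already pending.
            self._cancelling = task.cancelling()
            loop = asyncio.get_running_loop()
            self._handle = loop.call_later(self._delay, self._on_timeout)
            return self

        def _on_timeout(self) -> None:
            self._timed_out = True
            self._task.cancel()

        async def __aexit__(self, exc_type, exc, tb) -> None:
            self._handle.cancel()
            if exc_type is asyncio.CancelledError and self._timed_out:
                # Undo the cancellation we injected. If the count is still above
                # the baseline, an outside caller also cancelled this task, so
                # return and let that CancelledError propagate unchanged.
                if self._task.uncancel() > self._cancelling:
                    return
                raise TimeoutError from exc

So `async with SoftTimeout(10): await do_work()` raises TimeoutError when the guard itself fires, while an external task.cancel() still cancels the task normally.
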
diff --git a/homeassistant/util/package.py b/homeassistant/util/package.py index 3796bf35cd7a3d..da0666290a1407 100644 --- a/homeassistant/util/package.py +++ b/homeassistant/util/package.py @@ -104,6 +104,8 @@ def install_package( _LOGGER.info("Attempting install of %s", package) env = os.environ.copy() args = [ + sys.executable, + "-m", "uv", "pip", "install", diff --git a/homeassistant/util/timeout.py b/homeassistant/util/timeout.py index 821f502694bb57..ddabdf2746d7f2 100644 --- a/homeassistant/util/timeout.py +++ b/homeassistant/util/timeout.py @@ -16,7 +16,7 @@ ZONE_GLOBAL = "global" -class _State(str, enum.Enum): +class _State(enum.Enum): """States of a task.""" INIT = "INIT" @@ -160,11 +160,16 @@ def __init__( self._wait_zone: asyncio.Event = asyncio.Event() self._state: _State = _State.INIT self._cool_down: float = cool_down + self._cancelling = 0 async def __aenter__(self) -> Self: self._manager.global_tasks.append(self) self._start_timer() self._state = _State.ACTIVE + # Remember if the task was already cancelling + # so when we __aexit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = self._task.cancelling() return self async def __aexit__( @@ -177,7 +182,15 @@ async def __aexit__( self._manager.global_tasks.remove(self) # Timeout on exit - if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT: + if exc_type is asyncio.CancelledError and self.state is _State.TIMEOUT: + # The timeout was hit, and the task was cancelled + # so we need to uncancel the task since the cancellation + # should not leak out of the context manager + if self._task.uncancel() > self._cancelling: + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + return None raise TimeoutError self._state = _State.EXIT @@ -266,6 +279,7 @@ def __init__( self._time_left: float = timeout self._expiration_time: float | None = None self._timeout_handler: asyncio.Handle | None = None + self._cancelling = 0 @property def state(self) -> _State: @@ -280,6 +294,11 @@ async def __aenter__(self) -> Self: if self._zone.freezes_done: self._start_timer() + # Remember if the task was already cancelling + # so when we __aexit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = self._task.cancelling() + return self async def __aexit__( @@ -292,7 +311,15 @@ async def __aexit__( self._stop_timer() # Timeout on exit - if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT: + if exc_type is asyncio.CancelledError and self.state is _State.TIMEOUT: + # The timeout was hit, and the task was cancelled + # so we need to uncancel the task since the cancellation + # should not leak out of the context manager + if self._task.uncancel() > self._cancelling: + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + return None raise TimeoutError self._state = _State.EXIT diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index fccc77edcb0889..95d8fbc9df1205 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -10,6 +10,7 @@ CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, UNIT_NOT_RECOGNIZED_TEMPLATE, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -173,14 +174,25 @@ class 
DistanceConverter(BaseUnitConverter): } +class BloodGlugoseConcentrationConverter(BaseUnitConverter): + """Utility to convert blood glucose concentration values.""" + + UNIT_CLASS = "blood_glucose_concentration" + _UNIT_CONVERSION: dict[str | None, float] = { + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1, + } + VALID_UNITS = set(UnitOfBloodGlucoseConcentration) + + class ConductivityConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "conductivity" _UNIT_CONVERSION: dict[str | None, float] = { - UnitOfConductivity.MICROSIEMENS: 1, - UnitOfConductivity.MILLISIEMENS: 1e-3, - UnitOfConductivity.SIEMENS: 1e-6, + UnitOfConductivity.MICROSIEMENS_PER_CM: 1, + UnitOfConductivity.MILLISIEMENS_PER_CM: 1e-3, + UnitOfConductivity.SIEMENS_PER_CM: 1e-6, } VALID_UNITS = set(UnitOfConductivity) @@ -222,6 +234,8 @@ class EnergyConverter(BaseUnitConverter): UnitOfEnergy.WATT_HOUR: 1e3, UnitOfEnergy.KILO_WATT_HOUR: 1, UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1e3, + UnitOfEnergy.GIGA_WATT_HOUR: 1 / 1e6, + UnitOfEnergy.TERA_WATT_HOUR: 1 / 1e9, UnitOfEnergy.CALORIE: _WH_TO_CAL * 1e3, UnitOfEnergy.KILO_CALORIE: _WH_TO_CAL, UnitOfEnergy.MEGA_CALORIE: _WH_TO_CAL / 1e3, @@ -292,10 +306,16 @@ class PowerConverter(BaseUnitConverter): _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, + UnitOfPower.MEGA_WATT: 1 / 1e6, + UnitOfPower.GIGA_WATT: 1 / 1e9, + UnitOfPower.TERA_WATT: 1 / 1e12, } VALID_UNITS = { UnitOfPower.WATT, UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, } diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 39ac17d94f997f..39d38a8f47d5c9 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -25,7 +25,6 @@ from propcache import cached_property from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.frame import report from .const import SECRET_YAML from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass @@ -144,37 +143,6 @@ def __init__(self, stream: Any, secrets: Secrets | None = None) -> None: self.secrets = secrets -class SafeLoader(FastSafeLoader): - """Provided for backwards compatibility. Logs when instantiated.""" - - def __init__(*args: Any, **kwargs: Any) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.__init__(*args, **kwargs) - - @classmethod - def add_constructor(cls, tag: str, constructor: Callable) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.add_constructor(tag, constructor) - - @classmethod - def add_multi_constructor( - cls, tag_prefix: str, multi_constructor: Callable - ) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) - - @staticmethod - def __report_deprecated() -> None: - """Log deprecation warning.""" - report( - "uses deprecated 'SafeLoader' instead of 'FastSafeLoader', " - "which will stop working in HA Core 2024.6," - ) - - class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): """Python safe loader.""" @@ -184,37 +152,6 @@ def __init__(self, stream: Any, secrets: Secrets | None = None) -> None: self.secrets = secrets -class SafeLineLoader(PythonSafeLoader): - """Provided for backwards compatibility. 
Logs when instantiated.""" - - def __init__(*args: Any, **kwargs: Any) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.__init__(*args, **kwargs) - - @classmethod - def add_constructor(cls, tag: str, constructor: Callable) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.add_constructor(tag, constructor) - - @classmethod - def add_multi_constructor( - cls, tag_prefix: str, multi_constructor: Callable - ) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) - - @staticmethod - def __report_deprecated() -> None: - """Log deprecation warning.""" - report( - "uses deprecated 'SafeLineLoader' instead of 'PythonSafeLoader', " - "which will stop working in HA Core 2024.6," - ) - - type LoaderType = FastSafeLoader | PythonSafeLoader diff --git a/mypy.ini b/mypy.ini index cc0d74348bb83f..4d33f16d968e11 100644 --- a/mypy.ini +++ b/mypy.ini @@ -11,6 +11,7 @@ follow_imports = normal local_partial_types = true strict_equality = true no_implicit_optional = true +report_deprecated_as_error = true warn_incomplete_stub = true warn_redundant_casts = true warn_unused_configs = true @@ -994,6 +995,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.cambridge_audio.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.camera.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1835,6 +1846,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.go2rtc.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.goalzero.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1895,6 +1916,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.govee_ble.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.gpsd.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2765,16 +2796,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.map.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.mastodon.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2975,6 +2996,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.music_assistant.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true 
+disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.my.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3025,6 +3056,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.nasweb.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.neato.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3115,6 +3156,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.nordpool.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.notify.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3205,6 +3256,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.openai_conversation.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.openexchangerates.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3275,6 +3336,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.panel_custom.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.peco.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3825,6 +3896,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.shell_command.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.shelly.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3965,6 +4046,17 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.spotify.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true +no_implicit_reexport = true + [mypy-homeassistant.components.sql.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4106,6 +4198,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.switch_as_x.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + 
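
Aside on the strict-typing blocks in this mypy.ini hunk: together with report_deprecated_as_error = true added to the global [mypy] section earlier in this diff (and the move to mypy-dev 1.14 alphas in requirements_test.txt later on), usages of symbols marked with PEP 702's @deprecated should now fail type checking rather than slip through. A hedged sketch of the kind of call that gets flagged; the function names are invented for illustration:

    from typing_extensions import deprecated


    @deprecated("Use async_refresh() instead")
    def refresh() -> None:
        """Old blocking refresh helper."""


    def update_all() -> None:
        # With the deprecated check promoted to an error, mypy rejects this
        # call until the caller migrates to the replacement API.
        refresh()
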
[mypy-homeassistant.components.switchbee.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4890,9 +4992,6 @@ warn_unreachable = true [mypy-homeassistant.components.application_credentials.*] no_implicit_reexport = true -[mypy-homeassistant.components.spotify.*] -no_implicit_reexport = true - [mypy-tests.*] check_untyped_defs = false disallow_incomplete_defs = false diff --git a/pyproject.toml b/pyproject.toml index eba579e81cf13b..4a9192d7767b0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,10 @@ [build-system] -requires = ["setuptools==69.2.0", "wheel~=0.43.0"] +requires = ["setuptools==75.1.0"] build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.11.0.dev0" +version = "2024.12.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" @@ -25,16 +25,18 @@ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", # Integrations may depend on hassio integration without listing it to - # change behavior based on presence of supervisor - "aiohasupervisor==0.1.0", - "aiohttp==3.10.9", + # change behavior based on presence of supervisor. Deprecated with #127228 + # Lib can be removed with 2025.11 + "aiohasupervisor==0.2.1", + "aiohttp==3.11.0rc0", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", "astral==2.2", "async-interrupt==1.2.0", - "attrs==23.2.0", + "attrs==24.2.0", "atomicwrites-homeassistant==1.4.1", + "audioop-lts==0.2.1;python_version>='3.13'", "awesomeversion==24.6.0", "bcrypt==4.2.0", "certifi>=2021.5.30", @@ -42,7 +44,7 @@ dependencies = [ "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.81.1", + "hass-nabucasa==0.84.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.2", @@ -50,31 +52,34 @@ dependencies = [ "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "mashumaro==3.13.1", "PyJWT==2.9.0", # PyJWT has loose dependency. We want the latest one. 
"cryptography==43.0.1", - "Pillow==10.4.0", - "propcache==0.1.0", + "Pillow==11.0.0", + "propcache==0.2.0", "pyOpenSSL==24.2.1", - "orjson==3.10.7", + "orjson==3.10.11", "packaging>=23.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", "PyYAML==6.0.2", "requests==2.32.3", + "securetar==2024.2.1", "SQLAlchemy==2.0.31", + "standard-aifc==3.13.0;python_version>='3.13'", + "standard-telnetlib==3.13.0;python_version>='3.13'", "typing-extensions>=4.12.2,<5.0", "ulid-transform==1.0.2", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "uv==0.4.17", + "uv==0.5.0", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.13.1", + "yarl==1.17.1", + "webrtc-models==0.2.0", ] [project.urls] @@ -465,14 +470,14 @@ filterwarnings = [ # Ignore custom pytest marks "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.0.2/sunweg/plant.py#L96 - v3.0.2 - 2024-07-10 + # https://github.com/rokam/sunweg/blob/3.1.0/sunweg/plant.py#L96 - v3.1.0 - 2024-10-02 "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", # -- design choice 3rd party - # https://github.com/gwww/elkm1/blob/2.2.7/elkm1_lib/util.py#L8-L19 + # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", # https://github.com/allenporter/ical/pull/215 - # https://github.com/allenporter/ical/blob/8.1.1/ical/util.py#L21-L23 + # https://github.com/allenporter/ical/blob/8.2.0/ical/util.py#L21-L23 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", @@ -484,10 +489,13 @@ filterwarnings = [ "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", # -- tracked upstream / open PRs + # - pyOpenSSL v24.2.1 # https://github.com/certbot/certbot/issues/9828 - v2.11.0 + # https://github.com/certbot/certbot/issues/9992 "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://github.com/beetbox/mediafile/issues/67 - v0.12.0 - "ignore:'imghdr' is deprecated and slated for removal in Python 3.13:DeprecationWarning:mediafile", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:josepy.util", + # - other # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 # https://github.com/foxel/python_ndms2_client/pull/8 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", @@ -524,8 +532,8 @@ filterwarnings = [ "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:onvif.client", # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", - # https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1 - "ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning:zeep.utils", + # https://github.com/cereal2nd/velbus-aio/pull/126 - >2024.10.0 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", # -- fixed for Python 3.13 # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 @@ -533,8 +541,9 @@ filterwarnings = [ # -- other # Locale changes might take some time to resolve upstream + # https://github.com/Squachen/micloud/blob/v_0.6/micloud/micloud.py#L35 - v0.6 - 2022-12-08 "ignore:'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15:DeprecationWarning:micloud.micloud", - # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 + # https://github.com/MatsNl/pyatag/issues/11 - v0.3.7.1 - 2023-10-09 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pyatag.gateway", # https://github.com/lidatong/dataclasses-json/issues/328 # https://github.com/lidatong/dataclasses-json/pull/351 @@ -542,14 +551,19 @@ filterwarnings = [ # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 # https://github.com/martonperei/emulated_roku "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", - # https://github.com/thecynic/pylutron - v0.2.15 + # https://github.com/w1ll1am23/pyeconet/blob/v0.1.23/src/pyeconet/api.py#L38 - v0.1.23 - 2024-10-08 + "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pyeconet.api", + # https://github.com/thecynic/pylutron - v0.2.16 - 2024-10-22 "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", - # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 + # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", + # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 + "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", + "ignore:getReadersFromUrls is deprecated. 
Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 "ignore:This function will be removed in future versions of pint:DeprecationWarning:pyweatherflowudp.const", # Wrong stacklevel - # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 + # https://bugs.launchpad.net/beautifulsoup/+bug/2034451 fixed in >4.12.3 "ignore:It looks like you're parsing an XML document using an HTML parser:UserWarning:html.parser", # New in aiohttp - v3.9.0 "ignore:It is recommended to use web.AppKey instances for keys:UserWarning:(homeassistant|tests|aiohttp_cors)", @@ -570,13 +584,10 @@ filterwarnings = [ "ignore:invalid escape sequence:SyntaxWarning:.*sanix", # https://pypi.org/project/sleekxmppfs/ - v1.4.1 - 2022-08-18 "ignore:invalid escape sequence:SyntaxWarning:.*sleekxmppfs.thirdparty.mini_dateutil", # codespell:ignore thirdparty - # https://pypi.org/project/vobject/ - v0.9.7 - 2024-03-25 - # https://github.com/py-vobject/vobject - "ignore:invalid escape sequence:SyntaxWarning:.*vobject.base", # - pkg_resources # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", - # https://pypi.org/project/habitipy/ - v0.3.1 - 2019-01-14 / 2024-04-28 + # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", @@ -584,14 +595,6 @@ filterwarnings = [ "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", - # https://pypi.org/project/velbus-aio/ - v2024.7.6 - 2024-07-31 - # https://github.com/Cereal2nd/velbus-aio/blob/2024.7.6/velbusaio/handler.py#L22 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", - # - pyOpenSSL v24.2.1 - # https://pypi.org/project/acme/ - v2.11.0 - 2024-06-06 - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://pypi.org/project/josepy/ - v1.14.0 - 2023-11-01 - "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:josepy.util", # -- Python 3.13 # HomeAssistant @@ -601,11 +604,11 @@ filterwarnings = [ # https://github.com/nextcord/nextcord/issues/1174 # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", - # https://pypi.org/project/SpeechRecognition/ - v3.10.4 - 2024-05-05 - # https://github.com/Uberi/speech_recognition/blob/3.10.4/speech_recognition/__init__.py#L7 + # https://pypi.org/project/SpeechRecognition/ - v3.11.0 - 2024-05-05 + # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 - # https://github.com/home-assistant-libs/voip-utils/blob/v0.2.0/voip_utils/rtp_audio.py#L3 + # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", # -- Python 3.13 - unmaintained projects, last release about 2+ years @@ -617,6 +620,17 @@ filterwarnings = [ # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", + # -- New in Python 3.13 + # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 + # https://github.com/kurtmckee/feedparser/issues/481 + "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", + # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib + "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", + # -- unmaintained projects, last release about 2+ years # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", @@ -627,7 +641,7 @@ filterwarnings = [ # https://pypi.org/project/directv/ - v0.4.0 - 2020-09-12 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:directv.directv", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:directv.models", - # https://pypi.org/project/foobot_async/ - v1.0.0 - 2020-11-24 + # https://pypi.org/project/foobot_async/ - v1.0.1 - 2024-08-16 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:foobot_async", # https://pypi.org/project/httpsig/ - v1.3.0 - 2018-11-28 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:httpsig", diff --git a/requirements.txt b/requirements.txt index 5431f5941b7f21..19f8ac9ee22ad3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,44 +4,48 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.1.0 -aiohttp==3.10.9 +aiohasupervisor==0.2.1 +aiohttp==3.11.0rc0 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.2.0 -attrs==23.2.0 +attrs==24.2.0 
atomicwrites-homeassistant==1.4.1 +audioop-lts==0.2.1;python_version>='3.13' awesomeversion==24.6.0 bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.1 fnv-hash-fast==1.0.2 -hass-nabucasa==0.81.1 +hass-nabucasa==0.84.0 httpx==0.27.2 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -mashumaro==3.13.1 PyJWT==2.9.0 cryptography==43.0.1 -Pillow==10.4.0 -propcache==0.1.0 +Pillow==11.0.0 +propcache==0.2.0 pyOpenSSL==24.2.1 -orjson==3.10.7 +orjson==3.10.11 packaging>=23.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 PyYAML==6.0.2 requests==2.32.3 +securetar==2024.2.1 SQLAlchemy==2.0.31 +standard-aifc==3.13.0;python_version>='3.13' +standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.4.17 +uv==0.5.0 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.13.1 +yarl==1.17.1 +webrtc-models==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index b8c55ac3e1635a..67c7c99114698e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -13,10 +13,10 @@ AIOSomecomfort==0.0.25 Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==3.0.4 +DoorBirdPy==3.0.8 # homeassistant.components.homekit -HAP-python==4.9.1 +HAP-python==4.9.2 # homeassistant.components.tasmota HATasmota==0.9.2 @@ -33,7 +33,7 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.4.0 +Pillow==11.0.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -45,7 +45,7 @@ ProgettiHWSW==0.1.3 # PyBluez==0.22 # homeassistant.components.cast -PyChromecast==14.0.4 +PyChromecast==14.0.5 # homeassistant.components.flick_electric PyFlick==0.0.2 @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.2 +PySwitchbot==0.51.0 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.34.0 +PyViCare==2.35.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -109,7 +109,7 @@ PyXiaomiGateway==0.14.3 RachioPy==1.1.0 # homeassistant.components.python_script -RestrictedPython==7.3 +RestrictedPython==7.4 # homeassistant.components.remember_the_milk RtmAPI==0.7.2 @@ -152,7 +152,7 @@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.23 +agent-py==0.0.24 # homeassistant.components.geo_json_events aio-geojson-generic-client==0.4 @@ -176,10 +176,10 @@ aio-georss-gdacs==0.10 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.6 +aioairzone-cloud==0.6.10 # homeassistant.components.airzone -aioairzone==0.9.3 +aioairzone==0.9.5 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -198,7 +198,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.9.3 +aioautomower==2024.10.3 # homeassistant.components.azure_devops aioazuredevops==2.2.1 @@ -210,7 +210,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.9.0 +aiocomelit==0.9.1 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 @@ -240,7 +240,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.0 +aioesphomeapi==27.0.1 # homeassistant.components.flo aioflo==2021.11.0 @@ -249,6 +249,7 @@ aioflo==2021.11.0 aioftp==0.21.3 # homeassistant.components.github 
+# homeassistant.components.iron_os aiogithubapi==24.6.0 # homeassistant.components.guardian @@ -258,10 +259,10 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.1.0 +aiohasupervisor==0.2.1 # homeassistant.components.homekit_controller -aiohomekit==3.2.3 +aiohomekit==3.2.6 # homeassistant.components.hue aiohue==4.7.3 @@ -315,10 +316,10 @@ aionut==4.3.3 aiooncue==0.3.7 # homeassistant.components.openexchangerates -aioopenexchangerates==0.6.2 +aioopenexchangerates==0.6.8 # homeassistant.components.nmap_tracker -aiooui==0.1.6 +aiooui==0.1.7 # homeassistant.components.pegel_online aiopegelonline==0.0.10 @@ -356,7 +357,7 @@ aioridwell==2024.01.0 aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==4.0.5 +aiorussound==4.1.0 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -365,7 +366,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.4.2 +aioshelly==12.0.1 # homeassistant.components.skybell aioskybell==22.7.0 @@ -380,10 +381,10 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.cambridge_audio -aiostreammagic==2.5.0 +aiostreammagic==2.8.4 # homeassistant.components.switcher_kis -aioswitcher==4.0.3 +aioswitcher==4.4.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -391,6 +392,9 @@ aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig aiotankerkoenig==0.4.2 +# homeassistant.components.tedee +aiotedee==0.2.20 + # homeassistant.components.tractive aiotractive==0.6.0 @@ -401,7 +405,7 @@ aiounifi==80 aiovlc==0.5.1 # homeassistant.components.vodafone_station -aiovodafone==0.6.0 +aiovodafone==0.6.1 # homeassistant.components.waqi aiowaqi==3.1.0 @@ -413,7 +417,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.0 +aiowithings==3.1.1 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -425,7 +429,7 @@ airgradient==0.9.1 airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.1 +airthings-ble==0.9.2 # homeassistant.components.airthings airthings-cloud==0.2.0 @@ -464,7 +468,7 @@ anthemav==1.4.1 anthropic==0.31.2 # homeassistant.components.weatherkit -apple_weatherkit==1.1.2 +apple_weatherkit==1.1.3 # homeassistant.components.apprise apprise==1.9.0 @@ -514,13 +518,20 @@ asyncsleepiq==1.5.2 # atenpdu==0.3.2 # homeassistant.components.aurora -auroranoaa==0.0.3 +auroranoaa==0.0.5 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 # homeassistant.components.autarco -autarco==3.0.0 +autarco==3.1.0 + +# homeassistant.components.husqvarna_automower_ble +automower-ble==0.2.0 + +# homeassistant.components.generic +# homeassistant.components.stream +av==13.1.0 # homeassistant.components.avea # avea==1.5.1 @@ -529,10 +540,10 @@ autarco==3.0.0 # avion==0.10 # homeassistant.components.axis -axis==62 +axis==63 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.1 +ayla-iot-unofficial==1.4.3 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -568,7 +579,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.3 +bimmer-connected[china]==0.16.4 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -628,7 +639,7 @@ boto3==1.34.131 botocore==1.34.131 # homeassistant.components.bring -bring-api==0.9.0 +bring-api==0.9.1 # homeassistant.components.broadlink broadlink==0.19.0 @@ -694,7 +705,7 @@ connect-box==0.3.1 construct==2.10.68 # homeassistant.components.utility_meter 
-croniter==2.0.2 +cronsim==2.6 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -724,7 +735,7 @@ debugpy==1.8.6 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.4.0 +deebot-client==8.4.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -849,7 +860,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.1.9 +eq3btsmart==1.2.1 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -930,22 +941,22 @@ freesms==0.2.0 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor -fritzconnection[qr]==1.13.2 +fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.6.7 +fyta_cli==0.6.10 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.3 +gardena-bluetooth==1.4.4 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.1.5 +gcal-sync==6.2.0 # homeassistant.components.geniushub geniushub-client==0.7.1 @@ -973,7 +984,7 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.4 # homeassistant.components.gios -gios==4.0.0 +gios==5.0.0 # homeassistant.components.gitter gitterpy==0.1.7 @@ -982,7 +993,7 @@ gitterpy==0.1.7 glances-api==0.8.0 # homeassistant.components.go2rtc -go2rtc-client==0.0.1b0 +go2rtc-client==0.1.0 # homeassistant.components.goalzero goalzero==0.2.2 @@ -1007,7 +1018,7 @@ google-cloud-texttospeech==2.17.2 google-generativeai==0.8.2 # homeassistant.components.nest -google-nest-sdm==5.0.1 +google-nest-sdm==6.1.5 # homeassistant.components.google_photos google-photos-library-api==0.12.1 @@ -1016,7 +1027,7 @@ google-photos-library-api==0.12.1 googlemaps==2.5.1 # homeassistant.components.slide -goslide-api==0.5.1 +goslide-api==0.7.0 # homeassistant.components.tailwind gotailwind==0.2.4 @@ -1025,7 +1036,7 @@ gotailwind==0.2.4 govee-ble==0.40.0 # homeassistant.components.govee_light_local -govee-local-api==1.5.2 +govee-local-api==1.5.3 # homeassistant.components.remote_rpi_gpio gpiozero==1.6.2 @@ -1055,17 +1066,13 @@ gspread==5.5.0 gstreamer-player==1.1.2 # homeassistant.components.profiler -guppy3==3.1.4.post1 +guppy3==3.1.4.post1;python_version<'3.13' # homeassistant.components.iaqualink h2==4.1.0 -# homeassistant.components.generic -# homeassistant.components.stream -ha-av==10.1.1 - # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.0 +ha-ffmpeg==3.2.2 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -1074,13 +1081,13 @@ ha-iotawattpy==0.1.2 ha-philipsjs==3.2.2 # homeassistant.components.habitica -habitipy==0.3.1 +habitipy==0.3.3 # homeassistant.components.bluetooth -habluetooth==3.5.0 +habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.81.1 +hass-nabucasa==0.84.0 # homeassistant.components.splunk hass-splunk==0.1.1 @@ -1092,7 +1099,7 @@ hassil==1.7.4 hdate==0.10.9 # homeassistant.components.heatmiser -heatmiserV3==1.1.18 +heatmiserV3==2.0.3 # homeassistant.components.here_travel_time here-routing==1.0.1 @@ -1117,13 +1124,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.57 +holidays==0.60 # homeassistant.components.frontend -home-assistant-frontend==20241002.2 +home-assistant-frontend==20241106.2 # homeassistant.components.conversation -home-assistant-intents==2024.10.2 +home-assistant-intents==2024.11.6 # homeassistant.components.home_connect homeconnect==0.8.0 @@ -1138,10 +1145,10 @@ horimote==0.4.1 httplib2==0.20.4 # 
homeassistant.components.huawei_lte -huawei-lte-api==1.7.3 +huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.10 +huum==0.7.11;python_version<'3.13' # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -1179,7 +1186,7 @@ iglo==1.2.7 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.5 +imgw_pib==1.0.6 # homeassistant.components.incomfort incomfort-client==0.6.3-1 @@ -1249,10 +1256,10 @@ knx-frontend==2024.9.10.221729 konnected==1.2.0 # homeassistant.components.kraken -krakenex==2.1.0 +krakenex==2.2.2 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.2 +lacrosse-view==1.0.3 # homeassistant.components.eufy lakeside==0.13 @@ -1261,7 +1268,7 @@ lakeside==0.13 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.1.6 +lcn-frontend==0.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1273,7 +1280,7 @@ leaone-ble==0.1.0 led-ble==1.0.2 # homeassistant.components.lektrico -lektricowifi==0.0.42 +lektricowifi==0.0.43 # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1302,9 +1309,6 @@ linear-garage-door==0.2.9 # homeassistant.components.linode linode-api==4.1.9b1 -# homeassistant.components.lamarzocco -lmcloud==1.2.3 - # homeassistant.components.google_maps locationsharinglib==5.0.1 @@ -1381,7 +1385,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.2 +monzopy==1.4.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 @@ -1390,17 +1394,20 @@ mopeka-iot-ble==0.8.0 motionblinds==0.6.25 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.1 +motionblindsble==0.1.2 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.4.1.8.8 +mozart-api==4.1.1.116.0 # homeassistant.components.mullvad mullvad-api==1.0.0 +# homeassistant.components.music_assistant +music-assistant-client==1.0.5 + # homeassistant.components.tts mutagen==1.47.0 @@ -1450,7 +1457,7 @@ nextdns==3.3.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.3.9 +nice-go==0.3.10 # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1484,10 +1491,10 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.26.4 +numpy==2.1.3 # homeassistant.components.nyt_games -nyt_games==0.4.3 +nyt_games==0.4.4 # homeassistant.components.oasa_telematics oasatelematics==0.3 @@ -1547,7 +1554,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.8.2 +opower==0.8.6 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1612,7 +1619,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.4.0 +plugwise==1.5.0 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1644,7 +1651,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.0.0 +psutil==6.1.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1713,7 +1720,7 @@ pyCEC==0.5.2 pyControl4==1.2.0 # homeassistant.components.duotecno -pyDuotecno==2024.9.0 +pyDuotecno==2024.10.1 # homeassistant.components.electrasmart pyElectra==1.2.4 @@ -1731,7 +1738,7 @@ pyRFXtrx==0.31.1 pySDCP==1 # homeassistant.components.tibber -pyTibber==0.30.2 +pyTibber==0.30.8 # homeassistant.components.dlink pyW215==0.7.0 @@ -1783,7 +1790,7 @@ pybbox==0.0.5-alpha pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==1.0.3 +pyblu==1.0.4 # homeassistant.components.neato 
pybotvac==0.0.25 @@ -1816,7 +1823,7 @@ pycomfoconnect==0.5.1 pycoolmasternet-async==0.2.2 # homeassistant.components.radio_browser -pycountry==23.12.11 +pycountry==24.6.1 # homeassistant.components.microsoft pycsspeechtts==1.0.8 @@ -1831,10 +1838,10 @@ pydaikin==2.13.7 pydanfossair==0.1.0 # homeassistant.components.deako -pydeako==0.4.0 +pydeako==0.5.4 # homeassistant.components.deconz -pydeconz==116 +pydeconz==118 # homeassistant.components.delijn pydelijn==1.1.0 @@ -1861,7 +1868,7 @@ pyebox==1.1.4 pyecoforest==0.4.0 # homeassistant.components.econet -pyeconet==0.1.22 +pyeconet==0.1.23 # homeassistant.components.ista_ecotrend pyecotrend-ista==3.3.1 @@ -1879,7 +1886,7 @@ pyegps==0.2.5 pyeiscp==0.0.7 # homeassistant.components.emoncms -pyemoncms==0.0.7 +pyemoncms==0.1.1 # homeassistant.components.enphase_envoy pyenphase==1.22.0 @@ -1900,7 +1907,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.8 +pyfibaro==0.8.0 # homeassistant.components.fido pyfido==2.1.2 @@ -1966,7 +1973,7 @@ pyintesishome==1.8.0 pyipma==3.0.7 # homeassistant.components.ipp -pyipp==0.16.0 +pyipp==0.17.0 # homeassistant.components.iqvia pyiqvia==2022.04.0 @@ -2016,6 +2023,9 @@ pykwb==0.0.8 # homeassistant.components.lacrosse pylacrosse==0.4 +# homeassistant.components.lamarzocco +pylamarzocco==1.2.3 + # homeassistant.components.lastfm pylast==5.1.0 @@ -2038,7 +2048,7 @@ pylitterbot==2023.5.0 pylutron-caseta==0.21.1 # homeassistant.components.lutron -pylutron==0.2.15 +pylutron==0.2.16 # homeassistant.components.mailgun pymailgunner==1.4 @@ -2077,7 +2087,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.0 +pynecil==0.2.1 # homeassistant.components.netgear pynetgear==0.10.10 @@ -2088,6 +2098,9 @@ pynetio==0.1.9.1 # homeassistant.components.nobo_hub pynobo==1.8.1 +# homeassistant.components.nordpool +pynordpool==0.2.2 + # homeassistant.components.nuki pynuki==1.6.3 @@ -2113,7 +2126,7 @@ pyombi==0.1.10 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.1.1 +pyopenweathermap==0.2.1 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -2125,7 +2138,7 @@ pyoppleio-legacy==1.0.8 pyosoenergyapi==1.1.4 # homeassistant.components.opentherm_gw -pyotgw==2.2.1 +pyotgw==2.2.2 # homeassistant.auth.mfa_modules.notify # homeassistant.auth.mfa_modules.totp @@ -2138,6 +2151,9 @@ pyoverkiz==1.14.1 # homeassistant.components.onewire pyownet==0.10.0.post1 +# homeassistant.components.palazzetti +pypalazzetti==0.1.11 + # homeassistant.components.elv pypca==0.0.7 @@ -2268,13 +2284,13 @@ pyspcwebgw==0.7.0 pyspeex-noise==1.0.2 # homeassistant.components.squeezebox -pysqueezebox==0.9.3 +pysqueezebox==0.10.0 # homeassistant.components.stiebel_eltron pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuez==0.2.0 +pysuezV2==1.3.1 # homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -2282,9 +2298,6 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 -# homeassistant.components.tedee -pytedee-async==0.2.20 - # homeassistant.components.thinkingcleaner pythinkingcleaner==0.0.3 @@ -2298,7 +2311,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==0.6.2 +python-bsblan==1.2.1 # homeassistant.components.clementine python-clementine-remote==1.0.1 @@ -2346,10 +2359,10 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.4 +python-kasa[speedups]==0.7.7 # homeassistant.components.linkplay 
-python-linkplay==0.0.15 +python-linkplay==0.0.20 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2386,7 +2399,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.6.0 +python-roborock==2.7.2 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -2395,7 +2408,7 @@ python-smarttub==0.0.36 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.6 +python-tado==0.17.7 # homeassistant.components.technove python-technove==1.3.1 @@ -2419,7 +2432,7 @@ pytomorrowio==0.3.6 pytouchline==0.7 # homeassistant.components.touchline_sl -pytouchlinesl==0.1.7 +pytouchlinesl==0.1.8 # homeassistant.components.traccar # homeassistant.components.traccar_server @@ -2483,13 +2496,13 @@ pywilight==0.0.74 pywizlight==0.5.14 # homeassistant.components.wmspro -pywmspro==0.2.0 +pywmspro==0.2.1 # homeassistant.components.ws66i pyws66i==1.1 # homeassistant.components.xeoma -pyxeoma==1.4.1 +pyxeoma==1.4.2 # homeassistant.components.yardian pyyardian==1.1.1 @@ -2513,7 +2526,7 @@ qnapstats==0.4.0 quantum-gateway==0.0.8 # homeassistant.components.radio_browser -radios==0.3.1 +radios==0.3.2 # homeassistant.components.radiotherm radiotherm==2.1.0 @@ -2540,7 +2553,7 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.11 +reolink-aio==0.10.4 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2549,7 +2562,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.7 +ring-doorbell==0.9.9 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2616,7 +2629,7 @@ sendgrid==6.8.2 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.12.4 +sense-energy==0.13.3 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2625,7 +2638,7 @@ sensirion-ble==0.1.1 sensorpro-ble==0.5.3 # homeassistant.components.sensorpush -sensorpush-ble==1.6.2 +sensorpush-ble==1.7.1 # homeassistant.components.sensoterra sensoterra==2.0.1 @@ -2676,13 +2689,13 @@ smhi-pkg==1.0.18 snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.4 +soco==0.30.6 # homeassistant.components.solaredge_local solaredge-local==0.2.3 # homeassistant.components.solarlog -solarlog_cli==0.3.1 +solarlog_cli==0.3.2 # homeassistant.components.solax solax==3.1.1 @@ -2700,7 +2713,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotipy==2.23.0 +spotifyaio==0.8.8 # homeassistant.components.sql sqlparse==0.5.0 @@ -2791,7 +2804,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.7.8 +tesla-fleet-api==0.8.4 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2799,6 +2812,9 @@ tesla-powerwall==0.5.2 # homeassistant.components.tesla_wall_connector tesla-wall-connector==1.0.2 +# homeassistant.components.teslemetry +teslemetry-stream==0.4.2 + # homeassistant.components.tessie tessie-api==0.1.1 @@ -2814,6 +2830,9 @@ thermopro-ble==0.10.0 # homeassistant.components.thingspeak thingspeak==1.0.0 +# homeassistant.components.lg_thinq +thinqconnect==1.0.0 + # homeassistant.components.tikteck tikteck==0.4 @@ -2839,7 +2858,7 @@ total-connect-client==2024.5 tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.2 +tplink-omada-client==1.4.3 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2854,7 +2873,7 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya 
-tuya-device-sharing-sdk==0.1.9 +tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu twentemilieu==2.0.1 @@ -2872,7 +2891,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.3.1 +uiprotect==6.4.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2887,13 +2906,13 @@ unifi_ap==0.0.1 unifiled==0.11 # homeassistant.components.zha -universal-silabs-flasher==0.0.22 +universal-silabs-flasher==0.0.24 # homeassistant.components.upb upb-lib==0.5.8 # homeassistant.components.upcloud -upcloud-api==2.5.1 +upcloud-api==2.6.0 # homeassistant.components.huawei_lte # homeassistant.components.syncthru @@ -2913,7 +2932,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.6 +velbus-aio==2024.10.0 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2958,11 +2977,17 @@ waterfurnace==1.1.0 # homeassistant.components.weatherflow_cloud weatherflow4py==1.0.6 +# homeassistant.components.cisco_webex_teams +webexpythonsdk==2.0.1 + +# homeassistant.components.nasweb +webio-api==0.1.8 + # homeassistant.components.webmin webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.09.23 +weheat==2024.11.02 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2989,13 +3014,13 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.32.0 +xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.2.0 +xknx==3.3.0 # homeassistant.components.knx -xknxproject==3.8.0 +xknxproject==3.8.1 # homeassistant.components.fritz # homeassistant.components.rest @@ -3035,7 +3060,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.09.27 +yt-dlp[default]==2024.11.04 # homeassistant.components.zamg zamg==0.3.6 @@ -3044,16 +3069,16 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.135.0 +zeroconf==0.136.0 # homeassistant.components.zeversolar -zeversolar==0.3.1 +zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.34 +zha==0.0.37 # homeassistant.components.zhong_hong -zhong-hong-hvac==1.0.12 +zhong-hong-hvac==1.0.13 # homeassistant.components.ziggo_mediabox_xl ziggo-mediabox-xl==1.1.0 @@ -3062,7 +3087,7 @@ ziggo-mediabox-xl==1.1.0 zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.58.1 +zwave-js-server-python==0.59.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index 56e4b0e2eb26e0..166fd965e2c645 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,17 +7,17 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.3.4 +astroid==3.3.5 coverage==7.6.1 freezegun==1.5.1 +license-expression==30.4.0 mock-open==1.4.0 -mypy-dev==1.12.0a5 +mypy-dev==1.14.0a2 pre-commit==4.0.0 -pydantic==1.10.18 +pydantic==1.10.19 pylint==3.3.1 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 -pip-licenses==5.0.0 pytest-asyncio==0.24.0 pytest-aiohttp==1.0.5 pytest-cov==5.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index dd326719413807..048f0ac7d76016 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -13,10 +13,10 @@ AIOSomecomfort==0.0.25 Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==3.0.4 +DoorBirdPy==3.0.8 # homeassistant.components.homekit -HAP-python==4.9.1 +HAP-python==4.9.2 # homeassistant.components.tasmota HATasmota==0.9.2 @@ -33,7 +33,7 @@ Mastodon.py==1.8.1 # 
homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.4.0 +Pillow==11.0.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -42,7 +42,7 @@ PlexAPI==4.15.16 ProgettiHWSW==0.1.3 # homeassistant.components.cast -PyChromecast==14.0.4 +PyChromecast==14.0.5 # homeassistant.components.flick_electric PyFlick==0.0.2 @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.2 +PySwitchbot==0.51.0 # homeassistant.components.syncthru PySyncThru==0.7.10 @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.34.0 +PyViCare==2.35.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -103,7 +103,7 @@ PyXiaomiGateway==0.14.3 RachioPy==1.1.0 # homeassistant.components.python_script -RestrictedPython==7.3 +RestrictedPython==7.4 # homeassistant.components.remember_the_milk RtmAPI==0.7.2 @@ -140,7 +140,7 @@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.23 +agent-py==0.0.24 # homeassistant.components.geo_json_events aio-geojson-generic-client==0.4 @@ -164,10 +164,10 @@ aio-georss-gdacs==0.10 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.6 +aioairzone-cloud==0.6.10 # homeassistant.components.airzone -aioairzone==0.9.3 +aioairzone==0.9.5 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -186,7 +186,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.9.3 +aioautomower==2024.10.3 # homeassistant.components.azure_devops aioazuredevops==2.2.1 @@ -198,7 +198,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.9.0 +aiocomelit==0.9.1 # homeassistant.components.dhcp aiodhcpwatcher==1.0.2 @@ -228,12 +228,13 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.0 +aioesphomeapi==27.0.1 # homeassistant.components.flo aioflo==2021.11.0 # homeassistant.components.github +# homeassistant.components.iron_os aiogithubapi==24.6.0 # homeassistant.components.guardian @@ -243,10 +244,10 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.1.0 +aiohasupervisor==0.2.1 # homeassistant.components.homekit_controller -aiohomekit==3.2.3 +aiohomekit==3.2.6 # homeassistant.components.hue aiohue==4.7.3 @@ -297,10 +298,10 @@ aionut==4.3.3 aiooncue==0.3.7 # homeassistant.components.openexchangerates -aioopenexchangerates==0.6.2 +aioopenexchangerates==0.6.8 # homeassistant.components.nmap_tracker -aiooui==0.1.6 +aiooui==0.1.7 # homeassistant.components.pegel_online aiopegelonline==0.0.10 @@ -338,7 +339,7 @@ aioridwell==2024.01.0 aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==4.0.5 +aiorussound==4.1.0 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -347,7 +348,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.4.2 +aioshelly==12.0.1 # homeassistant.components.skybell aioskybell==22.7.0 @@ -362,10 +363,10 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.cambridge_audio -aiostreammagic==2.5.0 +aiostreammagic==2.8.4 # homeassistant.components.switcher_kis -aioswitcher==4.0.3 +aioswitcher==4.4.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -373,6 +374,9 @@ aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig aiotankerkoenig==0.4.2 +# 
homeassistant.components.tedee +aiotedee==0.2.20 + # homeassistant.components.tractive aiotractive==0.6.0 @@ -383,7 +387,7 @@ aiounifi==80 aiovlc==0.5.1 # homeassistant.components.vodafone_station -aiovodafone==0.6.0 +aiovodafone==0.6.1 # homeassistant.components.waqi aiowaqi==3.1.0 @@ -395,7 +399,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.0 +aiowithings==3.1.1 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -407,7 +411,7 @@ airgradient==0.9.1 airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.1 +airthings-ble==0.9.2 # homeassistant.components.airthings airthings-cloud==0.2.0 @@ -437,7 +441,7 @@ anthemav==1.4.1 anthropic==0.31.2 # homeassistant.components.weatherkit -apple_weatherkit==1.1.2 +apple_weatherkit==1.1.3 # homeassistant.components.apprise apprise==1.9.0 @@ -469,19 +473,26 @@ asyncarve==0.1.1 asyncsleepiq==1.5.2 # homeassistant.components.aurora -auroranoaa==0.0.3 +auroranoaa==0.0.5 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 # homeassistant.components.autarco -autarco==3.0.0 +autarco==3.1.0 + +# homeassistant.components.husqvarna_automower_ble +automower-ble==0.2.0 + +# homeassistant.components.generic +# homeassistant.components.stream +av==13.1.0 # homeassistant.components.axis -axis==62 +axis==63 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.1 +ayla-iot-unofficial==1.4.3 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -502,7 +513,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.3 +bimmer-connected[china]==0.16.4 # homeassistant.components.eq3btsmart # homeassistant.components.esphome @@ -548,7 +559,7 @@ boschshcpy==0.2.91 botocore==1.34.131 # homeassistant.components.bring -bring-api==0.9.0 +bring-api==0.9.1 # homeassistant.components.broadlink broadlink==0.19.0 @@ -590,7 +601,7 @@ colorthief==0.2.1 construct==2.10.68 # homeassistant.components.utility_meter -croniter==2.0.2 +cronsim==2.6 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -614,7 +625,7 @@ dbus-fast==2.24.3 debugpy==1.8.6 # homeassistant.components.ecovacs -deebot-client==8.4.0 +deebot-client==8.4.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -718,7 +729,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.1.9 +eq3btsmart==1.2.1 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -783,22 +794,22 @@ freebox-api==1.1.0 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor -fritzconnection[qr]==1.13.2 +fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.6.7 +fyta_cli==0.6.10 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.3 +gardena-bluetooth==1.4.4 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.1.5 +gcal-sync==6.2.0 # homeassistant.components.geniushub geniushub-client==0.7.1 @@ -826,13 +837,13 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.4 # homeassistant.components.gios -gios==4.0.0 +gios==5.0.0 # homeassistant.components.glances glances-api==0.8.0 # homeassistant.components.go2rtc -go2rtc-client==0.0.1b0 +go2rtc-client==0.1.0 # homeassistant.components.goalzero goalzero==0.2.2 @@ -857,7 +868,7 @@ google-cloud-texttospeech==2.17.2 google-generativeai==0.8.2 # homeassistant.components.nest 
-google-nest-sdm==5.0.1 +google-nest-sdm==6.1.5 # homeassistant.components.google_photos google-photos-library-api==0.12.1 @@ -872,7 +883,7 @@ gotailwind==0.2.4 govee-ble==0.40.0 # homeassistant.components.govee_light_local -govee-local-api==1.5.2 +govee-local-api==1.5.3 # homeassistant.components.gpsd gps3==0.33.3 @@ -893,17 +904,13 @@ growattServer==1.5.0 gspread==5.5.0 # homeassistant.components.profiler -guppy3==3.1.4.post1 +guppy3==3.1.4.post1;python_version<'3.13' # homeassistant.components.iaqualink h2==4.1.0 -# homeassistant.components.generic -# homeassistant.components.stream -ha-av==10.1.1 - # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.0 +ha-ffmpeg==3.2.2 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -912,13 +919,13 @@ ha-iotawattpy==0.1.2 ha-philipsjs==3.2.2 # homeassistant.components.habitica -habitipy==0.3.1 +habitipy==0.3.3 # homeassistant.components.bluetooth -habluetooth==3.5.0 +habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.81.1 +hass-nabucasa==0.84.0 # homeassistant.components.conversation hassil==1.7.4 @@ -943,13 +950,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.57 +holidays==0.60 # homeassistant.components.frontend -home-assistant-frontend==20241002.2 +home-assistant-frontend==20241106.2 # homeassistant.components.conversation -home-assistant-intents==2024.10.2 +home-assistant-intents==2024.11.6 # homeassistant.components.home_connect homeconnect==0.8.0 @@ -961,10 +968,10 @@ homematicip==1.1.2 httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.7.3 +huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.10 +huum==0.7.11;python_version<'3.13' # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -990,7 +997,7 @@ idasen-ha==2.6.2 ifaddr==0.2.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.5 +imgw_pib==1.0.6 # homeassistant.components.incomfort incomfort-client==0.6.3-1 @@ -1048,16 +1055,16 @@ knx-frontend==2024.9.10.221729 konnected==1.2.0 # homeassistant.components.kraken -krakenex==2.1.0 +krakenex==2.2.2 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.2 +lacrosse-view==1.0.3 # homeassistant.components.laundrify laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.1.6 +lcn-frontend==0.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1069,7 +1076,7 @@ leaone-ble==0.1.0 led-ble==1.0.2 # homeassistant.components.lektrico -lektricowifi==0.0.42 +lektricowifi==0.0.43 # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1083,9 +1090,6 @@ libsoundtouch==0.8 # homeassistant.components.linear_garage_door linear-garage-door==0.2.9 -# homeassistant.components.lamarzocco -lmcloud==1.2.3 - # homeassistant.components.london_underground london-tube-status==0.5 @@ -1150,7 +1154,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.2 +monzopy==1.4.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 @@ -1159,17 +1163,20 @@ mopeka-iot-ble==0.8.0 motionblinds==0.6.25 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.1 +motionblindsble==0.1.2 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.4.1.8.8 +mozart-api==4.1.1.116.0 # homeassistant.components.mullvad mullvad-api==1.0.0 +# homeassistant.components.music_assistant +music-assistant-client==1.0.5 + # homeassistant.components.tts mutagen==1.47.0 @@ -1210,7 +1217,7 @@ nextdns==3.3.0 nibe==2.11.0 # 
homeassistant.components.nice_go -nice-go==0.3.9 +nice-go==0.3.10 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1232,10 +1239,10 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.26.4 +numpy==2.1.3 # homeassistant.components.nyt_games -nyt_games==0.4.3 +nyt_games==0.4.4 # homeassistant.components.google oauth2client==4.1.3 @@ -1277,7 +1284,7 @@ openhomedevice==2.2.0 openwebifpy==4.2.7 # homeassistant.components.opower -opower==0.8.2 +opower==0.8.6 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1319,7 +1326,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.4.0 +plugwise==1.5.0 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1342,7 +1349,7 @@ prometheus-client==0.21.0 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.0.0 +psutil==6.1.0 # homeassistant.components.androidtv pure-python-adb[async]==0.3.0.dev0 @@ -1399,7 +1406,7 @@ pyCEC==0.5.2 pyControl4==1.2.0 # homeassistant.components.duotecno -pyDuotecno==2024.9.0 +pyDuotecno==2024.10.1 # homeassistant.components.electrasmart pyElectra==1.2.4 @@ -1408,7 +1415,7 @@ pyElectra==1.2.4 pyRFXtrx==0.31.1 # homeassistant.components.tibber -pyTibber==0.30.2 +pyTibber==0.30.8 # homeassistant.components.dlink pyW215==0.7.0 @@ -1451,7 +1458,7 @@ pybalboa==1.0.2 pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==1.0.3 +pyblu==1.0.4 # homeassistant.components.neato pybotvac==0.0.25 @@ -1469,7 +1476,7 @@ pycomfoconnect==0.5.1 pycoolmasternet-async==0.2.2 # homeassistant.components.radio_browser -pycountry==23.12.11 +pycountry==24.6.1 # homeassistant.components.microsoft pycsspeechtts==1.0.8 @@ -1478,10 +1485,10 @@ pycsspeechtts==1.0.8 pydaikin==2.13.7 # homeassistant.components.deako -pydeako==0.4.0 +pydeako==0.5.4 # homeassistant.components.deconz -pydeconz==116 +pydeconz==118 # homeassistant.components.dexcom pydexcom==0.2.3 @@ -1499,7 +1506,7 @@ pydroid-ipcam==2.0.0 pyecoforest==0.4.0 # homeassistant.components.econet -pyeconet==0.1.22 +pyeconet==0.1.23 # homeassistant.components.ista_ecotrend pyecotrend-ista==3.3.1 @@ -1510,8 +1517,11 @@ pyefergy==22.5.0 # homeassistant.components.energenie_power_sockets pyegps==0.2.5 +# homeassistant.components.onkyo +pyeiscp==0.0.7 + # homeassistant.components.emoncms -pyemoncms==0.0.7 +pyemoncms==0.1.1 # homeassistant.components.enphase_envoy pyenphase==1.22.0 @@ -1526,7 +1536,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.8 +pyfibaro==0.8.0 # homeassistant.components.fido pyfido==2.1.2 @@ -1580,7 +1590,7 @@ pyinsteon==1.6.3 pyipma==3.0.7 # homeassistant.components.ipp -pyipp==0.16.0 +pyipp==0.17.0 # homeassistant.components.iqvia pyiqvia==2022.04.0 @@ -1618,6 +1628,9 @@ pykrakenapi==0.1.8 # homeassistant.components.kulersky pykulersky==0.5.2 +# homeassistant.components.lamarzocco +pylamarzocco==1.2.3 + # homeassistant.components.lastfm pylast==5.1.0 @@ -1640,7 +1653,7 @@ pylitterbot==2023.5.0 pylutron-caseta==0.21.1 # homeassistant.components.lutron -pylutron==0.2.15 +pylutron==0.2.16 # homeassistant.components.mailgun pymailgunner==1.4 @@ -1670,7 +1683,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.0 +pynecil==0.2.1 # homeassistant.components.netgear pynetgear==0.10.10 @@ -1678,6 +1691,9 @@ pynetgear==0.10.10 # homeassistant.components.nobo_hub pynobo==1.8.1 +# homeassistant.components.nordpool 
+pynordpool==0.2.2 + # homeassistant.components.nuki pynuki==1.6.3 @@ -1700,7 +1716,7 @@ pyoctoprintapi==0.1.12 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.1.1 +pyopenweathermap==0.2.1 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -1709,7 +1725,7 @@ pyopnsense==0.4.0 pyosoenergyapi==1.1.4 # homeassistant.components.opentherm_gw -pyotgw==2.2.1 +pyotgw==2.2.2 # homeassistant.auth.mfa_modules.notify # homeassistant.auth.mfa_modules.totp @@ -1722,6 +1738,9 @@ pyoverkiz==1.14.1 # homeassistant.components.onewire pyownet==0.10.0.post1 +# homeassistant.components.palazzetti +pypalazzetti==0.1.11 + # homeassistant.components.lcn pypck==0.7.24 @@ -1800,6 +1819,9 @@ pysmartapp==0.3.5 # homeassistant.components.smartthings pysmartthings==0.7.8 +# homeassistant.components.smarty +pysmarty2==0.10.1 + # homeassistant.components.edl21 pysml==0.0.12 @@ -1822,10 +1844,10 @@ pyspcwebgw==0.7.0 pyspeex-noise==1.0.2 # homeassistant.components.squeezebox -pysqueezebox==0.9.3 +pysqueezebox==0.10.0 # homeassistant.components.suez_water -pysuez==0.2.0 +pysuezV2==1.3.1 # homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -1833,9 +1855,6 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 -# homeassistant.components.tedee -pytedee-async==0.2.20 - # homeassistant.components.motionmount python-MotionMount==2.2.0 @@ -1843,7 +1862,7 @@ python-MotionMount==2.2.0 python-awair==0.2.4 # homeassistant.components.bsblan -python-bsblan==0.6.2 +python-bsblan==1.2.1 # homeassistant.components.ecobee python-ecobee-api==0.2.20 @@ -1867,10 +1886,10 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.4 +python-kasa[speedups]==0.7.7 # homeassistant.components.linkplay -python-linkplay==0.0.15 +python-linkplay==0.0.20 # homeassistant.components.matter python-matter-server==6.6.0 @@ -1901,7 +1920,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.6.0 +python-roborock==2.7.2 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -1910,7 +1929,7 @@ python-smarttub==0.0.36 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.6 +python-tado==0.17.7 # homeassistant.components.technove python-technove==1.3.1 @@ -1925,7 +1944,7 @@ pytile==2023.12.0 pytomorrowio==0.3.6 # homeassistant.components.touchline_sl -pytouchlinesl==0.1.7 +pytouchlinesl==0.1.8 # homeassistant.components.traccar # homeassistant.components.traccar_server @@ -1983,7 +2002,7 @@ pywilight==0.0.74 pywizlight==0.5.14 # homeassistant.components.wmspro -pywmspro==0.2.0 +pywmspro==0.2.1 # homeassistant.components.ws66i pyws66i==1.1 @@ -2004,7 +2023,7 @@ qingping-ble==0.10.0 qnapstats==0.4.0 # homeassistant.components.radio_browser -radios==0.3.1 +radios==0.3.2 # homeassistant.components.radiotherm radiotherm==2.1.0 @@ -2025,13 +2044,13 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.11 +reolink-aio==0.10.4 # homeassistant.components.rflink rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.7 +ring-doorbell==0.9.9 # homeassistant.components.roku rokuecp==0.19.3 @@ -2077,7 +2096,7 @@ securetar==2024.2.1 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.12.4 +sense-energy==0.13.3 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2086,7 +2105,7 @@ sensirion-ble==0.1.1 sensorpro-ble==0.5.3 # homeassistant.components.sensorpush 
-sensorpush-ble==1.6.2 +sensorpush-ble==1.7.1 # homeassistant.components.sensoterra sensoterra==2.0.1 @@ -2125,10 +2144,10 @@ smhi-pkg==1.0.18 snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.4 +soco==0.30.6 # homeassistant.components.solarlog -solarlog_cli==0.3.1 +solarlog_cli==0.3.2 # homeassistant.components.solax solax==3.1.1 @@ -2146,7 +2165,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotipy==2.23.0 +spotifyaio==0.8.8 # homeassistant.components.sql sqlparse==0.5.0 @@ -2213,7 +2232,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.7.8 +tesla-fleet-api==0.8.4 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2221,6 +2240,9 @@ tesla-powerwall==0.5.2 # homeassistant.components.tesla_wall_connector tesla-wall-connector==1.0.2 +# homeassistant.components.teslemetry +teslemetry-stream==0.4.2 + # homeassistant.components.tessie tessie-api==0.1.1 @@ -2230,6 +2252,9 @@ thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 +# homeassistant.components.lg_thinq +thinqconnect==1.0.0 + # homeassistant.components.tilt_ble tilt-ble==0.2.3 @@ -2246,7 +2271,7 @@ toonapi==0.3.0 total-connect-client==2024.5 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.2 +tplink-omada-client==1.4.3 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2261,7 +2286,7 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.1.9 +tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu twentemilieu==2.0.1 @@ -2279,7 +2304,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.3.1 +uiprotect==6.4.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2288,13 +2313,13 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.zha -universal-silabs-flasher==0.0.22 +universal-silabs-flasher==0.0.24 # homeassistant.components.upb upb-lib==0.5.8 # homeassistant.components.upcloud -upcloud-api==2.5.1 +upcloud-api==2.6.0 # homeassistant.components.huawei_lte # homeassistant.components.syncthru @@ -2314,7 +2339,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.6 +velbus-aio==2024.10.0 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2350,11 +2375,14 @@ watchdog==2.3.1 # homeassistant.components.weatherflow_cloud weatherflow4py==1.0.6 +# homeassistant.components.nasweb +webio-api==0.1.8 + # homeassistant.components.webmin webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.09.23 +weheat==2024.11.02 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2378,13 +2406,13 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.32.0 +xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.2.0 +xknx==3.3.0 # homeassistant.components.knx -xknxproject==3.8.0 +xknxproject==3.8.1 # homeassistant.components.fritz # homeassistant.components.rest @@ -2418,22 +2446,22 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.09.27 +yt-dlp[default]==2024.11.04 # homeassistant.components.zamg zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.135.0 +zeroconf==0.136.0 # homeassistant.components.zeversolar -zeversolar==0.3.1 +zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.34 +zha==0.0.37 # 
homeassistant.components.zwave_js -zwave-js-server-python==0.58.1 +zwave-js-server-python==0.59.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index addc8fa0e85798..23f584dd0dec05 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.6.9 +ruff==0.7.3 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 7787578902cac7..c5611069bf5b8b 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -58,8 +58,16 @@ # will be included in requirements_all_{action}.txt OVERRIDDEN_REQUIREMENTS_ACTIONS = { - "pytest": {"exclude": set(), "include": {"python-gammu"}}, - "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "pytest": { + "exclude": set(), + "include": {"python-gammu"}, + "markers": {}, + }, + "wheels_aarch64": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, # Pandas has issues building on armhf, it is expected they # will drop the platform in the near future (they consider it # "flimsy" on 386). The following packages depend on pandas, @@ -67,10 +75,23 @@ "wheels_armhf": { "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"}, "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_armv7": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_amd64": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_i386": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, }, - "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, - "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, - "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, } IGNORE_PIN = ("colorlog>2.1,<3", "urllib3") @@ -96,9 +117,9 @@ # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.66.2 -grpcio-status==1.66.2 -grpcio-reflection==1.66.2 +grpcio==1.67.1 +grpcio-status==1.67.1 +grpcio-reflection==1.67.1 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -118,7 +139,7 @@ # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.6.0 +anyio==4.6.2.post1 h11==0.14.0 httpcore==1.0.5 @@ -127,7 +148,8 @@ hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==1.26.4 +numpy==2.1.3 +pandas~=2.2.3 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -138,7 +160,10 @@ # Required to avoid breaking (#101042). # v2 has breaking changes (#99218). -pydantic==1.10.18 +pydantic==1.10.19 + +# Required for Python 3.12.4 compatibility (#119223). 
+mashumaro>=3.13.1 # Breaks asyncio # https://github.com/pubnub/python/issues/130 @@ -154,7 +179,7 @@ # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==5.28.2 +protobuf==5.28.3 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -176,15 +201,12 @@ # We want to skip the binary wheels for the 'charset-normalizer' packages. # They are build with mypyc, but causes issues with our wheel builder. # In order to do so, we need to constrain the version. -charset-normalizer==3.2.0 +charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for -# Roborock, NAM, Brother, and GIOS. +# NAM, Brother, and GIOS. dacite>=1.7.0 -# Musle wheels for pandas 2.2.0 cannot be build for any architecture. -pandas==2.1.4 - # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x chacha20poly1305-reuseable>=0.13.0 @@ -202,6 +224,10 @@ # https://github.com/jd/tenacity/issues/471 tenacity!=8.4.0 + +# 5.0.0 breaks Timeout as a context manager +# TypeError: 'Timeout' object does not support the context manager protocol +async-timeout==4.0.3 """ GENERATED_MESSAGE = ( @@ -306,6 +332,10 @@ def process_action_requirement(req: str, action: str) -> str: return req if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL: return f"# {req}" + if markers := OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["markers"].get( + normalized_package_name, None + ): + return f"{req};{markers}" return req diff --git a/script/hassfest/config_schema.py b/script/hassfest/config_schema.py index 06ef206512795c..6b863ab9ecde32 100644 --- a/script/hassfest/config_schema.py +++ b/script/hassfest/config_schema.py @@ -10,7 +10,7 @@ CONFIG_SCHEMA_IGNORE = { # Configuration under the homeassistant key is a special case, it's handled by - # conf_util.async_process_ha_core_config already during bootstrapping, not by + # core_config.async_process_ha_core_config already during bootstrapping, not by # a schema in the homeassistant integration. HOMEASSISTANT_DOMAIN, } diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index 66796d4dd0d2b4..0c7f4f11a8c008 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -44,6 +44,15 @@ def _add_reference(self, reference_domain: str) -> None: assert self._cur_fil_dir self.referenced[self._cur_fil_dir].add(reference_domain) + def visit_If(self, node: ast.If) -> None: + """Visit If node.""" + if isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING": + # Ignore TYPE_CHECKING block + return + + # Have it visit other kids + self.generic_visit(node) + def visit_ImportFrom(self, node: ast.ImportFrom) -> None: """Visit ImportFrom node.""" if node.module is None: @@ -112,10 +121,10 @@ def visit_Attribute(self, node: ast.Attribute) -> None: "alert", "automation", "conversation", + "default_config", "device_automation", "frontend", "group", - "hassio", "homeassistant", "input_boolean", "input_button", diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index 213f21a7a3e334..083cdaba1a90e9 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -20,7 +20,8 @@ # Synchronize with homeassistant/core.py:async_stop ENV \ S6_SERVICES_GRACETIME={timeout} \ - UV_SYSTEM_PYTHON=true + UV_SYSTEM_PYTHON=true \ + UV_NO_CACHE=true ARG QEMU_CPU @@ -78,7 +79,7 @@ _HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest. 
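The "markers" field added to OVERRIDDEN_REQUIREMENTS_ACTIONS, together with the new branch in process_action_requirement (both in script/gen_requirements_all.py above), lets an action-specific requirements file carry a PEP 508 environment marker for individual packages. Below is a rough standalone sketch of just that branch; the example marker entry is hypothetical (the dicts added by this patch start out empty), and the exclude/include handling and real package-name normalization are omitted.

# Sketch only: the marker entry below is hypothetical and name normalization is
# simplified; the real script also handles per-action exclude/include sets.
OVERRIDDEN_REQUIREMENTS_ACTIONS = {
    "wheels_armhf": {
        "exclude": set(),
        "include": set(),
        "markers": {"somepackage": "python_version<'3.13'"},
    },
}


def process_action_requirement(req: str, action: str) -> str:
    """Append the configured environment marker, if any, to a pinned requirement."""
    package = req.partition("==")[0].lower()
    if markers := OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["markers"].get(package):
        return f"{req};{markers}"
    return req


# process_action_requirement("somepackage==1.0.0", "wheels_armhf")
# returns "somepackage==1.0.0;python_version<'3.13'"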
# # To update, run python3 -m script.hassfest -p docker -FROM python:alpine +FROM python:3.12-alpine ENV \ UV_SYSTEM_PYTHON=true \ @@ -111,7 +112,7 @@ LABEL "com.github.actions.color"="gray-dark" """ -_GO2RTC_VERSION = "1.9.4" +_GO2RTC_VERSION = "1.9.6" def _get_package_versions(file: Path, packages: set[str]) -> dict[str, str]: diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index f1194e37e2f2a4..9bad1e8aecc048 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -1,7 +1,7 @@ # Automatically generated by hassfest. # # To update, run python3 -m script.hassfest -p docker -FROM python:alpine +FROM python:3.12-alpine ENV \ UV_SYSTEM_PYTHON=true \ @@ -14,7 +14,7 @@ WORKDIR "/github/workspace" COPY . /usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.4.17,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \ # Required for PyTurboJPEG apk add --no-cache libturbojpeg \ && uv pip install \ @@ -22,8 +22,8 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.4.17,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.6.9 \ - PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.10.2 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.3 \ + PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.2 hassil==1.7.4 home-assistant-intents==2024.11.6 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 8643e34725f4a8..6d2f4087f59af4 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -88,12 +88,10 @@ class QualityScale(IntEnum): "logbook", "logger", "lovelace", - "map", "media_source", "my", "onboarding", "panel_custom", - "panel_iframe", "plant", "profiler", "proxy", diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index de42c964ddf729..25fe875e43788f 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -43,6 +43,7 @@ "local_partial_types": "true", "strict_equality": "true", "no_implicit_optional": "true", + "report_deprecated_as_error": "true", "warn_incomplete_stub": "true", "warn_redundant_casts": "true", "warn_unused_configs": "true", diff --git a/script/hassfest/requirements.py b/script/hassfest/requirements.py index 3df25f3284a8a6..998593d20ec5a1 100644 --- a/script/hassfest/requirements.py +++ b/script/hassfest/requirements.py @@ -28,12 +28,6 @@ PIP_REGEX = re.compile(r"^(--.+\s)?([-_\.\w\d]+.*(?:==|>=|<=|~=|!=|<|>|===)?.*$)") PIP_VERSION_RANGE_SEPARATOR = re.compile(r"^(==|>=|<=|~=|!=|<|>|===)?(.*)$") -IGNORE_STANDARD_LIBRARY_VIOLATIONS = { - # Integrations which have standard library requirements. 
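The visit_If hook added to the hassfest dependency collector (script/hassfest/dependencies.py above) keeps imports that only exist under TYPE_CHECKING from being counted as integration references. A minimal, self-contained illustration of the same AST trick, with a printing visitor standing in for the real ImportCollector:

# Toy visitor: prints the modules of "from ... import ..." statements while
# skipping anything guarded by "if TYPE_CHECKING:".
import ast


class ImportPrinter(ast.NodeVisitor):
    def visit_If(self, node: ast.If) -> None:
        if isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING":
            return  # ignore the TYPE_CHECKING block entirely
        self.generic_visit(node)

    def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
        print(node.module)


source = (
    "from typing import TYPE_CHECKING\n"
    "from homeassistant.components import light\n"
    "if TYPE_CHECKING:\n"
    "    from homeassistant.components import switch\n"
)
ImportPrinter().visit(ast.parse(source))  # prints: typing, homeassistant.components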
- "slide", - "suez_water", -} - def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle requirements for integrations.""" @@ -144,10 +138,7 @@ def validate_requirements(integration: Integration) -> None: if req in sys.stdlib_module_names: standard_library_violations.add(req) - if ( - standard_library_violations - and integration.domain not in IGNORE_STANDARD_LIBRARY_VIOLATIONS - ): + if standard_library_violations: integration.add_error( "requirements", ( @@ -155,18 +146,6 @@ def validate_requirements(integration: Integration) -> None: "are not compatible with the Python standard library" ), ) - elif ( - not standard_library_violations - and integration.domain in IGNORE_STANDARD_LIBRARY_VIOLATIONS - ): - integration.add_error( - "requirements", - ( - f"Integration {integration.domain} no longer has requirements which are" - " incompatible with the Python standard library, remove it from " - "IGNORE_STANDARD_LIBRARY_VIOLATIONS" - ), - ) @cache diff --git a/script/hassfest/services.py b/script/hassfest/services.py index 92fca14d373c25..8c9ab5c0c0b1db 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -75,6 +75,14 @@ def unique_field_validator(fields: Any) -> Any: } ) +CUSTOM_INTEGRATION_SECTION_SCHEMA = vol.Schema( + { + vol.Optional("collapsed"): bool, + vol.Required("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), + } +) + + CORE_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Schema( { @@ -105,7 +113,17 @@ def unique_field_validator(fields: Any) -> Any: vol.Optional("target"): vol.Any( selector.TargetSelector.CONFIG_SCHEMA, None ), - vol.Optional("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), + vol.Optional("fields"): vol.All( + vol.Schema( + { + str: vol.Any( + CUSTOM_INTEGRATION_FIELD_SCHEMA, + CUSTOM_INTEGRATION_SECTION_SCHEMA, + ) + } + ), + unique_field_validator, + ), } ), None, diff --git a/script/json_schemas/manifest_schema.json b/script/json_schemas/manifest_schema.json new file mode 100644 index 00000000000000..40f08fd2c856bd --- /dev/null +++ b/script/json_schemas/manifest_schema.json @@ -0,0 +1,391 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Home Assistant integration manifest", + "description": "The manifest for a Home Assistant integration", + "type": "object", + "if": { + "properties": { "integration_type": { "const": "virtual" } }, + "required": ["integration_type"] + }, + "then": { + "oneOf": [ + { + "properties": { + "domain": { + "description": "The domain identifier of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", + "examples": ["mobile_app"], + "type": "string", + "pattern": "[0-9a-z_]+" + }, + "name": { + "description": "The friendly name of the integration.", + "type": "string" + }, + "integration_type": { + "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", + "const": "virtual" + }, + "iot_standards": { + "description": "The IoT standards which supports devices or services of this virtual integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#iot-standards", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "enum": ["homekit", "zigbee", "zwave"] + } + } + }, + "additionalProperties": false, + "required": ["domain", "name", "integration_type", "iot_standards"] + }, + { + "properties": { + "domain": { + "description": "The domain identifier of the 
integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", + "examples": ["mobile_app"], + "type": "string", + "pattern": "[0-9a-z_]+" + }, + "name": { + "description": "The friendly name of the integration.", + "type": "string" + }, + "integration_type": { + "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", + "const": "virtual" + }, + "supported_by": { + "description": "The integration which supports devices or services of this virtual integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#supported-by", + "type": "string" + } + }, + "additionalProperties": false, + "required": ["domain", "name", "integration_type", "supported_by"] + } + ] + }, + "else": { + "properties": { + "domain": { + "description": "The domain identifier of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#domain", + "examples": ["mobile_app"], + "type": "string", + "pattern": "[0-9a-z_]+" + }, + "name": { + "description": "The friendly name of the integration.", + "type": "string" + }, + "integration_type": { + "description": "The integration type.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-type", + "type": "string", + "default": "hub", + "enum": [ + "device", + "entity", + "hardware", + "helper", + "hub", + "service", + "system" + ] + }, + "config_flow": { + "description": "Whether the integration is configurable from the UI.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#config-flow", + "type": "boolean" + }, + "mqtt": { + "description": "A list of topics to subscribe for the discovery of devices via MQTT.\nThis requires to specify \"mqtt\" in either the \"dependencies\" or \"after_dependencies\".\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#mqtt", + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "zeroconf": { + "description": "A list containing service domains to search for devices to discover via Zeroconf. Items can either be strings, which discovers all devices in the specific service domain, and/or objects which include filters. (useful for generic service domains like _http._tcp.local.)\nA device is discovered if it matches one of the items, but inside the individual item all properties have to be matched.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#zeroconf", + "type": "array", + "minItems": 1, + "items": { + "anyOf": [ + { + "type": "string", + "pattern": "^.*\\.local\\.$", + "description": "Service domain to search for devices." + }, + { + "type": "object", + "properties": { + "type": { + "description": "The service domain to search for devices.", + "examples": ["_http._tcp.local."], + "type": "string", + "pattern": "^.*\\.local\\.$" + }, + "name": { + "description": "The name or name pattern of the devices to filter.", + "type": "string" + }, + "properties": { + "description": "The properties of the Zeroconf advertisement to filter.", + "type": "object", + "additionalProperties": { "type": "string" } + } + }, + "required": ["type"], + "additionalProperties": false + } + ] + }, + "uniqueItems": true + }, + "ssdp": { + "description": "A list of matchers to find devices discoverable via SSDP/UPnP. 
In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#ssdp", + "type": "array", + "minItems": 1, + "items": { + "description": "A matcher for the SSDP discovery.", + "type": "object", + "properties": { + "st": { + "type": "string" + }, + "deviceType": { + "type": "string" + }, + "manufacturer": { + "type": "string" + }, + "modelDescription": { + "type": "string" + } + }, + "additionalProperties": { "type": "string" } + } + }, + "bluetooth": { + "description": "A list of matchers to find devices discoverable via Bluetooth. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#bluetooth", + "type": "array", + "minItems": 1, + "items": { + "description": "A matcher for the bluetooth discovery", + "type": "object", + "properties": { + "connectable": { + "description": "Whether the device needs to be connected to or it works with just advertisement data.", + "type": "boolean" + }, + "local_name": { + "description": "The name or a name pattern of the device to match.", + "type": "string", + "pattern": "^([^*]+|[^*]{3,}[*].*)$" + }, + "service_uuid": { + "description": "The 128-bit service data UUID to match.", + "type": "string", + "pattern": "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" + }, + "service_data_uuid": { + "description": "The 16-bit service data UUID to match, converted into the corresponding 128-bit UUID by replacing the 3rd and 4th byte of `00000000-0000-1000-8000-00805f9b34fb` with the 16-bit UUID.", + "examples": ["0000fd3d-0000-1000-8000-00805f9b34fb"], + "type": "string", + "pattern": "0000[0-9a-f]{4}-0000-1000-8000-00805f9b34fb" + }, + "manufacturer_id": { + "description": "The Manufacturer ID to match.", + "type": "integer" + }, + "manufacturer_data_start": { + "description": "The start bytes of the manufacturer data to match.", + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "minimum": 0, + "maximum": 255 + } + } + }, + "additionalProperties": false + }, + "uniqueItems": true + }, + "homekit": { + "description": "A list of model names to find devices which are discoverable via HomeKit. A device is discovered if the model name of the device starts with any of the specified model names.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#homekit", + "type": "object", + "properties": { + "models": { + "description": "The model names to search for.", + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "required": ["models"], + "additionalProperties": false + }, + "dhcp": { + "description": "A list of matchers to find devices discoverable via DHCP. 
In order to be discovered, the device has to match all properties of any of the matchers.\nYou can specify an item with \"registered_devices\" set to true to check for devices with MAC addresses specified in the device registry.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#dhcp", + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "properties": { + "registered_devices": { + "description": "Whether the MAC addresses of devices in the device registry should be used for discovery, useful if the discovery is used to update the IP address of already registered devices.", + "const": true + } + }, + "additionalProperties": false + }, + { + "type": "object", + "properties": { + "hostname": { + "description": "The hostname or hostname pattern to match.", + "type": "string" + }, + "macaddress": { + "description": "The MAC address or MAC address pattern to match.", + "type": "string", + "maxLength": 12 + } + }, + "additionalProperties": false + } + ] + }, + "uniqueItems": true + }, + "usb": { + "description": "A list of matchers to find devices discoverable via USB. In order to be discovered, the device has to match all properties of any of the matchers.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#usb", + "type": "array", + "uniqueItems": true, + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "vid": { + "description": "The vendor ID to match.", + "type": "string", + "pattern": "[0-9A-F]{4}" + }, + "pid": { + "description": "The product ID to match.", + "type": "string", + "pattern": "[0-9A-F]{4}" + }, + "description": { + "description": "The USB device description to match.", + "type": "string" + }, + "manufacturer": { + "description": "The manufacturer to match.", + "type": "string" + }, + "serial_number": { + "description": "The serial number to match.", + "type": "string" + }, + "known_devices": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }, + "documentation": { + "description": "The website containing the documentation for the integration. It has to be in the format \"https://www.home-assistant.io/integrations/[domain]\"\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#documentation", + "type": "string", + "pattern": "^https://www.home-assistant.io/integrations/[0-9a-z_]+$", + "format": "uri" + }, + "quality_scale": { + "description": "The quality scale of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-quality-scale", + "type": "string", + "enum": ["internal", "silver", "gold", "platinum"] + }, + "requirements": { + "description": "The PyPI package requirements for the integration. The package has to be pinned to a specific version.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#requirements", + "type": "array", + "items": { + "type": "string", + "pattern": ".+==.+" + }, + "uniqueItems": true + }, + "dependencies": { + "description": "A list of integrations which need to be loaded before this integration can be set up.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#dependencies", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true + }, + "after_dependencies": { + "description": "A list of integrations which need to be loaded before this integration is set up when it is configured. 
The integration will still be set up when the \"after_dependencies\" are not configured.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#after-dependencies", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true + }, + "codeowners": { + "description": "A list of GitHub usernames or GitHub team names of the integration owners.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#code-owners", + "type": "array", + "minItems": 0, + "items": { + "type": "string", + "pattern": "^@.+$" + }, + "uniqueItems": true + }, + "loggers": { + "description": "A list of logger names used by the requirements.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#loggers", + "type": "array", + "minItems": 1, + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "disabled": { + "description": "The reason for the integration being disabled.", + "type": "string" + }, + "iot_class": { + "description": "The IoT class of the integration, describing how the integration connects to the device or service.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#iot-class", + "type": "string", + "enum": [ + "assumed_state", + "cloud_polling", + "cloud_push", + "local_polling", + "local_push", + "calculated" + ] + }, + "single_config_entry": { + "description": "Whether the integration only supports a single config entry.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#single-config-entry-only", + "const": true + } + }, + "additionalProperties": false, + "required": ["domain", "name", "codeowners", "documentation"], + "dependencies": { + "mqtt": { + "anyOf": [ + { "required": ["dependencies"] }, + { "required": ["after_dependencies"] } + ] + } + } + } +} diff --git a/script/licenses.py b/script/licenses.py index 7a2ddc814de0a9..464a2fc456b4b2 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -2,14 +2,36 @@ from __future__ import annotations -from argparse import ArgumentParser +from argparse import ArgumentParser, Namespace from collections.abc import Sequence from dataclasses import dataclass +from importlib import metadata import json from pathlib import Path import sys +from typing import TypedDict, cast from awesomeversion import AwesomeVersion +from license_expression import ( + AND, + OR, + ExpressionError, + LicenseExpression, + LicenseSymbol, + get_spdx_licensing, +) + +licensing = get_spdx_licensing() + + +class PackageMetadata(TypedDict): + """Package metadata.""" + + name: str + version: str + license_expression: str | None + license_metadata: str | None + license_classifier: list[str] @dataclass @@ -17,19 +39,60 @@ class PackageDefinition: """Package definition.""" license: str + license_expression: str | None + license_metadata: str | None + license_classifier: list[str] name: str version: AwesomeVersion @classmethod - def from_dict(cls, data: dict[str, str]) -> PackageDefinition: - """Create a package definition from a dictionary.""" + def from_dict(cls, data: PackageMetadata) -> PackageDefinition: + """Create a package definition from PackageMetadata.""" + if not (license_str := "; ".join(data["license_classifier"])): + license_str = data["license_metadata"] or "UNKNOWN" return cls( - license=data["License"], - name=data["Name"], - version=AwesomeVersion(data["Version"]), + license=license_str, + license_expression=data["license_expression"], + license_metadata=data["license_metadata"], + license_classifier=data["license_classifier"], 
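Stepping back to the new script/json_schemas/manifest_schema.json above: one way to sanity-check a manifest against it is the third-party jsonschema package. This is purely an illustration (nothing in this patch uses jsonschema), and the example_hub manifest below is made up.

# Illustration only: validate a made-up manifest against the new schema file.
import json
from pathlib import Path

import jsonschema  # third-party helper, not part of this patch

schema = json.loads(Path("script/json_schemas/manifest_schema.json").read_text())
manifest = {
    "domain": "example_hub",  # hypothetical integration
    "name": "Example Hub",
    "codeowners": ["@example"],
    "documentation": "https://www.home-assistant.io/integrations/example_hub",
    "integration_type": "hub",
    "iot_class": "local_polling",
    "config_flow": True,
    "requirements": ["example-lib==1.0.0"],
}
jsonschema.validate(instance=manifest, schema=schema)  # raises ValidationError if invalid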
+ name=data["name"], + version=AwesomeVersion(data["version"]), ) +# Incomplete list of OSI approved SPDX identifiers +# Add more as needed, see https://spdx.org/licenses/ +OSI_APPROVED_LICENSES_SPDX = { + "0BSD", + "AFL-2.1", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "Apache-2.0", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-3-Clause", + "EPL-1.0", + "EPL-2.0", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "HPND", + "ISC", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "MIT", + "MIT-CMU", + "MPL-1.1", + "MPL-2.0", + "PSF-2.0", + "Unlicense", + "Zlib", + "ZPL-2.1", +} + OSI_APPROVED_LICENSES = { "Academic Free License (AFL)", "Apache Software License", @@ -98,13 +161,10 @@ def from_dict(cls, data: dict[str, str]) -> PackageDefinition: "Zero-Clause BSD (0BSD)", "Zope Public License", "zlib/libpng License", + # End license classifier "Apache License", "MIT", - "apache-2.0", - "GPL-3.0", - "GPLv3+", "MPL2", - "MPL-2.0", "Apache 2", "LGPL v3", "BSD", @@ -112,29 +172,16 @@ def from_dict(cls, data: dict[str, str]) -> PackageDefinition: "GPLv3", "Eclipse Public License v2.0", "ISC", - "GPL-2.0-only", - "mit", "GNU General Public License v3", - "Unlicense", - "Apache-2", "GPLv2", - "Python-2.0.1", } EXCEPTIONS = { "PyMicroBot", # https://github.com/spycle/pyMicroBot/pull/3 "PySwitchmate", # https://github.com/Danielhiversen/pySwitchmate/pull/16 "PyXiaomiGateway", # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201 - "aiocomelit", # https://github.com/chemelli74/aiocomelit/pull/138 "aioecowitt", # https://github.com/home-assistant-libs/aioecowitt/pull/180 - "aioopenexchangerates", # https://github.com/MartinHjelmare/aioopenexchangerates/pull/94 - "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 - "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 - "aiovodafone", # https://github.com/chemelli74/aiovodafone/pull/131 - "apple_weatherkit", # https://github.com/tjhorner/python-weatherkit/pull/3 - "asyncio", # PSF License "chacha20poly1305", # LGPL - "chacha20poly1305-reuseable", # Apache 2.0 or BSD 3-Clause "commentjson", # https://github.com/vaidik/commentjson/pull/55 "crownstone-cloud", # https://github.com/crownstone/crownstone-lib-python-cloud/pull/5 "crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 @@ -142,13 +189,9 @@ def from_dict(cls, data: dict[str, str]) -> PackageDefinition: "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 "enocean", # https://github.com/kipe/enocean/pull/142 - "gardena-bluetooth", # https://github.com/elupus/gardena-bluetooth/pull/11 - "heatmiserV3", # https://github.com/andylockran/heatmiserV3/pull/94 - "huum", # https://github.com/frwickst/pyhuum/pull/8 "imutils", # https://github.com/PyImageSearch/imutils/pull/292 "iso4217", # Public domain "kiwiki_client", # https://github.com/c7h/kiwiki_client/pull/6 - "krakenex", # https://github.com/veox/python3-krakenex/pull/145 "ld2410-ble", # https://github.com/930913/ld2410-ble/pull/7 "maxcube-api", # https://github.com/uebelack/python-maxcube-api/pull/48 "neurio", # https://github.com/jordanh/neurio-python/pull/13 @@ -159,14 +202,9 @@ def from_dict(cls, data: dict[str, str]) -> PackageDefinition: "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 "pyvera", # https://github.com/maximvelichko/pyvera/pull/164 - 
"pyxeoma", # https://github.com/jeradM/pyxeoma/pull/11 "repoze.lru", - "ruuvitag-ble", # https://github.com/Bluetooth-Devices/ruuvitag-ble/pull/10 - "sensirion-ble", # https://github.com/akx/sensirion-ble/pull/9 "sharp_aquos_rc", # https://github.com/jmoore987/sharp_aquos_rc/pull/14 "tapsaff", # https://github.com/bazwilliams/python-taps-aff/pull/5 - "vincenty", # Public domain - "zeversolar", # https://github.com/kvanzuijlen/zeversolar/pull/46 } TODO = { @@ -175,77 +213,201 @@ def from_dict(cls, data: dict[str, str]) -> PackageDefinition: ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? } +EXCEPTIONS_AND_TODOS = EXCEPTIONS.union(TODO) -def main(argv: Sequence[str] | None = None) -> int: - """Run the main script.""" - exit_code = 0 - parser = ArgumentParser() - parser.add_argument( - "path", - nargs="?", - metavar="PATH", - default="licenses.json", - help="Path to json licenses file", - ) +def check_licenses(args: CheckArgs) -> int: + """Check licenses are OSI approved.""" + exit_code = 0 + raw_licenses = json.loads(Path(args.path).read_text()) + license_status = { + pkg.name: (pkg, check_license_status(pkg)) + for data in raw_licenses + if (pkg := PackageDefinition.from_dict(data)) + } - argv = argv or sys.argv[1:] - args = parser.parse_args(argv) + for name, version in TODO.items(): + pkg, status = license_status.get(name, (None, None)) + if pkg is None or not (version < pkg.version): + continue + assert status is not None - raw_licenses = json.loads(Path(args.path).read_text()) - package_definitions = [PackageDefinition.from_dict(data) for data in raw_licenses] - for package in package_definitions: - previous_unapproved_version = TODO.get(package.name) - approved = False - for approved_license in OSI_APPROVED_LICENSES: - if approved_license in package.license: - approved = True - break - if previous_unapproved_version is not None: - if previous_unapproved_version < package.version: - if approved: - print( - "Approved license detected for " - f"{package.name}@{package.version}: {package.license}" - ) - print("Please remove the package from the TODO list.") - print() - else: - print( - "We could not detect an OSI-approved license for " - f"{package.name}@{package.version}: {package.license}" - ) - print() - exit_code = 1 - elif not approved and package.name not in EXCEPTIONS: + if status is True: + print( + "Approved license detected for " + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" + "Please remove the package from the TODO list.\n" + ) + else: print( "We could not detect an OSI-approved license for " - f"{package.name}@{package.version}: {package.license}" + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" + "Please update the package version on the TODO list.\n" ) - print() - exit_code = 1 - elif approved and package.name in EXCEPTIONS: + exit_code = 1 + + for pkg, status in license_status.values(): + if status is False and pkg.name not in EXCEPTIONS_AND_TODOS: print( - "Approved license detected for " - f"{package.name}@{package.version}: {package.license}" + "We could not detect an OSI-approved license for " + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" ) - print(f"Please remove the package from the EXCEPTIONS list: {package.name}") - print() exit_code = 1 - current_packages = {package.name for package in package_definitions} - for package in [*TODO.keys(), *EXCEPTIONS]: - if package not in current_packages: + if status is True and pkg.name in EXCEPTIONS: print( - f"Package {package} is tracked, but not used. 
Please remove from the licenses.py" - "file." + "Approved license detected for " + f"{pkg.name}@{pkg.version}: {get_license_str(pkg)}\n" + "Please remove the package from the EXCEPTIONS list.\n" ) - print() exit_code = 1 + + for name in EXCEPTIONS_AND_TODOS.difference(license_status): + print( + f"Package {name} is tracked, but not used. " + "Please remove it from the licenses.py file.\n" + ) + exit_code = 1 + return exit_code +def check_license_status(package: PackageDefinition) -> bool: + """Check if package licenses is OSI approved.""" + if package.license_expression: + # Prefer 'License-Expression' if it exists + return check_license_expression(package.license_expression) or False + + if ( + package.license_metadata + and (check := check_license_expression(package.license_metadata)) is not None + ): + # Check license metadata if it's a valid SPDX license expression + return check + + for approved_license in OSI_APPROVED_LICENSES: + if approved_license in package.license: + return True + return False + + +def check_license_expression(license_str: str) -> bool | None: + """Check if license expression is a valid and approved SPDX license string.""" + if license_str == "UNKNOWN" or "\n" in license_str: + # Ignore common errors for license metadata values + return None + + try: + expr = licensing.parse(license_str, validate=True) + except ExpressionError: + return None + return check_spdx_license(expr) + + +def check_spdx_license(expr: LicenseExpression) -> bool: + """Check a SPDX license expression.""" + if isinstance(expr, LicenseSymbol): + return expr.key in OSI_APPROVED_LICENSES_SPDX + if isinstance(expr, OR): + return any(check_spdx_license(arg) for arg in expr.args) + if isinstance(expr, AND): + return all(check_spdx_license(arg) for arg in expr.args) + return False + + +def get_license_str(package: PackageDefinition) -> str: + """Return license string.""" + return ( + f"{package.license_expression} -- {package.license_metadata} " + f"-- {package.license_classifier}" + ) + + +def extract_licenses(args: ExtractArgs) -> int: + """Extract license data for installed packages.""" + licenses = sorted( + [get_package_metadata(dist) for dist in list(metadata.distributions())], + key=lambda dist: dist["name"], + ) + Path(args.output_file).write_text(json.dumps(licenses, indent=2)) + return 0 + + +def get_package_metadata(dist: metadata.Distribution) -> PackageMetadata: + """Get package metadata for distribution.""" + return { + "name": dist.name, + "version": dist.version, + "license_expression": dist.metadata.get("License-Expression"), + "license_metadata": dist.metadata.get("License"), + "license_classifier": extract_license_classifier( + dist.metadata.get_all("Classifier") + ), + } + + +def extract_license_classifier(classifiers: list[str] | None) -> list[str]: + """Extract license from list of classifiers. + + E.g. 'License :: OSI Approved :: MIT License' -> 'MIT License'. + Filter out bare 'License :: OSI Approved'. 
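The SPDX handling introduced in script/licenses.py above comes down to parsing the expression with the license-expression package and walking the resulting tree: a bare symbol must be on the approved list, an OR needs at least one approved branch, and an AND needs all of them. A trimmed-down sketch, with OSI_APPROVED standing in for the full OSI_APPROVED_LICENSES_SPDX set:

# Stand-alone sketch; OSI_APPROVED is a tiny stand-in for the real approved set.
from license_expression import (
    AND,
    OR,
    LicenseExpression,
    LicenseSymbol,
    get_spdx_licensing,
)

licensing = get_spdx_licensing()
OSI_APPROVED = {"MIT", "Apache-2.0"}


def is_approved(expr: LicenseExpression) -> bool:
    if isinstance(expr, LicenseSymbol):
        return expr.key in OSI_APPROVED
    if isinstance(expr, OR):
        return any(is_approved(arg) for arg in expr.args)
    if isinstance(expr, AND):
        return all(is_approved(arg) for arg in expr.args)
    return False


print(is_approved(licensing.parse("MIT OR GPL-2.0-only", validate=True)))   # True
print(is_approved(licensing.parse("MIT AND GPL-2.0-only", validate=True)))  # False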
+ """ + return [ + license_classifier + for classifier in classifiers or () + if classifier.startswith("License") + and (license_classifier := classifier.rpartition(" :: ")[2]) + and license_classifier != "OSI Approved" + ] + + +class ExtractArgs(Namespace): + """Extract arguments.""" + + output_file: str + + +class CheckArgs(Namespace): + """Check arguments.""" + + path: str + + +def main(argv: Sequence[str] | None = None) -> int: + """Run the main script.""" + parser = ArgumentParser() + subparsers = parser.add_subparsers(title="Subcommands", required=True) + + parser_extract = subparsers.add_parser("extract") + parser_extract.set_defaults(action="extract") + parser_extract.add_argument( + "--output-file", + default="licenses.json", + help="Path to store the licenses file", + ) + + parser_check = subparsers.add_parser("check") + parser_check.set_defaults(action="check") + parser_check.add_argument( + "path", + nargs="?", + metavar="PATH", + default="licenses.json", + help="Path to json licenses file", + ) + + argv = argv or sys.argv[1:] + args = parser.parse_args(argv) + + if args.action == "extract": + args = cast(ExtractArgs, args) + return extract_licenses(args) + if args.action == "check": + args = cast(CheckArgs, args) + if (exit_code := check_licenses(args)) == 0: + print("All licenses are approved!") + return exit_code + return 0 + + if __name__ == "__main__": - exit_code = main() - if exit_code == 0: - print("All licenses are approved!") - sys.exit(exit_code) + sys.exit(main()) diff --git a/script/scaffold/templates/config_flow_oauth2/integration/api.py b/script/scaffold/templates/config_flow_oauth2/integration/api.py index 3f4aa3cfb82852..9516dd991224bb 100644 --- a/script/scaffold/templates/config_flow_oauth2/integration/api.py +++ b/script/scaffold/templates/config_flow_oauth2/integration/api.py @@ -49,7 +49,6 @@ def __init__( async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() + await self._oauth_session.async_ensure_token_valid() return self._oauth_session.token["access_token"] diff --git a/script/split_tests.py b/script/split_tests.py index e124f722552553..c64de46a0682c1 100755 --- a/script/split_tests.py +++ b/script/split_tests.py @@ -49,16 +49,27 @@ def split_tests(self, test_folder: TestFolder) -> None: test_folder.get_all_flatten(), reverse=True, key=lambda x: x.total_tests ) for tests in sorted_tests: - print(f"{tests.total_tests:>{digits}} tests in {tests.path}") if tests.added_to_bucket: # Already added to bucket continue + print(f"{tests.total_tests:>{digits}} tests in {tests.path}") smallest_bucket = min(self._buckets, key=lambda x: x.total_tests) + is_file = isinstance(tests, TestFile) if ( smallest_bucket.total_tests + tests.total_tests < self._tests_per_bucket - ) or isinstance(tests, TestFile): + ) or is_file: smallest_bucket.add(tests) + # Ensure all files from the same folder are in the same bucket + # to ensure that syrupy correctly identifies unused snapshots + if is_file: + for other_test in tests.parent.children.values(): + if other_test is tests or isinstance(other_test, TestFolder): + continue + print( + f"{other_test.total_tests:>{digits}} tests in {other_test.path} (same bucket)" + ) + smallest_bucket.add(other_test) # verify that all tests are added to a bucket if not test_folder.added_to_bucket: @@ -79,6 +90,7 @@ class TestFile: total_tests: int path: Path added_to_bucket: bool = field(default=False, init=False) + parent: 
TestFolder | None = field(default=None, init=False) def add_to_bucket(self) -> None: """Add test file to bucket.""" @@ -125,6 +137,7 @@ def __repr__(self) -> str: def add_test_file(self, file: TestFile) -> None: """Add test file to folder.""" path = file.path + file.parent = self relative_path = path.relative_to(self.path) if not relative_path.parts: raise ValueError("Path is not a child of this folder") diff --git a/tests/common.py b/tests/common.py index ad14481e38561f..8bd45e4d7f8f65 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1064,6 +1064,8 @@ async def start_reauth_flow( data: dict[str, Any] | None = None, ) -> ConfigFlowResult: """Start a reauthentication flow.""" + if self.entry_id not in hass.config_entries._entries: + raise ValueError("Config entry must be added to hass to start reauth flow") return await start_reauth_flow(hass, self, context, data) async def start_reconfigure_flow( @@ -1073,6 +1075,10 @@ async def start_reconfigure_flow( show_advanced_options: bool = False, ) -> ConfigFlowResult: """Start a reconfiguration flow.""" + if self.entry_id not in hass.config_entries._entries: + raise ValueError( + "Config entry must be added to hass to start reconfiguration flow" + ) return await hass.config_entries.flow.async_init( self.domain, context={ diff --git a/tests/components/abode/test_alarm_control_panel.py b/tests/components/abode/test_alarm_control_panel.py index 51e0ee46838cdd..025afa74b80660 100644 --- a/tests/components/abode/test_alarm_control_panel.py +++ b/tests/components/abode/test_alarm_control_panel.py @@ -3,7 +3,10 @@ from unittest.mock import PropertyMock, patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -11,9 +14,6 @@ SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -39,7 +39,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, ALARM_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED assert state.attributes.get(ATTR_DEVICE_ID) == "area_1" assert not state.attributes.get("battery_backup") assert not state.attributes.get("cellular_backup") @@ -75,7 +75,7 @@ async def test_set_alarm_away(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY async def test_set_alarm_home(hass: HomeAssistant) -> None: @@ -105,7 +105,7 @@ async def test_set_alarm_home(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME async def test_set_alarm_standby(hass: HomeAssistant) -> None: @@ -134,7 +134,7 @@ async def test_set_alarm_standby(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(DEVICE_ID) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_state_unknown(hass: 
HomeAssistant) -> None: diff --git a/tests/components/abode/test_config_flow.py b/tests/components/abode/test_config_flow.py index a37fb8cbe33eab..2abed387566f8d 100644 --- a/tests/components/abode/test_config_flow.py +++ b/tests/components/abode/test_config_flow.py @@ -10,7 +10,6 @@ import pytest from requests.exceptions import ConnectTimeout -from homeassistant.components.abode import config_flow from homeassistant.components.abode.const import CONF_POLLING, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME @@ -22,114 +21,110 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") -async def test_show_form(hass: HomeAssistant) -> None: - """Test that the form is served with no input.""" - flow = config_flow.AbodeFlowHandler() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - async def test_one_config_allowed(hass: HomeAssistant) -> None: """Test that only one Abode configuration is allowed.""" - flow = config_flow.AbodeFlowHandler() - flow.hass = hass - MockConfigEntry( domain=DOMAIN, data={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ).add_to_hass(hass) - step_user_result = await flow.async_step_user() - - assert step_user_result["type"] is FlowResultType.ABORT - assert step_user_result["reason"] == "single_instance_allowed" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" -async def test_invalid_credentials(hass: HomeAssistant) -> None: - """Test that invalid credentials throws an error.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - flow = config_flow.AbodeFlowHandler() - flow.hass = hass +async def test_user_flow(hass: HomeAssistant) -> None: + """Test user flow, with various errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + # Test that invalid credentials throws an error. with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException( (HTTPStatus.BAD_REQUEST, "auth error") ), ): - result = await flow.async_step_user(user_input=conf) - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_connection_auth_error(hass: HomeAssistant) -> None: - """Test other than invalid credentials throws an error.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - - flow = config_flow.AbodeFlowHandler() - flow.hass = hass + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "invalid_auth"} + # Test other than invalid credentials throws an error. 
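# A condensed sketch of the flow-driving pattern this rewritten test uses for
# every error case (all identifiers are taken from the hunks above; the inputs
# are the same placeholder credentials):
#
#     result = await hass.config_entries.flow.async_init(
#         DOMAIN, context={"source": SOURCE_USER}
#     )
#     result = await hass.config_entries.flow.async_configure(
#         result["flow_id"],
#         user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"},
#     )
#     # On failure the same flow stays open (FlowResultType.FORM with "errors"
#     # populated), so async_configure can simply be called again; on success
#     # the result type is FlowResultType.CREATE_ENTRY.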
with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException( (HTTPStatus.INTERNAL_SERVER_ERROR, "connection error") ), ): - result = await flow.async_step_user(user_input=conf) - assert result["errors"] == {"base": "cannot_connect"} - - -async def test_connection_error(hass: HomeAssistant) -> None: - """Test login throws an error if connection times out.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - - flow = config_flow.AbodeFlowHandler() - flow.hass = hass + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + # Test login throws an error if connection times out. with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=ConnectTimeout, ): - result = await flow.async_step_user(user_input=conf) - assert result["errors"] == {"base": "cannot_connect"} - + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} -async def test_step_user(hass: HomeAssistant) -> None: - """Test that the user step works.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + # Test success + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) with patch("homeassistant.components.abode.config_flow.Abode"): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=conf + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "user@email.com" - assert result["data"] == { - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_POLLING: False, - } + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "user@email.com" + assert result["data"] == { + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "password", + CONF_POLLING: False, + } async def test_step_mfa(hass: HomeAssistant) -> None: """Test that the MFA step works.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException(MFA_CODE_REQUIRED), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=conf + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "mfa" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mfa" with patch( "homeassistant.components.abode.config_flow.Abode", @@ -141,46 +136,51 @@ async def test_step_mfa(hass: HomeAssistant) -> None: result["flow_id"], user_input={"mfa_code": "123456"} ) - assert result["errors"] == {"base": "invalid_mfa_code"} + assert result["type"] is 
FlowResultType.FORM + assert result["step_id"] == "mfa" + assert result["errors"] == {"base": "invalid_mfa_code"} with patch("homeassistant.components.abode.config_flow.Abode"): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"mfa_code": "123456"} ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "user@email.com" - assert result["data"] == { - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_POLLING: False, - } + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "user@email.com" + assert result["data"] == { + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "password", + CONF_POLLING: False, + } async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth flow.""" - conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - entry = MockConfigEntry( domain=DOMAIN, unique_id="user@email.com", - data=conf, + data={CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"}, ) entry.add_to_hass(hass) - with patch("homeassistant.components.abode.config_flow.Abode"): - result = await entry.start_reauth_flow(hass) + result = await entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - with patch("homeassistant.config_entries.ConfigEntries.async_reload"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=conf, - ) + with ( + patch("homeassistant.components.abode.config_flow.Abode"), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "user@email.com", + CONF_PASSWORD: "new_password", + }, + ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" - assert len(hass.config_entries.async_entries()) == 1 + assert len(hass.config_entries.async_entries()) == 1 + assert entry.data[CONF_PASSWORD] == "new_password" diff --git a/tests/components/abode/test_cover.py b/tests/components/abode/test_cover.py index cdbec0ddf681b8..4a49648516deba 100644 --- a/tests/components/abode/test_cover.py +++ b/tests/components/abode/test_cover.py @@ -3,13 +3,12 @@ from unittest.mock import patch from homeassistant.components.abode import ATTR_DEVICE_ID -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, - STATE_CLOSED, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -34,7 +33,7 @@ async def test_attributes(hass: HomeAssistant) -> None: await setup_platform(hass, COVER_DOMAIN) state = hass.states.get(DEVICE_ID) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000007" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") diff --git a/tests/components/adguard/test_config_flow.py b/tests/components/adguard/test_config_flow.py index d493962611fc3a..6644a4ca20ff9b 100644 --- a/tests/components/adguard/test_config_flow.py +++ b/tests/components/adguard/test_config_flow.py @@ -4,7 +4,6 @@ from 
homeassistant import config_entries from homeassistant.components.adguard.const import DOMAIN -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_HOST, @@ -17,6 +16,7 @@ ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker diff --git a/tests/components/advantage_air/test_binary_sensor.py b/tests/components/advantage_air/test_binary_sensor.py index 13bbadb38f987e..d0088d96ba565c 100644 --- a/tests/components/advantage_air/test_binary_sensor.py +++ b/tests/components/advantage_air/test_binary_sensor.py @@ -1,10 +1,8 @@ """Test the Advantage Air Binary Sensor Platform.""" from datetime import timedelta -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from homeassistant.components.advantage_air import ADVANTAGE_AIR_SYNC_INTERVAL -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -70,22 +68,14 @@ async def test_binary_sensor_async_setup_entry( assert not hass.states.get(entity_id) mock_get.reset_mock() - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 2 + + with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() + + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 state = hass.states.get(entity_id) assert state @@ -101,22 +91,14 @@ async def test_binary_sensor_async_setup_entry( assert not hass.states.get(entity_id) mock_get.reset_mock() - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done() - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 2 + + with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() + + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 state = hass.states.get(entity_id) assert state diff --git a/tests/components/advantage_air/test_cover.py 
b/tests/components/advantage_air/test_cover.py index 4752601d9ad547..a9a3cc70c18504 100644 --- a/tests/components/advantage_air/test_cover.py +++ b/tests/components/advantage_air/test_cover.py @@ -9,8 +9,9 @@ SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, CoverDeviceClass, + CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -31,7 +32,7 @@ async def test_ac_cover( entity_id = "cover.myauto_zone_y" state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get("device_class") == CoverDeviceClass.DAMPER assert state.attributes.get("current_position") == 100 @@ -120,7 +121,7 @@ async def test_things_cover( thing_id = "200" state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get("device_class") == CoverDeviceClass.BLIND entry = entity_registry.async_get(entity_id) diff --git a/tests/components/advantage_air/test_sensor.py b/tests/components/advantage_air/test_sensor.py index 06243921a645c5..3ea368a59fbdb8 100644 --- a/tests/components/advantage_air/test_sensor.py +++ b/tests/components/advantage_air/test_sensor.py @@ -1,15 +1,13 @@ """Test the Advantage Air Sensor Platform.""" from datetime import timedelta -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from homeassistant.components.advantage_air import ADVANTAGE_AIR_SYNC_INTERVAL from homeassistant.components.advantage_air.const import DOMAIN as ADVANTAGE_AIR_DOMAIN from homeassistant.components.advantage_air.sensor import ( ADVANTAGE_AIR_SERVICE_SET_TIME_TO, ADVANTAGE_AIR_SET_COUNTDOWN_VALUE, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -124,23 +122,15 @@ async def test_sensor_platform_disabled_entity( assert not hass.states.get(entity_id) - entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) - await hass.async_block_till_done(wait_background_tasks=True) mock_get.reset_mock() - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 1 + with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1): + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done(wait_background_tasks=True) - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get.mock_calls) == 2 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mock_get.mock_calls) == 1 state = hass.states.get(entity_id) assert state diff --git a/tests/components/airgradient/conftest.py b/tests/components/airgradient/conftest.py index 1899e12c8aeaf0..395c5cd96a4399 100644 --- a/tests/components/airgradient/conftest.py +++ b/tests/components/airgradient/conftest.py @@ -1,7 +1,7 @@ """AirGradient tests configuration.""" from collections.abc import Generator -from 
unittest.mock import patch +from unittest.mock import AsyncMock, patch from airgradient import Config, Measures import pytest @@ -10,7 +10,6 @@ from homeassistant.const import CONF_HOST from tests.common import MockConfigEntry, load_fixture -from tests.components.smhi.common import AsyncMock @pytest.fixture diff --git a/tests/components/airgradient/snapshots/test_update.ambr b/tests/components/airgradient/snapshots/test_update.ambr index c639a97d5dde11..1f944bb528b4d8 100644 --- a/tests/components/airgradient/snapshots/test_update.ambr +++ b/tests/components/airgradient/snapshots/test_update.ambr @@ -37,6 +37,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/airgradient/icon.png', 'friendly_name': 'Airgradient Firmware', 'in_progress': False, @@ -47,6 +48,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.airgradient_firmware', diff --git a/tests/components/airthings_ble/__init__.py b/tests/components/airthings_ble/__init__.py index a736fa979e918d..add21b1067f771 100644 --- a/tests/components/airthings_ble/__init__.py +++ b/tests/components/airthings_ble/__init__.py @@ -49,7 +49,7 @@ def patch_airthings_ble(return_value=AirthingsDevice, side_effect=None): def patch_airthings_device_update(): """Patch airthings-ble device.""" return patch( - "homeassistant.components.airthings_ble.AirthingsBluetoothDeviceData.update_device", + "homeassistant.components.airthings_ble.coordinator.AirthingsBluetoothDeviceData.update_device", return_value=WAVE_DEVICE_INFO, ) diff --git a/tests/components/airtouch5/test_cover.py b/tests/components/airtouch5/test_cover.py index 295535cd95d8da..57a344e80181aa 100644 --- a/tests/components/airtouch5/test_cover.py +++ b/tests/components/airtouch5/test_cover.py @@ -17,9 +17,9 @@ SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_OPEN, + CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_CLOSED, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -118,26 +118,26 @@ async def _call_zone_status_callback(open_percentage: int) -> None: await _call_zone_status_callback(0.7) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 70 # Fully open await _call_zone_status_callback(1) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 # Fully closed await _call_zone_status_callback(0.0) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 # Partly reopened await _call_zone_status_callback(0.3) state = hass.states.get(COVER_ENTITY_ID) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 30 diff --git a/tests/components/airvisual/test_config_flow.py b/tests/components/airvisual/test_config_flow.py index e38fc64587e526..632bdb72eb4e06 100644 --- a/tests/components/airvisual/test_config_flow.py +++ b/tests/components/airvisual/test_config_flow.py 
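# The advantage_air hunks above shorten the disabled-entity reload wait by
# patching the module-level delay; the recurring pattern, condensed (all names
# appear verbatim in those hunks):
#
#     with patch("homeassistant.config_entries.RELOAD_AFTER_UPDATE_DELAY", 1):
#         entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None)
#         await hass.async_block_till_done()
#         async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2))
#         await hass.async_block_till_done(wait_background_tasks=True)
#     assert len(mock_get.mock_calls) == 1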
@@ -155,10 +155,6 @@ async def test_step_reauth( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - new_api_key = "defgh67890" result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index 693550a3e1c4a4..fb4f6530b1e053 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -220,6 +220,45 @@ }), ]), }), + dict({ + 'data': list([ + dict({ + 'air_demand': 0, + 'coldStage': 0, + 'coldStages': 0, + 'coolmaxtemp': 30, + 'coolmintemp': 15, + 'coolsetpoint': 20, + 'errors': list([ + ]), + 'floor_demand': 0, + 'heatStage': 0, + 'heatStages': 0, + 'heatmaxtemp': 30, + 'heatmintemp': 15, + 'heatsetpoint': 20, + 'humidity': 0, + 'maxTemp': 30, + 'minTemp': 15, + 'mode': 6, + 'modes': list([ + 1, + 2, + 3, + 4, + 5, + 6, + ]), + 'name': 'Aux Heat', + 'on': 1, + 'roomTemp': 22, + 'setpoint': 20, + 'systemID': 4, + 'units': 0, + 'zoneID': 1, + }), + ]), + }), ]), }), 'version': dict({ @@ -269,8 +308,8 @@ 'temp-set': 45, 'temp-unit': 0, }), - 'num-systems': 3, - 'num-zones': 7, + 'num-systems': 4, + 'num-zones': 8, 'systems': dict({ '1': dict({ 'available': True, @@ -320,6 +359,23 @@ ]), 'problems': False, }), + '4': dict({ + 'available': True, + 'full-name': 'Airzone [4] System', + 'id': 4, + 'master-system-zone': '4:1', + 'master-zone': 1, + 'mode': 6, + 'modes': list([ + 1, + 2, + 3, + 4, + 5, + 6, + ]), + 'problems': False, + }), }), 'version': '1.62', 'webserver': dict({ @@ -683,6 +739,46 @@ 'temp-step': 1.0, 'temp-unit': 1, }), + '4:1': dict({ + 'absolute-temp-max': 30.0, + 'absolute-temp-min': 15.0, + 'action': 5, + 'air-demand': False, + 'available': True, + 'cold-stage': 0, + 'cool-temp-max': 30.0, + 'cool-temp-min': 15.0, + 'cool-temp-set': 20.0, + 'demand': False, + 'double-set-point': False, + 'floor-demand': False, + 'full-name': 'Airzone [4:1] Aux Heat', + 'heat-stage': 0, + 'heat-temp-max': 30.0, + 'heat-temp-min': 15.0, + 'heat-temp-set': 20.0, + 'id': 1, + 'master': True, + 'mode': 6, + 'modes': list([ + 1, + 2, + 3, + 4, + 5, + 6, + ]), + 'name': 'Aux Heat', + 'on': True, + 'problems': False, + 'system': 4, + 'temp': 22.0, + 'temp-max': 30.0, + 'temp-min': 15.0, + 'temp-set': 20.0, + 'temp-step': 0.5, + 'temp-unit': 0, + }), }), }), }) diff --git a/tests/components/airzone/test_climate.py b/tests/components/airzone/test_climate.py index 0f23c151e0ef52..12a73a6a268dd4 100644 --- a/tests/components/airzone/test_climate.py +++ b/tests/components/airzone/test_climate.py @@ -225,6 +225,23 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25.0 assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 22.8 + state = hass.states.get("climate.aux_heat") + assert state.state == HVACMode.HEAT + assert state.attributes.get(ATTR_CURRENT_HUMIDITY) is None + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 22 + assert state.attributes.get(ATTR_HVAC_ACTION) == HVACAction.IDLE + assert state.attributes.get(ATTR_HVAC_MODES) == [ + HVACMode.OFF, + HVACMode.COOL, + HVACMode.HEAT, + HVACMode.FAN_ONLY, + HVACMode.DRY, + ] + assert state.attributes.get(ATTR_MAX_TEMP) == 30 + assert state.attributes.get(ATTR_MIN_TEMP) 
== 15 + assert state.attributes.get(ATTR_TARGET_TEMP_STEP) == API_TEMPERATURE_STEP + assert state.attributes.get(ATTR_TEMPERATURE) == 20.0 + HVAC_MOCK_CHANGED = copy.deepcopy(HVAC_MOCK) HVAC_MOCK_CHANGED[API_SYSTEMS][0][API_DATA][0][API_MAX_TEMP] = 25 HVAC_MOCK_CHANGED[API_SYSTEMS][0][API_DATA][0][API_MIN_TEMP] = 10 diff --git a/tests/components/airzone/test_switch.py b/tests/components/airzone/test_switch.py new file mode 100644 index 00000000000000..f761b53ed4c657 --- /dev/null +++ b/tests/components/airzone/test_switch.py @@ -0,0 +1,102 @@ +"""The switch tests for the Airzone platform.""" + +from unittest.mock import patch + +from aioairzone.const import API_DATA, API_ON, API_SYSTEM_ID, API_ZONE_ID + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant + +from .util import async_init_integration + + +async def test_airzone_create_switches(hass: HomeAssistant) -> None: + """Test creation of switches.""" + + await async_init_integration(hass) + + state = hass.states.get("switch.despacho") + assert state.state == STATE_OFF + + state = hass.states.get("switch.dorm_1") + assert state.state == STATE_ON + + state = hass.states.get("switch.dorm_2") + assert state.state == STATE_OFF + + state = hass.states.get("switch.dorm_ppal") + assert state.state == STATE_ON + + state = hass.states.get("switch.salon") + assert state.state == STATE_OFF + + +async def test_airzone_switch_off(hass: HomeAssistant) -> None: + """Test switch off.""" + + await async_init_integration(hass) + + put_hvac_off = { + API_DATA: [ + { + API_SYSTEM_ID: 1, + API_ZONE_ID: 3, + API_ON: False, + } + ] + } + + with patch( + "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", + return_value=put_hvac_off, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.dorm_1", + }, + blocking=True, + ) + + state = hass.states.get("switch.dorm_1") + assert state.state == STATE_OFF + + +async def test_airzone_switch_on(hass: HomeAssistant) -> None: + """Test switch on.""" + + await async_init_integration(hass) + + put_hvac_on = { + API_DATA: [ + { + API_SYSTEM_ID: 1, + API_ZONE_ID: 5, + API_ON: True, + } + ] + } + + with patch( + "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", + return_value=put_hvac_on, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.dorm_2", + }, + blocking=True, + ) + + state = hass.states.get("switch.dorm_2") + assert state.state == STATE_ON diff --git a/tests/components/airzone/util.py b/tests/components/airzone/util.py index 2cdb7a9c6f9c1f..278663b7a9754e 100644 --- a/tests/components/airzone/util.py +++ b/tests/components/airzone/util.py @@ -272,6 +272,37 @@ }, ] }, + { + API_DATA: [ + { + API_SYSTEM_ID: 4, + API_ZONE_ID: 1, + API_NAME: "Aux Heat", + API_ON: 1, + API_COOL_SET_POINT: 20, + API_COOL_MAX_TEMP: 30, + API_COOL_MIN_TEMP: 15, + API_HEAT_SET_POINT: 20, + API_HEAT_MAX_TEMP: 30, + API_HEAT_MIN_TEMP: 15, + API_MAX_TEMP: 30, + API_MIN_TEMP: 15, + API_SET_POINT: 20, + API_ROOM_TEMP: 22, + API_MODES: [1, 2, 3, 4, 5, 6], + API_MODE: 6, + API_COLD_STAGES: 0, + API_COLD_STAGE: 0, + API_HEAT_STAGES: 0, + API_HEAT_STAGE: 0, + API_HUMIDITY: 0, + API_UNITS: 0, + API_ERRORS: [], + API_AIR_DEMAND: 0, + API_FLOOR_DEMAND: 0, + }, + ] + }, ] } diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr 
b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index 86b5c75b290e86..c6ad36916bf17b 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -136,6 +136,7 @@ }), 'temperature': 21.0, 'temperature-setpoint': 22.0, + 'temperature-setpoint-auto-air': 22.0, 'temperature-setpoint-cool-air': 22.0, 'temperature-setpoint-hot-air': 22.0, 'temperature-setpoint-max': 30.0, @@ -191,6 +192,7 @@ }), 'temperature': 20.0, 'temperature-setpoint': 22.0, + 'temperature-setpoint-auto-air': 22.0, 'temperature-setpoint-cool-air': 22.0, 'temperature-setpoint-hot-air': 18.0, 'temperature-setpoint-max': 30.0, @@ -297,6 +299,7 @@ 'dhw1': dict({ 'active': False, 'available': True, + 'double-set-point': False, 'id': 'dhw1', 'installation': 'installation1', 'is-connected': True, @@ -379,6 +382,7 @@ 'aq-present': True, 'aq-status': 'good', 'available': True, + 'double-set-point': False, 'errors': list([ dict({ '_id': 'error-id', diff --git a/tests/components/airzone_cloud/test_climate.py b/tests/components/airzone_cloud/test_climate.py index 37c5ff8e1afd62..2b587680a5770a 100644 --- a/tests/components/airzone_cloud/test_climate.py +++ b/tests/components/airzone_cloud/test_climate.py @@ -97,8 +97,7 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: assert state.attributes[ATTR_MAX_TEMP] == 30 assert state.attributes[ATTR_MIN_TEMP] == 15 assert state.attributes[ATTR_TARGET_TEMP_STEP] == API_DEFAULT_TEMP_STEP - assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 22.0 - assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 18.0 + assert state.attributes.get(ATTR_TEMPERATURE) == 22.0 # Groups state = hass.states.get("climate.group") @@ -589,6 +588,7 @@ async def test_airzone_climate_set_temp(hass: HomeAssistant) -> None: SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: "climate.bron_pro", + ATTR_HVAC_MODE: HVACMode.HEAT_COOL, ATTR_TARGET_TEMP_HIGH: 25.0, ATTR_TARGET_TEMP_LOW: 20.0, }, @@ -596,7 +596,7 @@ async def test_airzone_climate_set_temp(hass: HomeAssistant) -> None: ) state = hass.states.get("climate.bron_pro") - assert state.state == HVACMode.HEAT + assert state.state == HVACMode.HEAT_COOL assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25.0 assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 20.0 diff --git a/tests/components/airzone_cloud/test_init.py b/tests/components/airzone_cloud/test_init.py index b5b4bcebaa84b6..6cab0be6e7c464 100644 --- a/tests/components/airzone_cloud/test_init.py +++ b/tests/components/airzone_cloud/test_init.py @@ -2,6 +2,8 @@ from unittest.mock import patch +from aioairzone_cloud.exceptions import AirzoneTimeout + from homeassistant.components.airzone_cloud.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -50,3 +52,20 @@ async def test_unload_entry(hass: HomeAssistant) -> None: await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_init_api_timeout(hass: HomeAssistant) -> None: + """Test API timeouts when loading the Airzone Cloud integration.""" + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.login", + side_effect=AirzoneTimeout, + ): + config_entry = MockConfigEntry( + data=CONFIG, + domain=DOMAIN, + unique_id="airzone_cloud_unique_id", + ) + config_entry.add_to_hass(hass) + + assert await 
hass.config_entries.async_setup(config_entry.entry_id) is False diff --git a/tests/components/airzone_cloud/test_select.py b/tests/components/airzone_cloud/test_select.py index 5a6b610446832b..d0993365083ad9 100644 --- a/tests/components/airzone_cloud/test_select.py +++ b/tests/components/airzone_cloud/test_select.py @@ -4,7 +4,7 @@ import pytest -from homeassistant.components.select import DOMAIN as SELECT_DOMAIN +from homeassistant.components.select import ATTR_OPTIONS, DOMAIN as SELECT_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_SELECT_OPTION from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -22,9 +22,21 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dormitorio_air_quality_mode") assert state.state == "auto" + state = hass.states.get("select.dormitorio_mode") + assert state is None + state = hass.states.get("select.salon_air_quality_mode") assert state.state == "auto" + state = hass.states.get("select.salon_mode") + assert state.state == "cool" + assert state.attributes.get(ATTR_OPTIONS) == [ + "cool", + "dry", + "fan", + "heat", + ] + async def test_airzone_select_air_quality_mode(hass: HomeAssistant) -> None: """Test select Air Quality mode.""" @@ -58,3 +70,37 @@ async def test_airzone_select_air_quality_mode(hass: HomeAssistant) -> None: state = hass.states.get("select.dormitorio_air_quality_mode") assert state.state == "off" + + +async def test_airzone_select_mode(hass: HomeAssistant) -> None: + """Test select HVAC mode.""" + + await async_init_integration(hass) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "Invalid", + }, + blocking=True, + ) + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", + return_value=None, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "heat", + }, + blocking=True, + ) + + state = hass.states.get("select.salon_mode") + assert state.state == "heat" diff --git a/tests/components/airzone_cloud/test_switch.py b/tests/components/airzone_cloud/test_switch.py new file mode 100644 index 00000000000000..5ee65f11fa8d95 --- /dev/null +++ b/tests/components/airzone_cloud/test_switch.py @@ -0,0 +1,71 @@ +"""The switch tests for the Airzone Cloud platform.""" + +from unittest.mock import patch + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant + +from .util import async_init_integration + + +async def test_airzone_create_switches(hass: HomeAssistant) -> None: + """Test creation of switches.""" + + await async_init_integration(hass) + + state = hass.states.get("switch.dormitorio") + assert state.state == STATE_OFF + + state = hass.states.get("switch.salon") + assert state.state == STATE_ON + + +async def test_airzone_switch_off(hass: HomeAssistant) -> None: + """Test switch off.""" + + await async_init_integration(hass) + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", + return_value=None, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.salon", + }, + blocking=True, + ) + + state = 
hass.states.get("switch.salon") + assert state.state == STATE_OFF + + +async def test_airzone_switch_on(hass: HomeAssistant) -> None: + """Test switch on.""" + + await async_init_integration(hass) + + with patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_patch_device", + return_value=None, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.dormitorio", + }, + blocking=True, + ) + + state = hass.states.get("switch.dormitorio") + assert state.state == STATE_ON diff --git a/tests/components/alarm_control_panel/common.py b/tests/components/alarm_control_panel/common.py index 36e9918f54c12a..8a631eeff36b4e 100644 --- a/tests/components/alarm_control_panel/common.py +++ b/tests/components/alarm_control_panel/common.py @@ -8,6 +8,7 @@ DOMAIN, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -20,12 +21,6 @@ SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant @@ -145,31 +140,31 @@ def code_arm_required(self): def alarm_arm_away(self, code=None): """Send arm away command.""" - self._attr_state = STATE_ALARM_ARMED_AWAY + self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY self.schedule_update_ha_state() def alarm_arm_home(self, code=None): """Send arm home command.""" - self._attr_state = STATE_ALARM_ARMED_HOME + self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME self.schedule_update_ha_state() def alarm_arm_night(self, code=None): """Send arm night command.""" - self._attr_state = STATE_ALARM_ARMED_NIGHT + self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT self.schedule_update_ha_state() def alarm_arm_vacation(self, code=None): """Send arm night command.""" - self._attr_state = STATE_ALARM_ARMED_VACATION + self._attr_alarm_state = AlarmControlPanelState.ARMED_VACATION self.schedule_update_ha_state() def alarm_disarm(self, code=None): """Send disarm command.""" if code == "1234": - self._attr_state = STATE_ALARM_DISARMED + self._attr_alarm_state = AlarmControlPanelState.DISARMED self.schedule_update_ha_state() def alarm_trigger(self, code=None): """Send alarm trigger command.""" - self._attr_state = STATE_ALARM_TRIGGERED + self._attr_alarm_state = AlarmControlPanelState.TRIGGERED self.schedule_update_ha_state() diff --git a/tests/components/alarm_control_panel/test_device_action.py b/tests/components/alarm_control_panel/test_device_action.py index 9c5aaffd73364e..a733501769181d 100644 --- a/tests/components/alarm_control_panel/test_device_action.py +++ b/tests/components/alarm_control_panel/test_device_action.py @@ -7,19 +7,10 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - CONF_PLATFORM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - STATE_UNKNOWN, - EntityCategory, -) +from homeassistant.const import CONF_PLATFORM, STATE_UNKNOWN, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup 
import async_setup_component @@ -541,27 +532,44 @@ async def test_action( hass.bus.async_fire("test_event_arm_away") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_AWAY + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_AWAY + ) hass.bus.async_fire("test_event_arm_home") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_HOME + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_HOME + ) hass.bus.async_fire("test_event_arm_vacation") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_VACATION + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_VACATION + ) hass.bus.async_fire("test_event_arm_night") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_NIGHT + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_NIGHT + ) hass.bus.async_fire("test_event_disarm") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_DISARMED + assert ( + hass.states.get(entity_entry.entity_id).state == AlarmControlPanelState.DISARMED + ) hass.bus.async_fire("test_event_trigger") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_TRIGGERED + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.TRIGGERED + ) async def test_action_legacy( @@ -615,4 +623,7 @@ async def test_action_legacy( hass.bus.async_fire("test_event_arm_away") await hass.async_block_till_done() - assert hass.states.get(entity_entry.entity_id).state == STATE_ALARM_ARMED_AWAY + assert ( + hass.states.get(entity_entry.entity_id).state + == AlarmControlPanelState.ARMED_AWAY + ) diff --git a/tests/components/alarm_control_panel/test_device_condition.py b/tests/components/alarm_control_panel/test_device_condition.py index da1d77f50a3781..37cbc466e6d7dc 100644 --- a/tests/components/alarm_control_panel/test_device_condition.py +++ b/tests/components/alarm_control_panel/test_device_condition.py @@ -7,18 +7,10 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - EntityCategory, -) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -354,7 +346,7 @@ async def test_if_state( ] }, ) - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -366,7 +358,7 @@ async def test_if_state( assert len(service_calls) == 1 assert service_calls[0].data["some"] == "is_triggered - event - test_event1" - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + 
hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -378,7 +370,7 @@ async def test_if_state( assert len(service_calls) == 2 assert service_calls[1].data["some"] == "is_disarmed - event - test_event2" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_HOME) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -390,7 +382,7 @@ async def test_if_state( assert len(service_calls) == 3 assert service_calls[2].data["some"] == "is_armed_home - event - test_event3" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_AWAY) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -402,7 +394,7 @@ async def test_if_state( assert len(service_calls) == 4 assert service_calls[3].data["some"] == "is_armed_away - event - test_event4" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_NIGHT) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -414,7 +406,7 @@ async def test_if_state( assert len(service_calls) == 5 assert service_calls[4].data["some"] == "is_armed_night - event - test_event5" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_VACATION) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_VACATION) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -426,7 +418,7 @@ async def test_if_state( assert len(service_calls) == 6 assert service_calls[5].data["some"] == "is_armed_vacation - event - test_event6" - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_CUSTOM_BYPASS) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_CUSTOM_BYPASS) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") @@ -488,7 +480,7 @@ async def test_if_state_legacy( ] }, ) - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) hass.bus.async_fire("test_event1") await hass.async_block_till_done() assert len(service_calls) == 1 diff --git a/tests/components/alarm_control_panel/test_device_trigger.py b/tests/components/alarm_control_panel/test_device_trigger.py index 46eba314dc1de0..17a301ccdf1f5c 100644 --- a/tests/components/alarm_control_panel/test_device_trigger.py +++ b/tests/components/alarm_control_panel/test_device_trigger.py @@ -9,18 +9,10 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, - EntityCategory, -) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -256,7 +248,7 @@ async def 
test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_ALARM_PENDING) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.PENDING) assert await async_setup_component( hass, @@ -400,7 +392,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is triggered. - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -409,7 +401,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is disarmed. - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) await hass.async_block_till_done() assert len(service_calls) == 2 assert ( @@ -418,7 +410,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed home. - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_HOME) await hass.async_block_till_done() assert len(service_calls) == 3 assert ( @@ -427,7 +419,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed away. - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert len(service_calls) == 4 assert ( @@ -436,7 +428,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed night. - hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_NIGHT) await hass.async_block_till_done() assert len(service_calls) == 5 assert ( @@ -445,7 +437,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is armed vacation. 
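# The constants replaced throughout these alarm tests map one-to-one onto the
# AlarmControlPanelState enum imported above. A minimal sketch, assuming the
# enum is a StrEnum whose members keep the old string values (so string
# comparisons in state assertions behave the same):
#
#     from homeassistant.components.alarm_control_panel import AlarmControlPanelState
#
#     assert AlarmControlPanelState.ARMED_VACATION == "armed_vacation"
#     assert AlarmControlPanelState.TRIGGERED == "triggered"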
- hass.states.async_set(entry.entity_id, STATE_ALARM_ARMED_VACATION) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.ARMED_VACATION) await hass.async_block_till_done() assert len(service_calls) == 6 assert ( @@ -471,7 +463,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) assert await async_setup_component( hass, @@ -506,7 +498,7 @@ async def test_if_fires_on_state_change_with_for( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) @@ -536,7 +528,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.DISARMED) assert await async_setup_component( hass, @@ -570,7 +562,7 @@ async def test_if_fires_on_state_change_legacy( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entry.entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py index 06724978ce37ea..90b23f87ab127c 100644 --- a/tests/components/alarm_control_panel/test_init.py +++ b/tests/components/alarm_control_panel/test_init.py @@ -2,14 +2,17 @@ from types import ModuleType from typing import Any +from unittest.mock import patch import pytest from homeassistant.components import alarm_control_panel -from homeassistant.components.alarm_control_panel.const import ( +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, CodeFormat, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_CODE, SERVICE_ALARM_ARM_AWAY, @@ -23,11 +26,20 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED, UndefinedType -from .conftest import MockAlarmControlPanel +from .conftest import TEST_DOMAIN, MockAlarmControlPanel -from tests.common import help_test_all, import_and_test_deprecated_constant_enum +from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, + help_test_all, + import_and_test_deprecated_constant_enum, + mock_integration, + mock_platform, +) async def help_test_async_alarm_control_panel_service( @@ -283,3 +295,197 @@ async def test_alarm_control_panel_with_default_code( hass, mock_alarm_control_panel_entity.entity_id, SERVICE_ALARM_DISARM ) mock_alarm_control_panel_entity.calls_disarm.assert_called_with("1234") + + +async def test_alarm_control_panel_not_log_deprecated_state_warning( + hass: HomeAssistant, + mock_alarm_control_panel_entity: MockAlarmControlPanel, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test correctly using 
alarm_state doesn't log issue or raise repair.""" + state = hass.states.get(mock_alarm_control_panel_entity.entity_id) + assert state is not None + assert "Entities should implement the 'alarm_state' property and" not in caplog.text + + +async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop( + hass: HomeAssistant, + code_format: CodeFormat | None, + supported_features: AlarmControlPanelEntityFeature, + code_arm_required: bool, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using state property does log issue and raise repair.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [ALARM_CONTROL_PANEL_DOMAIN] + ) + return True + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + ), + ) + + class MockLegacyAlarmControlPanel(MockAlarmControlPanel): + """Mocked alarm control entity.""" + + def __init__( + self, + supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature( + 0 + ), + code_format: CodeFormat | None = None, + code_arm_required: bool = True, + ) -> None: + """Initialize the alarm control.""" + super().__init__(supported_features, code_format, code_arm_required) + + @property + def state(self) -> str: + """Return the state of the entity.""" + return "disarmed" + + entity = MockLegacyAlarmControlPanel( + supported_features=supported_features, + code_format=code_format, + code_arm_required=code_arm_required, + ) + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test alarm control panel platform via config entry.""" + async_add_entities([entity]) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + with patch.object( + MockLegacyAlarmControlPanel, + "__module__", + "tests.custom_components.test.alarm_control_panel", + ): + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert "Entities should implement the 'alarm_state' property and" in caplog.text + + +async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state_attr( + hass: HomeAssistant, + code_format: CodeFormat | None, + supported_features: AlarmControlPanelEntityFeature, + code_arm_required: bool, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test incorrectly using _attr_state attribute does log issue and raise repair.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [ALARM_CONTROL_PANEL_DOMAIN] + ) + return True + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + ), + ) + + class MockLegacyAlarmControlPanel(MockAlarmControlPanel): + """Mocked alarm control entity.""" + + def __init__( + self, + supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature( + 0 + ), + code_format: CodeFormat | None = None, + code_arm_required: bool = True, + ) -> None: + """Initialize the alarm 
control.""" + super().__init__(supported_features, code_format, code_arm_required) + + def alarm_disarm(self, code: str | None = None) -> None: + """Mock alarm disarm calls.""" + self._attr_state = "disarmed" + + entity = MockLegacyAlarmControlPanel( + supported_features=supported_features, + code_format=code_format, + code_arm_required=code_arm_required, + ) + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test alarm control panel platform via config entry.""" + async_add_entities([entity]) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + with patch.object( + MockLegacyAlarmControlPanel, + "__module__", + "tests.custom_components.test.alarm_control_panel", + ): + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state is not None + + assert "Entities should implement the 'alarm_state' property and" not in caplog.text + + with patch.object( + MockLegacyAlarmControlPanel, + "__module__", + "tests.custom_components.test.alarm_control_panel", + ): + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) + + assert "Entities should implement the 'alarm_state' property and" in caplog.text + caplog.clear() + with patch.object( + MockLegacyAlarmControlPanel, + "__module__", + "tests.custom_components.test.alarm_control_panel", + ): + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) + # Test we only log once + assert "Entities should implement the 'alarm_state' property and" not in caplog.text diff --git a/tests/components/alarm_control_panel/test_reproduce_state.py b/tests/components/alarm_control_panel/test_reproduce_state.py index c7984b0793e9d8..fcb4fdee36e9ef 100644 --- a/tests/components/alarm_control_panel/test_reproduce_state.py +++ b/tests/components/alarm_control_panel/test_reproduce_state.py @@ -2,6 +2,7 @@ import pytest +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_CUSTOM_BYPASS, @@ -10,13 +11,6 @@ SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state @@ -29,27 +23,37 @@ async def test_reproducing_states( ) -> None: """Test reproducing Alarm control panel states.""" hass.states.async_set( - "alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY, {} + "alarm_control_panel.entity_armed_away", + AlarmControlPanelState.ARMED_AWAY, + {}, ) hass.states.async_set( "alarm_control_panel.entity_armed_custom_bypass", - STATE_ALARM_ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, {}, ) hass.states.async_set( - "alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME, {} + "alarm_control_panel.entity_armed_home", + AlarmControlPanelState.ARMED_HOME, + {}, ) hass.states.async_set( - "alarm_control_panel.entity_armed_night", 
STATE_ALARM_ARMED_NIGHT, {} + "alarm_control_panel.entity_armed_night", + AlarmControlPanelState.ARMED_NIGHT, + {}, ) hass.states.async_set( - "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION, {} + "alarm_control_panel.entity_armed_vacation", + AlarmControlPanelState.ARMED_VACATION, + {}, ) hass.states.async_set( - "alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED, {} + "alarm_control_panel.entity_disarmed", AlarmControlPanelState.DISARMED, {} ) hass.states.async_set( - "alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED, {} + "alarm_control_panel.entity_triggered", + AlarmControlPanelState.TRIGGERED, + {}, ) arm_away_calls = async_mock_service( @@ -76,18 +80,34 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY), + State( + "alarm_control_panel.entity_armed_away", + AlarmControlPanelState.ARMED_AWAY, + ), State( "alarm_control_panel.entity_armed_custom_bypass", - STATE_ALARM_ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + State( + "alarm_control_panel.entity_armed_home", + AlarmControlPanelState.ARMED_HOME, + ), + State( + "alarm_control_panel.entity_armed_night", + AlarmControlPanelState.ARMED_NIGHT, + ), + State( + "alarm_control_panel.entity_armed_vacation", + AlarmControlPanelState.ARMED_VACATION, ), - State("alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME), - State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT), State( - "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION + "alarm_control_panel.entity_disarmed", + AlarmControlPanelState.DISARMED, + ), + State( + "alarm_control_panel.entity_triggered", + AlarmControlPanelState.TRIGGERED, ), - State("alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED), - State("alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED), ], ) @@ -117,17 +137,34 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("alarm_control_panel.entity_armed_away", STATE_ALARM_TRIGGERED), State( - "alarm_control_panel.entity_armed_custom_bypass", STATE_ALARM_ARMED_AWAY + "alarm_control_panel.entity_armed_away", + AlarmControlPanelState.TRIGGERED, + ), + State( + "alarm_control_panel.entity_armed_custom_bypass", + AlarmControlPanelState.ARMED_AWAY, + ), + State( + "alarm_control_panel.entity_armed_home", + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + State( + "alarm_control_panel.entity_armed_night", + AlarmControlPanelState.ARMED_HOME, + ), + State( + "alarm_control_panel.entity_armed_vacation", + AlarmControlPanelState.ARMED_NIGHT, + ), + State( + "alarm_control_panel.entity_disarmed", + AlarmControlPanelState.ARMED_VACATION, ), State( - "alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_CUSTOM_BYPASS + "alarm_control_panel.entity_triggered", + AlarmControlPanelState.DISARMED, ), - State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_HOME), - State("alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_NIGHT), - State("alarm_control_panel.entity_disarmed", STATE_ALARM_ARMED_VACATION), - State("alarm_control_panel.entity_triggered", STATE_ALARM_DISARMED), # Should not raise State("alarm_control_panel.non_existing", "on"), ], diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index 5acdbdb271a7a4..a41c2f47b2d825 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -5,6 
+5,7 @@ import pytest +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.alexa import smart_home from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, @@ -23,11 +24,6 @@ ) from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, STATE_OFF, STATE_UNAVAILABLE, STATE_UNKNOWN, @@ -1351,15 +1347,23 @@ async def test_temperature_sensor_water_heater(hass: HomeAssistant) -> None: async def test_report_alarm_control_panel_state(hass: HomeAssistant) -> None: """Test SecurityPanelController implements armState property.""" - hass.states.async_set("alarm_control_panel.armed_away", STATE_ALARM_ARMED_AWAY, {}) hass.states.async_set( - "alarm_control_panel.armed_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS, {} + "alarm_control_panel.armed_away", AlarmControlPanelState.ARMED_AWAY, {} ) - hass.states.async_set("alarm_control_panel.armed_home", STATE_ALARM_ARMED_HOME, {}) hass.states.async_set( - "alarm_control_panel.armed_night", STATE_ALARM_ARMED_NIGHT, {} + "alarm_control_panel.armed_custom_bypass", + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + {}, + ) + hass.states.async_set( + "alarm_control_panel.armed_home", AlarmControlPanelState.ARMED_HOME, {} + ) + hass.states.async_set( + "alarm_control_panel.armed_night", AlarmControlPanelState.ARMED_NIGHT, {} + ) + hass.states.async_set( + "alarm_control_panel.disarmed", AlarmControlPanelState.DISARMED, {} ) - hass.states.async_set("alarm_control_panel.disarmed", STATE_ALARM_DISARMED, {}) properties = await reported_properties(hass, "alarm_control_panel.armed_away") properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index 6ccf265dcdcbdf..68010a6a7111c5 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -12,7 +12,6 @@ from homeassistant.components.media_player import MediaPlayerEntityFeature from homeassistant.components.vacuum import VacuumEntityFeature from homeassistant.components.valve import SERVICE_STOP_VALVE, ValveEntityFeature -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, @@ -20,6 +19,7 @@ UnitOfTemperature, ) from homeassistant.core import Context, Event, HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import entityfilter from homeassistant.setup import async_setup_component from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM @@ -3999,6 +3999,108 @@ async def test_alarm_control_panel_code_arm_required(hass: HomeAssistant) -> Non await discovery_test(device, hass, expected_endpoints=0) +async def test_alarm_control_panel_disarm_required(hass: HomeAssistant) -> None: + """Test alarm_control_panel disarm required.""" + device = ( + "alarm_control_panel.test_4", + "armed_away", + { + "friendly_name": "Test Alarm Control Panel 4", + "code_arm_required": False, + "code_format": "FORMAT_NUMBER", + "code": "1234", + "supported_features": 3, + }, + ) + appliance = await discovery_test(device, hass) + + assert appliance["endpointId"] == "alarm_control_panel#test_4" + assert appliance["displayCategories"][0] == "SECURITY_PANEL" + assert appliance["friendlyName"] == "Test Alarm Control 
Panel 4" + assert_endpoint_capabilities( + appliance, "Alexa.SecurityPanelController", "Alexa.EndpointHealth", "Alexa" + ) + + properties = await reported_properties(hass, "alarm_control_panel#test_4") + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") + + msg = await assert_request_fails( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_4", + "alarm_control_panel.alarm_arm_home", + hass, + payload={"armState": "ARMED_STAY"}, + ) + assert msg["event"]["payload"]["type"] == "AUTHORIZATION_REQUIRED" + assert ( + msg["event"]["payload"]["message"] + == "You must disarm the system before you can set the requested arm state." + ) + + _, msg = await assert_request_calls_service( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_4", + "alarm_control_panel.alarm_arm_away", + hass, + response_type="Arm.Response", + payload={"armState": "ARMED_AWAY"}, + ) + properties = ReportedProperties(msg["context"]["properties"]) + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") + + +async def test_alarm_control_panel_change_arm_type(hass: HomeAssistant) -> None: + """Test alarm_control_panel change arm type.""" + device = ( + "alarm_control_panel.test_5", + "armed_home", + { + "friendly_name": "Test Alarm Control Panel 5", + "code_arm_required": False, + "code_format": "FORMAT_NUMBER", + "code": "1234", + "supported_features": 3, + }, + ) + appliance = await discovery_test(device, hass) + + assert appliance["endpointId"] == "alarm_control_panel#test_5" + assert appliance["displayCategories"][0] == "SECURITY_PANEL" + assert appliance["friendlyName"] == "Test Alarm Control Panel 5" + assert_endpoint_capabilities( + appliance, "Alexa.SecurityPanelController", "Alexa.EndpointHealth", "Alexa" + ) + + properties = await reported_properties(hass, "alarm_control_panel#test_5") + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_STAY") + + _, msg = await assert_request_calls_service( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_5", + "alarm_control_panel.alarm_arm_home", + hass, + response_type="Arm.Response", + payload={"armState": "ARMED_STAY"}, + ) + properties = ReportedProperties(msg["context"]["properties"]) + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_STAY") + + _, msg = await assert_request_calls_service( + "Alexa.SecurityPanelController", + "Arm", + "alarm_control_panel#test_5", + "alarm_control_panel.alarm_arm_away", + hass, + response_type="Arm.Response", + payload={"armState": "ARMED_AWAY"}, + ) + properties = ReportedProperties(msg["context"]["properties"]) + properties.assert_equal("Alexa.SecurityPanelController", "armState", "ARMED_AWAY") + + async def test_range_unsupported_domain(hass: HomeAssistant) -> None: """Test rangeController with unsupported domain.""" device = ("switch.test", "on", {"friendly_name": "Test switch"}) diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index 5542aab4b304da..ba7e46bdde756d 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -76,7 +76,7 @@ async def test_no_send( """Test send when no preferences are defined.""" analytics = Analytics(hass) with patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=False), ): assert not analytics.preferences[ATTR_BASE] @@ -97,7 +97,7 @@ 
async def test_load_with_supervisor_diagnostics(hass: HomeAssistant) -> None: side_effect=Mock(return_value={"diagnostics": True}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), ), ): @@ -118,7 +118,7 @@ async def test_load_with_supervisor_without_diagnostics(hass: HomeAssistant) -> side_effect=Mock(return_value={"diagnostics": False}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), ), ): @@ -219,8 +219,12 @@ async def test_send_base_with_supervisor( side_effect=Mock(return_value={}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), + ) as is_hassio_mock, + patch( + "homeassistant.helpers.system_info.is_hassio", + new=is_hassio_mock, ), ): await analytics.load() @@ -314,8 +318,12 @@ async def test_send_usage_with_supervisor( side_effect=Mock(return_value={}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), + ) as is_hassio_mock, + patch( + "homeassistant.helpers.system_info.is_hassio", + new=is_hassio_mock, ), ): await analytics.send_analytics() @@ -529,8 +537,12 @@ async def test_send_statistics_with_supervisor( side_effect=Mock(return_value={}), ), patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.components.analytics.analytics.is_hassio", side_effect=Mock(return_value=True), + ) as is_hassio_mock, + patch( + "homeassistant.helpers.system_info.is_hassio", + new=is_hassio_mock, ), ): await analytics.send_analytics() diff --git a/tests/components/analytics_insights/conftest.py b/tests/components/analytics_insights/conftest.py index fcdda95e9bd5f5..a9c152b8ab9d84 100644 --- a/tests/components/analytics_insights/conftest.py +++ b/tests/components/analytics_insights/conftest.py @@ -5,9 +5,10 @@ import pytest from python_homeassistant_analytics import CurrentAnalytics -from python_homeassistant_analytics.models import CustomIntegration, Integration +from python_homeassistant_analytics.models import Addon, CustomIntegration, Integration from homeassistant.components.analytics_insights.const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -43,6 +44,10 @@ def mock_analytics_client() -> Generator[AsyncMock]: client.get_current_analytics.return_value = CurrentAnalytics.from_json( load_fixture("analytics_insights/current_data.json") ) + addons = load_json_object_fixture("analytics_insights/addons.json") + client.get_addons.return_value = { + key: Addon.from_dict(value) for key, value in addons.items() + } integrations = load_json_object_fixture("analytics_insights/integrations.json") client.get_integrations.return_value = { key: Integration.from_dict(value) for key, value in integrations.items() @@ -65,6 +70,7 @@ def mock_config_entry() -> MockConfigEntry: title="Homeassistant Analytics", data={}, options={ + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "spotify", "myq"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, diff --git a/tests/components/analytics_insights/fixtures/addons.json b/tests/components/analytics_insights/fixtures/addons.json new file mode 100644 index 00000000000000..cb7ae42c86b595 --- /dev/null +++ 
b/tests/components/analytics_insights/fixtures/addons.json @@ -0,0 +1,31 @@ +{ + "core_samba": { + "total": 76357, + "versions": { + "12.3.2": 65875, + "12.2.0": 1313, + "12.3.1": 5018, + "12.1.0": 211, + "10.0.0": 1139, + "9.4.0": 4, + "12.3.0": 704, + "9.3.1": 36, + "10.0.2": 1290, + "9.5.1": 379, + "9.6.1": 66, + "10.0.1": 200, + "9.3.0": 20, + "9.2.0": 9, + "9.5.0": 13, + "12.0.0": 39, + "9.7.0": 20, + "11.0.0": 13, + "3.0": 1, + "9.6.0": 2, + "8.1": 2, + "9.0": 3 + }, + "protected": 76345, + "auto_update": 32732 + } +} diff --git a/tests/components/analytics_insights/snapshots/test_sensor.ambr b/tests/components/analytics_insights/snapshots/test_sensor.ambr index 1a8f4cec078014..6e11b344b0b9e4 100644 --- a/tests/components/analytics_insights/snapshots/test_sensor.ambr +++ b/tests/components/analytics_insights/snapshots/test_sensor.ambr @@ -1,4 +1,54 @@ # serializer version: 1 +# name: test_all_entities[sensor.homeassistant_analytics_core_samba-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.homeassistant_analytics_core_samba', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'core_samba', + 'platform': 'analytics_insights', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'addons', + 'unique_id': 'addon_core_samba_active_installations', + 'unit_of_measurement': 'active installations', + }) +# --- +# name: test_all_entities[sensor.homeassistant_analytics_core_samba-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Homeassistant Analytics core_samba', + 'state_class': , + 'unit_of_measurement': 'active installations', + }), + 'context': , + 'entity_id': 'sensor.homeassistant_analytics_core_samba', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '76357', + }) +# --- # name: test_all_entities[sensor.homeassistant_analytics_hacs_custom-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -299,103 +349,3 @@ 'state': '339', }) # --- -# name: test_all_entities[sensor.total_active_installations-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.total_active_installations', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total active installations', - 'platform': 'analytics_insights', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_active_installations', - 'unique_id': 'total_active_installations', - 'unit_of_measurement': 'active installations', - }) -# --- -# name: test_all_entities[sensor.total_active_installations-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Homeassistant Analytics Total active installations', - 'state_class': , - 'unit_of_measurement': 'active installations', - }), - 'context': , - 'entity_id': 
'sensor.total_active_installations', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '310400', - }) -# --- -# name: test_all_entities[sensor.total_reports_integrations-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.total_reports_integrations', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Total reported integrations', - 'platform': 'analytics_insights', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_reports_integrations', - 'unique_id': 'total_reports_integrations', - 'unit_of_measurement': 'active installations', - }) -# --- -# name: test_all_entities[sensor.total_reports_integrations-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Homeassistant Analytics Total reported integrations', - 'state_class': , - 'unit_of_measurement': 'active installations', - }), - 'context': , - 'entity_id': 'sensor.total_reports_integrations', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '249256', - }) -# --- diff --git a/tests/components/analytics_insights/test_config_flow.py b/tests/components/analytics_insights/test_config_flow.py index 0c9d4c074f8333..747f24930a46e7 100644 --- a/tests/components/analytics_insights/test_config_flow.py +++ b/tests/components/analytics_insights/test_config_flow.py @@ -7,6 +7,7 @@ from python_homeassistant_analytics import HomeassistantAnalyticsConnectionError from homeassistant.components.analytics_insights.const import ( + CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, CONF_TRACKED_INTEGRATIONS, DOMAIN, @@ -25,10 +26,12 @@ [ ( { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -38,6 +41,7 @@ CONF_TRACKED_INTEGRATIONS: ["youtube"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -47,6 +51,7 @@ CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -83,6 +88,7 @@ async def test_form( "user_input", [ { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -113,6 +119,7 @@ async def test_submitting_empty_form( result = await hass.config_entries.flow.async_configure( result["flow_id"], { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -123,6 +130,7 @@ async def test_submitting_empty_form( assert result["title"] == "Home Assistant Analytics Insights" assert result["data"] == {} assert result["options"] == { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], } @@ -161,6 +169,7 @@ async def test_form_already_configured( domain=DOMAIN, data={}, options={ + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube", "spotify"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -179,19 
+188,32 @@ async def test_form_already_configured( [ ( { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, ), + ( + { + CONF_TRACKED_ADDONS: ["core_samba"], + }, + { + CONF_TRACKED_ADDONS: ["core_samba"], + CONF_TRACKED_INTEGRATIONS: [], + CONF_TRACKED_CUSTOM_INTEGRATIONS: [], + }, + ), ( { CONF_TRACKED_INTEGRATIONS: ["youtube"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: ["youtube"], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -201,6 +223,7 @@ async def test_form_already_configured( CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -237,6 +260,7 @@ async def test_options_flow( "user_input", [ { + CONF_TRACKED_ADDONS: [], CONF_TRACKED_INTEGRATIONS: [], CONF_TRACKED_CUSTOM_INTEGRATIONS: [], }, @@ -267,6 +291,7 @@ async def test_submitting_empty_options_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "hue"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], }, @@ -275,6 +300,7 @@ async def test_submitting_empty_options_flow( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { + CONF_TRACKED_ADDONS: ["core_samba"], CONF_TRACKED_INTEGRATIONS: ["youtube", "hue"], CONF_TRACKED_CUSTOM_INTEGRATIONS: ["hacs"], } diff --git a/tests/components/androidtv/common.py b/tests/components/androidtv/common.py index 23e048e4d52b53..133f6b1470b033 100644 --- a/tests/components/androidtv/common.py +++ b/tests/components/androidtv/common.py @@ -100,7 +100,12 @@ def setup_mock_entry( - config: dict[str, Any], entity_domain: str + config: dict[str, Any], + entity_domain: str, + *, + options=None, + version=1, + minor_version=2, ) -> tuple[str, str, MockConfigEntry]: """Prepare mock entry for entities tests.""" patch_key = config[ADB_PATCH_KEY] @@ -109,6 +114,9 @@ def setup_mock_entry( domain=DOMAIN, data=config[DOMAIN], unique_id="a1:b1:c1:d1:e1:f1", + options=options, + version=version, + minor_version=minor_version, ) return patch_key, entity_id, config_entry diff --git a/tests/components/androidtv/test_config_flow.py b/tests/components/androidtv/test_config_flow.py index b73fee9fb105f0..cb1015e41984e6 100644 --- a/tests/components/androidtv/test_config_flow.py +++ b/tests/components/androidtv/test_config_flow.py @@ -22,7 +22,7 @@ CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, CONF_GET_SOURCES, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -501,7 +501,7 @@ async def test_options_flow(hass: HomeAssistant) -> None: user_input={ CONF_GET_SOURCES: True, CONF_EXCLUDE_UNNAMED_APPS: True, - CONF_SCREENCAP: True, + CONF_SCREENCAP_INTERVAL: 1, CONF_TURN_OFF_COMMAND: "off", CONF_TURN_ON_COMMAND: "on", }, @@ -515,6 +515,6 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert config_entry.options[CONF_GET_SOURCES] is True assert config_entry.options[CONF_EXCLUDE_UNNAMED_APPS] is True - assert config_entry.options[CONF_SCREENCAP] is True + assert config_entry.options[CONF_SCREENCAP_INTERVAL] == 1 assert config_entry.options[CONF_TURN_OFF_COMMAND] == "off" assert config_entry.options[CONF_TURN_ON_COMMAND] == "on" diff --git a/tests/components/androidtv/test_init.py 
b/tests/components/androidtv/test_init.py new file mode 100644 index 00000000000000..8ff7df1668bf7c --- /dev/null +++ b/tests/components/androidtv/test_init.py @@ -0,0 +1,34 @@ +"""Tests for AndroidTV integration initialization.""" + +from homeassistant.components.androidtv.const import ( + CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, +) +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN +from homeassistant.core import HomeAssistant + +from . import patchers +from .common import CONFIG_ANDROID_DEFAULT, SHELL_RESPONSE_OFF, setup_mock_entry + + +async def test_migrate_version( + hass: HomeAssistant, +) -> None: + """Test migration to new version.""" + patch_key, _, mock_config_entry = setup_mock_entry( + CONFIG_ANDROID_DEFAULT, + MP_DOMAIN, + options={CONF_SCREENCAP: False}, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + + with ( + patchers.patch_connect(True)[patch_key], + patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key], + ): + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.options[CONF_SCREENCAP_INTERVAL] == 0 + assert mock_config_entry.minor_version == 2 diff --git a/tests/components/androidtv/test_media_player.py b/tests/components/androidtv/test_media_player.py index ef0d0c63b0637a..5a8d88dd9f674b 100644 --- a/tests/components/androidtv/test_media_player.py +++ b/tests/components/androidtv/test_media_player.py @@ -13,7 +13,7 @@ from homeassistant.components.androidtv.const import ( CONF_APPS, CONF_EXCLUDE_UNNAMED_APPS, - CONF_SCREENCAP, + CONF_SCREENCAP_INTERVAL, CONF_STATE_DETECTION_RULES, CONF_TURN_OFF_COMMAND, CONF_TURN_ON_COMMAND, @@ -801,6 +801,9 @@ async def test_get_image_http( """ patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + config_entry, options={CONF_SCREENCAP_INTERVAL: 2} + ) with ( patchers.patch_connect(True)[patch_key], @@ -828,21 +831,27 @@ async def test_get_image_http( content = await resp.read() assert content == b"image" - next_update = utcnow() + timedelta(seconds=30) + next_update = utcnow() + timedelta(minutes=1) with ( patchers.patch_shell("11")[patch_key], patchers.PATCH_SCREENCAP as patch_screen_cap, - patch("homeassistant.util.utcnow", return_value=next_update), + patch( + "homeassistant.components.androidtv.media_player.utcnow", + return_value=next_update, + ), ): async_fire_time_changed(hass, next_update, True) await hass.async_block_till_done() patch_screen_cap.assert_not_called() - next_update = utcnow() + timedelta(seconds=60) + next_update = utcnow() + timedelta(minutes=2) with ( patchers.patch_shell("11")[patch_key], patchers.PATCH_SCREENCAP as patch_screen_cap, - patch("homeassistant.util.utcnow", return_value=next_update), + patch( + "homeassistant.components.androidtv.media_player.utcnow", + return_value=next_update, + ), ): async_fire_time_changed(hass, next_update, True) await hass.async_block_till_done() @@ -854,6 +863,9 @@ async def test_get_image_http_fail(hass: HomeAssistant) -> None: patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + config_entry, options={CONF_SCREENCAP_INTERVAL: 2} + ) with ( patchers.patch_connect(True)[patch_key], @@ -885,7 +897,7 @@ async def test_get_image_disabled(hass: HomeAssistant) -> None: patch_key, entity_id, config_entry = _setup(CONFIG_ANDROID_DEFAULT) config_entry.add_to_hass(hass) 
hass.config_entries.async_update_entry( - config_entry, options={CONF_SCREENCAP: False} + config_entry, options={CONF_SCREENCAP_INTERVAL: 0} ) with ( @@ -1133,7 +1145,7 @@ async def test_options_reload(hass: HomeAssistant) -> None: with patchers.PATCH_SETUP_ENTRY as setup_entry_call: # change an option that not require integration reload hass.config_entries.async_update_entry( - config_entry, options={CONF_SCREENCAP: False} + config_entry, options={CONF_EXCLUDE_UNNAMED_APPS: True} ) await hass.async_block_till_done() diff --git a/tests/components/androidtv_remote/test_config_flow.py b/tests/components/androidtv_remote/test_config_flow.py index 93c9067d1c8a57..02e15bca41574d 100644 --- a/tests/components/androidtv_remote/test_config_flow.py +++ b/tests/components/androidtv_remote/test_config_flow.py @@ -757,6 +757,59 @@ async def test_zeroconf_flow_abort_if_mac_is_missing( assert result["reason"] == "cannot_connect" +async def test_zeroconf_flow_already_configured_zeroconf_has_multiple_invalid_ip_addresses( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_unload_entry: AsyncMock, + mock_api: MagicMock, +) -> None: + """Test we abort the zeroconf flow if already configured and zeroconf has invalid ip addresses.""" + host = "1.2.3.4" + name = "My Android TV" + mac = "1A:2B:3C:4D:5E:6F" + unique_id = "1a:2b:3c:4d:5e:6f" + name_existing = name + host_existing = host + + mock_config_entry = MockConfigEntry( + title=name, + domain=DOMAIN, + data={ + "host": host_existing, + "name": name_existing, + "mac": mac, + }, + unique_id=unique_id, + state=ConfigEntryState.LOADED, + ) + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.2.3.5"), + ip_addresses=[ip_address("1.2.3.5"), ip_address(host)], + port=6466, + hostname=host, + type="mock_type", + name=name + "._androidtvremote2._tcp.local.", + properties={"bt": mac}, + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + await hass.async_block_till_done() + assert hass.config_entries.async_entries(DOMAIN)[0].data == { + "host": host, + "name": name, + "mac": mac, + } + assert len(mock_unload_entry.mock_calls) == 0 + assert len(mock_setup_entry.mock_calls) == 0 + + async def test_reauth_flow_success( hass: HomeAssistant, mock_setup_entry: AsyncMock, diff --git a/tests/components/anova/__init__.py b/tests/components/anova/__init__.py index 887f5b3b05b35f..903a11809800cc 100644 --- a/tests/components/anova/__init__.py +++ b/tests/components/anova/__init__.py @@ -36,6 +36,7 @@ def create_entry(hass: HomeAssistant, device_id: str = DEVICE_UNIQUE_ID) -> Conf }, unique_id="sample@gmail.com", version=1, + minor_version=2, ) entry.add_to_hass(hass) return entry diff --git a/tests/components/anova/test_config_flow.py b/tests/components/anova/test_config_flow.py index 0f93b869296c26..3b2afaa49c077c 100644 --- a/tests/components/anova/test_config_flow.py +++ b/tests/components/anova/test_config_flow.py @@ -6,7 +6,7 @@ from homeassistant import config_entries from homeassistant.components.anova.const import DOMAIN -from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -27,7 +27,6 @@ async def test_flow_user(hass: HomeAssistant, anova_api: 
AnovaApi) -> None: assert result["data"] == { CONF_USERNAME: "sample@gmail.com", CONF_PASSWORD: "sample", - CONF_DEVICES: [], } diff --git a/tests/components/anova/test_init.py b/tests/components/anova/test_init.py index 66ea11fdaef5ff..2e3e2920abc370 100644 --- a/tests/components/anova/test_init.py +++ b/tests/components/anova/test_init.py @@ -1,13 +1,18 @@ """Test init for Anova.""" +from unittest.mock import patch + from anova_wifi import AnovaApi from homeassistant.components.anova.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_DEVICES, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from . import async_init_integration, create_entry +from tests.common import MockConfigEntry + async def test_async_setup_entry(hass: HomeAssistant, anova_api: AnovaApi) -> None: """Test a successful setup entry.""" @@ -55,3 +60,34 @@ async def test_websocket_failure( """Test that we successfully handle a websocket failure on setup.""" entry = await async_init_integration(hass) assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_migration_removing_devices_in_config_entry( + hass: HomeAssistant, anova_api: AnovaApi +) -> None: + """Test a successful setup entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Anova", + data={ + CONF_USERNAME: "sample@gmail.com", + CONF_PASSWORD: "sample", + CONF_DEVICES: [], + }, + unique_id="sample@gmail.com", + version=1, + minor_version=1, + ) + entry.add_to_hass(hass) + + with patch("homeassistant.components.anova.AnovaApi.authenticate"): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("sensor.anova_precision_cooker_mode") + assert state is not None + assert state.state == "idle" + + assert entry.version == 1 + assert entry.minor_version == 2 + assert CONF_DEVICES not in entry.data diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py index df27352b7b2698..a5a025b00d05bc 100644 --- a/tests/components/anthropic/test_config_flow.py +++ b/tests/components/anthropic/test_config_flow.py @@ -108,7 +108,7 @@ async def test_options( ), body={"type": "error", "error": {"type": "invalid_request_error"}}, ), - "invalid_request_error", + "unknown", ), ( AuthenticationError( diff --git a/tests/components/apple_tv/test_config_flow.py b/tests/components/apple_tv/test_config_flow.py index f37042a6f50e9b..4567bd32582ea7 100644 --- a/tests/components/apple_tv/test_config_flow.py +++ b/tests/components/apple_tv/test_config_flow.py @@ -16,6 +16,7 @@ CONF_START_OFF, DOMAIN, ) +from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -1189,18 +1190,17 @@ async def test_reconfigure_update_credentials(hass: HomeAssistant) -> None: ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": "reauth"}, - data={"identifier": "mrpid", "name": "apple tv"}, - ) + result = await config_entry.start_reauth_flow(hass, data={"name": "apple tv"}) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, ) assert result2["type"] is FlowResultType.FORM - assert result2["description_placeholders"] == {"protocol": "MRP"} + assert result2["description_placeholders"] == { + CONF_NAME: "Mock Title", + "protocol": "MRP", + } result3 = await hass.config_entries.flow.async_configure( result["flow_id"], 
{"pin": 1111} diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index d90084fa7c9b10..b72d9653c2d040 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -423,6 +423,10 @@ async def test_import_named_credential( ] +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_credentials"], +) async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: """Test config flow base case with no credentials registered.""" result = await hass.config_entries.flow.async_init( @@ -432,6 +436,10 @@ async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: assert result.get("reason") == "missing_credentials" +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_credentials"], +) async def test_config_flow_other_domain( hass: HomeAssistant, ws_client: ClientFixture, @@ -559,6 +567,10 @@ async def test_config_flow_multiple_entries( ) +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_credentials"], +) async def test_config_flow_create_delete_credential( hass: HomeAssistant, ws_client: ClientFixture, @@ -604,6 +616,10 @@ async def test_config_flow_with_config_credential( assert result["data"].get("auth_implementation") == TEST_DOMAIN +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_configuration"], +) @pytest.mark.parametrize("mock_application_credentials_integration", [None]) async def test_import_without_setup(hass: HomeAssistant, config_credential) -> None: """Test import of credentials without setting up the integration.""" @@ -619,6 +635,10 @@ async def test_import_without_setup(hass: HomeAssistant, config_credential) -> N assert result.get("reason") == "missing_configuration" +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.config.abort.missing_configuration"], +) @pytest.mark.parametrize("mock_application_credentials_integration", [None]) async def test_websocket_without_platform( hass: HomeAssistant, ws_client: ClientFixture diff --git a/tests/components/aseko_pool_live/test_config_flow.py b/tests/components/aseko_pool_live/test_config_flow.py index eb40decf21394c..b307f00abbebf1 100644 --- a/tests/components/aseko_pool_live/test_config_flow.py +++ b/tests/components/aseko_pool_live/test_config_flow.py @@ -128,8 +128,9 @@ async def test_async_step_reauth_success(hass: HomeAssistant, user: User) -> Non mock_entry = MockConfigEntry( domain=DOMAIN, - unique_id="UID", - data={CONF_EMAIL: "aseko@example.com"}, + unique_id="a_user_id", + data={CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, + version=2, ) mock_entry.add_to_hass(hass) @@ -151,13 +152,61 @@ async def test_async_step_reauth_success(hass: HomeAssistant, user: User) -> Non ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, + {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "new_password"}, ) await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" assert len(mock_setup_entry.mock_calls) == 1 + assert mock_entry.unique_id == "a_user_id" + assert dict(mock_entry.data) == { + CONF_EMAIL: "aseko@example.com", + CONF_PASSWORD: "new_password", + } + + +async def 
test_async_step_reauth_mismatch(hass: HomeAssistant, user: User) -> None: + """Test mismatch reauthentication.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="UID", + data={CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "passw0rd"}, + version=2, + ) + mock_entry.add_to_hass(hass) + + result = await mock_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.aseko_pool_live.config_flow.Aseko.login", + return_value=user, + ), + patch( + "homeassistant.components.aseko_pool_live.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "aseko@example.com", CONF_PASSWORD: "new_password"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" + assert len(mock_setup_entry.mock_calls) == 0 + assert mock_entry.unique_id == "UID" + assert dict(mock_entry.data) == { + CONF_EMAIL: "aseko@example.com", + CONF_PASSWORD: "passw0rd", + } @pytest.mark.parametrize( diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index fda26d2fb94e66..bd07601cd5d10f 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -16,7 +16,7 @@ def test_silence() -> None: segmenter = VoiceCommandSegmenter() # True return value indicates voice command has not finished - assert segmenter.process(_ONE_SECOND * 3, False) + assert segmenter.process(_ONE_SECOND * 3, 0.0) assert not segmenter.in_command @@ -26,15 +26,15 @@ def test_speech() -> None: segmenter = VoiceCommandSegmenter() # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # silence # False return value indicates voice command is finished - assert not segmenter.process(_ONE_SECOND, False) + assert not segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command @@ -112,19 +112,19 @@ def test_silence_seconds() -> None: segmenter = VoiceCommandSegmenter(silence_seconds=1.0) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.in_command @@ -134,27 +134,27 @@ def test_silence_reset() -> None: segmenter = VoiceCommandSegmenter(silence_seconds=1.0, reset_seconds=0.5) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # speech should reset 
silence detection - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.in_command @@ -166,23 +166,23 @@ def test_speech_reset() -> None: ) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.in_command # silence should reset speech detection - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.in_command # exactly enough speech now - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.in_command @@ -193,18 +193,18 @@ def test_timeout() -> None: # not enough to time out assert not segmenter.timed_out - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.timed_out # enough to time out - assert not segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.timed_out # flag resets with more audio - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.timed_out - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.timed_out @@ -215,14 +215,38 @@ def test_command_seconds() -> None: command_seconds=3, speech_seconds=1, silence_seconds=1, reset_seconds=1 ) - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) # Silence counts towards total command length - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) # Enough to finish command now - assert segmenter.process(_ONE_SECOND, True) - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND, 1.0) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) # Silence to finish - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) + + +def test_speech_thresholds() -> None: + """Test before/in command speech thresholds.""" + + segmenter = VoiceCommandSegmenter( + before_command_speech_threshold=0.2, + in_command_speech_threshold=0.5, + command_seconds=2, + speech_seconds=1, + silence_seconds=1, + ) + + # Not high enough probability to trigger command + assert segmenter.process(_ONE_SECOND, 0.1) + assert not segmenter.in_command + + # Triggers command + assert segmenter.process(_ONE_SECOND, 0.3) + assert segmenter.in_command + + # Now that same probability is considered silence. + # Finishes command. 
+ assert not segmenter.process(_ONE_SECOND, 0.3) diff --git a/tests/components/awair/test_config_flow.py b/tests/components/awair/test_config_flow.py index ac17cf414489fd..b27f20e83f3e2a 100644 --- a/tests/components/awair/test_config_flow.py +++ b/tests/components/awair/test_config_flow.py @@ -144,27 +144,32 @@ async def test_reauth(hass: HomeAssistant, user, cloud_devices) -> None: with patch("python_awair.AwairClient.query", side_effect=AuthError()): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=CLOUD_CONFIG, + user_input={CONF_ACCESS_TOKEN: "bad"}, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"} with ( patch( "python_awair.AwairClient.query", side_effect=[user, cloud_devices], ), - patch("homeassistant.components.awair.async_setup_entry", return_value=True), + patch( + "homeassistant.components.awair.async_setup_entry", return_value=True + ) as mock_setup_entry, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=CLOUD_CONFIG, + user_input={CONF_ACCESS_TOKEN: "good"}, ) + await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + mock_setup_entry.assert_called_once() + assert dict(mock_config.data) == {CONF_ACCESS_TOKEN: "good"} async def test_reauth_error(hass: HomeAssistant) -> None: @@ -395,10 +400,6 @@ async def test_zeroconf_discovery_update_configuration( return_value=True, ) as mock_setup_entry, patch("python_awair.AwairClient.query", side_effect=[local_devices]), - patch( - "homeassistant.components.awair.async_setup_entry", - return_value=True, - ), ): result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index c8ffc46ca3f7c2..52dd9c2f8ad62b 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -75,7 +75,7 @@ async def test_flow_manual_configuration(hass: HomeAssistant) -> None: } -async def test_manual_configuration_update_configuration( +async def test_manual_configuration_duplicate_fails( hass: HomeAssistant, config_entry_setup: MockConfigEntry, mock_requests: Callable[[str], None], @@ -105,7 +105,7 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" @pytest.mark.parametrize( @@ -221,7 +221,7 @@ async def test_reauth_flow_update_configuration( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "reauth_successful" assert config_entry_setup.data[CONF_PROTOCOL] == "https" assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" assert config_entry_setup.data[CONF_PORT] == 443 @@ -255,7 +255,7 @@ async def test_reconfiguration_flow_update_configuration( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == 
"already_configured" + assert result["reason"] == "reconfigure_successful" assert config_entry_setup.data[CONF_PROTOCOL] == "http" assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" assert config_entry_setup.data[CONF_PORT] == 80 diff --git a/tests/components/azure_devops/test_config_flow.py b/tests/components/azure_devops/test_config_flow.py index 9ebc9991939f42..64c771a7adc19a 100644 --- a/tests/components/azure_devops/test_config_flow.py +++ b/tests/components/azure_devops/test_config_flow.py @@ -57,12 +57,13 @@ async def test_reauth_authorization_error( mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps authorization error.""" + mock_config_entry.add_to_hass(hass) mock_devops_client.authorize.return_value = False mock_devops_client.authorized = False result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -71,7 +72,7 @@ async def test_reauth_authorization_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "invalid_auth"} @@ -108,13 +109,14 @@ async def test_reauth_connection_error( mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps connection error.""" + mock_config_entry.add_to_hass(hass) mock_devops_client.authorize.side_effect = aiohttp.ClientError mock_devops_client.authorized = False result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -123,7 +125,7 @@ async def test_reauth_connection_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "cannot_connect"} @@ -170,7 +172,7 @@ async def test_reauth_project_error( result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -179,7 +181,7 @@ async def test_reauth_project_error( await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "project_error"} @@ -197,8 +199,7 @@ async def test_reauth_flow( result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" - assert result["errors"] == {"base": "invalid_auth"} + assert result["step_id"] == "reauth_confirm" mock_devops_client.authorize.return_value = True mock_devops_client.authorized = True diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index a1d83f5cd75976..096df37d70477c 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -147,6 +147,54 @@ 'type': 'result', }) # --- +# name: 
test_details[with_hassio-with_backup_content] + dict({ + 'error': dict({ + 'code': 'unknown_command', + 'message': 'Unknown command.', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_details[with_hassio-without_backup_content] + dict({ + 'error': dict({ + 'code': 'unknown_command', + 'message': 'Unknown command.', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_details[without_hassio-with_backup_content] + dict({ + 'id': 1, + 'result': dict({ + 'backup': dict({ + 'date': '1970-01-01T00:00:00.000Z', + 'name': 'Test', + 'path': 'abc123.tar', + 'size': 0.0, + 'slug': 'abc123', + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[without_hassio-without_backup_content] + dict({ + 'id': 1, + 'result': dict({ + 'backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- # name: test_generate[with_hassio] dict({ 'error': dict({ @@ -221,3 +269,22 @@ 'type': 'result', }) # --- +# name: test_restore[with_hassio] + dict({ + 'error': dict({ + 'code': 'unknown_command', + 'message': 'Unknown command.', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_restore[without_hassio] + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index b4d9c52d0558fb..93ecb27bc9770b 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -23,7 +23,7 @@ async def test_downloading_backup( with ( patch( - "homeassistant.components.backup.manager.BackupManager.get_backup", + "homeassistant.components.backup.manager.BackupManager.async_get_backup", return_value=TEST_BACKUP, ), patch("pathlib.Path.exists", return_value=True), diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index 0472111e33e200..e064939d618d10 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -33,7 +33,7 @@ async def test_create_service( await setup_backup_integration(hass) with patch( - "homeassistant.components.backup.manager.BackupManager.generate_backup", + "homeassistant.components.backup.manager.BackupManager.async_create_backup", ) as generate_backup: await hass.services.async_call( DOMAIN, diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 41749298819d64..a4dba5c6936d6b 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -62,7 +62,7 @@ def _mock_iterdir(path: Path) -> list[Path]: "2025.1.0", ), ): - await manager.generate_backup() + await manager.async_create_backup() assert mocked_json_bytes.call_count == 1 backup_json_dict = mocked_json_bytes.call_args[0][0] @@ -108,7 +108,7 @@ async def test_load_backups(hass: HomeAssistant) -> None: ), ): await manager.load_backups() - backups = await manager.get_backups() + backups = await manager.async_get_backups() assert backups == {TEST_BACKUP.slug: TEST_BACKUP} @@ -123,7 +123,7 @@ async def test_load_backups_with_exception( patch("tarfile.open", side_effect=OSError("Test exception")), ): await manager.load_backups() - backups = await manager.get_backups() + backups = await manager.async_get_backups() assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text assert backups == {} @@ -138,7 +138,7 @@ async def test_removing_backup( manager.loaded_backups = True with patch("pathlib.Path.exists", 
return_value=True): - await manager.remove_backup(TEST_BACKUP.slug) + await manager.async_remove_backup(slug=TEST_BACKUP.slug) assert "Removed backup located at" in caplog.text @@ -149,7 +149,7 @@ async def test_removing_non_existing_backup( """Test removing not existing backup.""" manager = BackupManager(hass) - await manager.remove_backup("non_existing") + await manager.async_remove_backup(slug="non_existing") assert "Removed backup located at" not in caplog.text @@ -163,7 +163,7 @@ async def test_getting_backup_that_does_not_exist( manager.loaded_backups = True with patch("pathlib.Path.exists", return_value=False): - backup = await manager.get_backup(TEST_BACKUP.slug) + backup = await manager.async_get_backup(slug=TEST_BACKUP.slug) assert backup is None assert ( @@ -172,15 +172,15 @@ ) in caplog.text -async def test_generate_backup_when_backing_up(hass: HomeAssistant) -> None: +async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: """Test generate backup.""" manager = BackupManager(hass) manager.backing_up = True with pytest.raises(HomeAssistantError, match="Backup already in progress"): - await manager.generate_backup() + await manager.async_create_backup() -async def test_generate_backup( +async def test_async_create_backup( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: @@ -285,7 +285,7 @@ async def _mock_step(hass: HomeAssistant) -> None: await _mock_backup_generation(manager) -async def test_loading_platforms_when_running_pre_backup_actions( +async def test_loading_platforms_when_running_async_pre_backup_actions( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: @@ -302,7 +302,7 @@ async def test_loading_platforms_when_running_pre_backup_actions( async_post_backup=AsyncMock(), ), ) - await manager.pre_backup_actions() + await manager.async_pre_backup_actions() assert manager.loaded_platforms assert len(manager.platforms) == 1 @@ -310,7 +310,7 @@ assert "Loaded 1 platforms" in caplog.text -async def test_loading_platforms_when_running_post_backup_actions( +async def test_loading_platforms_when_running_async_post_backup_actions( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: @@ -327,9 +327,37 @@ async def test_loading_platforms_when_running_post_backup_actions( async_post_backup=AsyncMock(), ), ) - await manager.post_backup_actions() + await manager.async_post_backup_actions() assert manager.loaded_platforms assert len(manager.platforms) == 1 assert "Loaded 1 platforms" in caplog.text + + +async def test_async_trigger_restore( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test trigger restore.""" + manager = BackupManager(hass) + manager.loaded_backups = True + manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} + + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + ): + await manager.async_restore_backup(TEST_BACKUP.slug) + assert mocked_write_text.call_args[0][0] == '{"path": "abc123.tar"}' + assert mocked_service_call.called + + +async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None: + """Test trigger restore with a missing backup.""" + manager = BackupManager(hass) + manager.loaded_backups = True + + with pytest.raises(HomeAssistantError, match="Backup abc123 not found"): + await 
manager.async_restore_backup(TEST_BACKUP.slug) diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 388aba6bc04f50..125ba8adaad1c4 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -5,6 +5,7 @@ import pytest from syrupy import SnapshotAssertion +from homeassistant.components.backup.manager import Backup from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -45,13 +46,48 @@ async def test_info( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.get_backups", + "homeassistant.components.backup.manager.BackupManager.async_get_backups", return_value={TEST_BACKUP.slug: TEST_BACKUP}, ): await client.send_json_auto_id({"type": "backup/info"}) assert snapshot == await client.receive_json() +@pytest.mark.parametrize( + "backup_content", + [ + pytest.param(TEST_BACKUP, id="with_backup_content"), + pytest.param(None, id="without_backup_content"), + ], +) +@pytest.mark.parametrize( + "with_hassio", + [ + pytest.param(True, id="with_hassio"), + pytest.param(False, id="without_hassio"), + ], +) +async def test_details( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + with_hassio: bool, + backup_content: Backup | None, +) -> None: + """Test getting backup details.""" + await setup_backup_integration(hass, with_hassio=with_hassio) + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + return_value=backup_content, + ): + await client.send_json_auto_id({"type": "backup/details", "slug": "abc123"}) + assert await client.receive_json() == snapshot + + @pytest.mark.parametrize( "with_hassio", [ @@ -72,7 +108,7 @@ async def test_remove( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.remove_backup", + "homeassistant.components.backup.manager.BackupManager.async_remove_backup", ): await client.send_json_auto_id({"type": "backup/remove", "slug": "abc123"}) assert snapshot == await client.receive_json() @@ -98,13 +134,39 @@ async def test_generate( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.generate_backup", + "homeassistant.components.backup.manager.BackupManager.async_create_backup", return_value=TEST_BACKUP, ): await client.send_json_auto_id({"type": "backup/generate"}) assert snapshot == await client.receive_json() +@pytest.mark.parametrize( + "with_hassio", + [ + pytest.param(True, id="with_hassio"), + pytest.param(False, id="without_hassio"), + ], +) +async def test_restore( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + with_hassio: bool, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration(hass, with_hassio=with_hassio) + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_restore_backup", + ): + await client.send_json_auto_id({"type": "backup/restore", "slug": "abc123"}) + assert await client.receive_json() == snapshot + + @pytest.mark.parametrize( "access_token_fixture_name", ["hass_access_token", "hass_supervisor_access_token"], @@ -132,7 +194,7 @@ async def test_backup_end( await hass.async_block_till_done() with patch( - 
"homeassistant.components.backup.manager.BackupManager.post_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", ): await client.send_json_auto_id({"type": "backup/end"}) assert snapshot == await client.receive_json() @@ -165,7 +227,7 @@ async def test_backup_start( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.pre_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", ): await client.send_json_auto_id({"type": "backup/start"}) assert snapshot == await client.receive_json() @@ -193,7 +255,7 @@ async def test_backup_end_excepion( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.post_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", side_effect=exception, ): await client.send_json_auto_id({"type": "backup/end"}) @@ -222,7 +284,7 @@ async def test_backup_start_excepion( await hass.async_block_till_done() with patch( - "homeassistant.components.backup.manager.BackupManager.pre_backup_actions", + "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", side_effect=exception, ): await client.send_json_auto_id({"type": "backup/start"}) diff --git a/tests/components/balboa/__init__.py b/tests/components/balboa/__init__.py index a27293e955f3ab..2cb100e364284a 100644 --- a/tests/components/balboa/__init__.py +++ b/tests/components/balboa/__init__.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock -from homeassistant.components.balboa import CONF_SYNC_TIME, DOMAIN +from homeassistant.components.balboa.const import CONF_SYNC_TIME, DOMAIN from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant, State diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index ff29592b1376eb..cbde856ff89e60 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -6,6 +6,7 @@ from mozart_api.models import ( Action, BeolinkPeer, + BeolinkSelf, ContentItem, ListeningMode, ListeningModeFeatures, @@ -14,6 +15,7 @@ PlaybackContentMetadata, PlaybackProgress, PlaybackState, + PlayQueueSettings, ProductState, RemoteMenuItem, RenderingState, @@ -33,11 +35,13 @@ TEST_DATA_CREATE_ENTRY, TEST_DATA_CREATE_ENTRY_2, TEST_FRIENDLY_NAME, - TEST_FRIENDLY_NAME_2, TEST_FRIENDLY_NAME_3, + TEST_FRIENDLY_NAME_4, + TEST_HOST_3, + TEST_HOST_4, TEST_JID_1, - TEST_JID_2, TEST_JID_3, + TEST_JID_4, TEST_NAME, TEST_NAME_2, TEST_SERIAL_NUMBER, @@ -100,7 +104,7 @@ def mock_mozart_client() -> Generator[AsyncMock]: # REST API client methods client.get_beolink_self = AsyncMock() - client.get_beolink_self.return_value = BeolinkPeer( + client.get_beolink_self.return_value = BeolinkSelf( friendly_name=TEST_FRIENDLY_NAME, jid=TEST_JID_1 ) client.get_softwareupdate_status = AsyncMock() @@ -120,7 +124,7 @@ def mock_mozart_client() -> Generator[AsyncMock]: client.get_available_sources = AsyncMock() client.get_available_sources.return_value = SourceArray( items=[ - # Is in the HIDDEN_SOURCE_IDS constant, so should not be user selectable + # Is not playable, so should not be user selectable Source( name="AirPlay", id="airPlay", @@ -133,14 +137,16 @@ def mock_mozart_client() -> Generator[AsyncMock]: id="tidal", is_enabled=True, is_multiroom_available=True, + is_playable=True, ), Source( name="Line-In", id="lineIn", is_enabled=True, 
is_multiroom_available=False, + is_playable=True, ), - # Is disabled, so should not be user selectable + # Is disabled and not playable, so should not be user selectable Source( name="Powerlink", id="pl", @@ -261,13 +267,29 @@ def mock_mozart_client() -> Generator[AsyncMock]: } client.get_beolink_peers = AsyncMock() client.get_beolink_peers.return_value = [ - BeolinkPeer(friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_2), - BeolinkPeer(friendly_name=TEST_FRIENDLY_NAME_3, jid=TEST_JID_3), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_3, + jid=TEST_JID_3, + ip_address=TEST_HOST_3, + ), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_4, + jid=TEST_JID_4, + ip_address=TEST_HOST_4, + ), ] client.get_beolink_listeners = AsyncMock() client.get_beolink_listeners.return_value = [ - BeolinkPeer(friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_2), - BeolinkPeer(friendly_name=TEST_FRIENDLY_NAME_3, jid=TEST_JID_3), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_3, + jid=TEST_JID_3, + ip_address=TEST_HOST_3, + ), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_4, + jid=TEST_JID_4, + ip_address=TEST_HOST_4, + ), ] client.get_listening_mode_set = AsyncMock() @@ -296,6 +318,12 @@ def mock_mozart_client() -> Generator[AsyncMock]: href="", id=123, ) + client.get_settings_queue = AsyncMock() + client.get_settings_queue.return_value = PlayQueueSettings( + repeat="none", + shuffle=False, + ) + client.post_standby = AsyncMock() client.set_current_volume_level = AsyncMock() client.set_volume_mute = AsyncMock() @@ -317,6 +345,7 @@ def mock_mozart_client() -> Generator[AsyncMock]: client.post_beolink_allstandby = AsyncMock() client.join_latest_beolink_experience = AsyncMock() client.activate_listening_mode = AsyncMock() + client.set_settings_queue = AsyncMock() # Non-REST API client methods client.check_device_connection = AsyncMock() diff --git a/tests/components/bang_olufsen/const.py b/tests/components/bang_olufsen/const.py index 7cbe81dc06a406..6602a898eb688a 100644 --- a/tests/components/bang_olufsen/const.py +++ b/tests/components/bang_olufsen/const.py @@ -16,6 +16,7 @@ PlayQueueItemType, RenderingState, SceneProperties, + Source, UserFlow, VolumeLevel, VolumeMute, @@ -52,14 +53,17 @@ TEST_FRIENDLY_NAME_2 = "Laundry room Balance" TEST_JID_2 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.22222222@products.bang-olufsen.com" TEST_MEDIA_PLAYER_ENTITY_ID_2 = "media_player.beosound_balance_22222222" +TEST_HOST_2 = "192.168.0.2" TEST_FRIENDLY_NAME_3 = "Lego room Balance" TEST_JID_3 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.33333333@products.bang-olufsen.com" TEST_MEDIA_PLAYER_ENTITY_ID_3 = "media_player.beosound_balance_33333333" +TEST_HOST_3 = "192.168.0.3" TEST_FRIENDLY_NAME_4 = "Lounge room Balance" TEST_JID_4 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.44444444@products.bang-olufsen.com" TEST_MEDIA_PLAYER_ENTITY_ID_4 = "media_player.beosound_balance_44444444" +TEST_HOST_4 = "192.168.0.4" TEST_HOSTNAME_ZEROCONF = TEST_NAME.replace(" ", "-") + ".local." TEST_TYPE_ZEROCONF = "_bangolufsen._tcp.local." 
@@ -122,11 +126,15 @@ }, ) -TEST_AUDIO_SOURCES = [BangOlufsenSource.TIDAL.name, BangOlufsenSource.LINE_IN.name] +TEST_SOURCE = Source( + name="Tidal", id="tidal", is_seekable=True, is_enabled=True, is_playable=True +) +TEST_AUDIO_SOURCES = [TEST_SOURCE.name, BangOlufsenSource.LINE_IN.name] TEST_VIDEO_SOURCES = ["HDMI A"] TEST_SOURCES = TEST_AUDIO_SOURCES + TEST_VIDEO_SOURCES TEST_FALLBACK_SOURCES = [ "Audio Streamer", + "Bluetooth", "Spotify Connect", "Line-In", "Optical", diff --git a/tests/components/bang_olufsen/snapshots/test_media_player.ambr b/tests/components/bang_olufsen/snapshots/test_media_player.ambr new file mode 100644 index 00000000000000..ea96e286821467 --- /dev/null +++ b/tests/components/bang_olufsen/snapshots/test_media_player.ambr @@ -0,0 +1,874 @@ +# serializer version: 1 +# name: test_async_beolink_allstandby + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[all_discovered-True-None-log_messages0-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 
'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[all_discovered-True-expand_side_effect1-log_messages1-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[beolink_jids-parameter_value2-None-log_messages2-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[beolink_jids-parameter_value3-expand_side_effect3-log_messages3-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': 
'1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_unexpand + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening 
Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members0-1-0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members0-1-0].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members1-0-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': 
'1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members1-0-1].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 
'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'media_position': 0, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Line-In', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- 
+# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_unjoin_player + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_update_beolink_listener + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'leader': dict({ + 'Laundry room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 
'media_player.beosound_balance_11111111', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_update_beolink_listener.1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'icon': 'mdi:speaker-wireless', + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- diff --git a/tests/components/bang_olufsen/test_init.py b/tests/components/bang_olufsen/test_init.py index 3eb98e956beb14..c8e4c05f9abaa3 100644 --- a/tests/components/bang_olufsen/test_init.py +++ b/tests/components/bang_olufsen/test_init.py @@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceRegistry -from .const import TEST_MODEL_BALANCE, TEST_NAME, TEST_SERIAL_NUMBER +from .const import TEST_FRIENDLY_NAME, TEST_MODEL_BALANCE, TEST_SERIAL_NUMBER from tests.common import MockConfigEntry @@ -35,7 +35,8 @@ async def test_setup_entry( identifiers={(DOMAIN, TEST_SERIAL_NUMBER)} ) assert device is not None - assert device.name == TEST_NAME + # Is usually TEST_NAME, but is updated to the device's friendly name by _update_name_and_beolink + assert device.name == TEST_FRIENDLY_NAME assert device.model == TEST_MODEL_BALANCE # Ensure that the connection has been checked WebSocket connection has been initialized @@ -85,6 +86,7 @@ async def test_unload_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state == ConfigEntryState.LOADED + assert hasattr(mock_config_entry, "runtime_data") # Unload entry await hass.config_entries.async_unload(mock_config_entry.entry_id) @@ -94,5 +96,5 @@ async def test_unload_entry( assert mock_mozart_client.close_api_client.call_count == 1 # Ensure that the entry is not loaded and has been removed from hass - assert mock_config_entry.entry_id not in hass.data[DOMAIN] + assert not hasattr(mock_config_entry, "runtime_data") assert mock_config_entry.state 
== ConfigEntryState.NOT_LOADED diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index ff42ae2a867bea..aa35b0265dc380 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -4,16 +4,23 @@ import logging from unittest.mock import AsyncMock, patch +from mozart_api.exceptions import NotFoundException from mozart_api.models import ( BeolinkLeader, + BeolinkSelf, PlaybackContentMetadata, + PlayQueueSettings, RenderingState, Source, + SourceArray, WebsocketNotificationTag, ) import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.bang_olufsen.const import ( + BANG_OLUFSEN_REPEAT_FROM_HA, BANG_OLUFSEN_STATES, DOMAIN, BangOlufsenSource, @@ -32,7 +39,9 @@ ATTR_MEDIA_EXTRA, ATTR_MEDIA_POSITION, ATTR_MEDIA_POSITION_UPDATED_AT, + ATTR_MEDIA_REPEAT, ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TITLE, ATTR_MEDIA_TRACK, ATTR_MEDIA_VOLUME_LEVEL, @@ -41,23 +50,29 @@ ATTR_SOUND_MODE_LIST, DOMAIN as MEDIA_PLAYER_DOMAIN, SERVICE_CLEAR_PLAYLIST, + SERVICE_JOIN, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK, SERVICE_MEDIA_STOP, SERVICE_PLAY_MEDIA, + SERVICE_REPEAT_SET, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, + SERVICE_SHUFFLE_SET, SERVICE_TURN_OFF, + SERVICE_UNJOIN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, MediaPlayerState, MediaType, + RepeatMode, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.setup import async_setup_component from .const import ( @@ -70,7 +85,10 @@ TEST_DEEZER_TRACK, TEST_FALLBACK_SOURCES, TEST_FRIENDLY_NAME_2, + TEST_JID_1, TEST_JID_2, + TEST_JID_3, + TEST_JID_4, TEST_LISTENING_MODE_REF, TEST_MEDIA_PLAYER_ENTITY_ID, TEST_MEDIA_PLAYER_ENTITY_ID_2, @@ -87,6 +105,7 @@ TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, TEST_SOUND_MODE_2, TEST_SOUND_MODES, + TEST_SOURCE, TEST_SOURCES, TEST_VIDEO_SOURCES, TEST_VOLUME, @@ -130,6 +149,9 @@ async def test_initialization( mock_mozart_client.get_remote_menu.assert_called_once() mock_mozart_client.get_listening_mode_set.assert_called_once() mock_mozart_client.get_active_listening_mode.assert_called_once() + mock_mozart_client.get_beolink_self.assert_called_once() + mock_mozart_client.get_beolink_peers.assert_called_once() + mock_mozart_client.get_beolink_listeners.assert_called_once() async def test_async_update_sources_audio_only( @@ -190,6 +212,37 @@ async def test_async_update_sources_remote( assert mock_mozart_client.get_remote_menu.call_count == 2 +async def test_async_update_sources_availability( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the playback_source WebSocket event updates available playback sources.""" + # Remove video sources to simplify test + mock_mozart_client.get_remote_menu.return_value = {} + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + playback_source_callback = ( + mock_mozart_client.get_playback_source_notifications.call_args[0][0] + ) + + assert mock_mozart_client.get_available_sources.call_count == 1 + + # Add a source that is available and playable + mock_mozart_client.get_available_sources.return_value = 
SourceArray( + items=[TEST_SOURCE] + ) + + # Send playback_source. The source is not actually used, so its attributes don't matter + playback_source_callback(Source()) + + assert mock_mozart_client.get_available_sources.call_count == 2 + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == [TEST_SOURCE.name] + + async def test_async_update_playback_metadata( hass: HomeAssistant, mock_mozart_client: AsyncMock, @@ -305,19 +358,17 @@ async def test_async_update_playback_state( @pytest.mark.parametrize( - ("reported_source", "real_source", "content_type", "progress", "metadata"), + ("source", "content_type", "progress", "metadata"), [ - # Normal source, music mediatype expected, no progress expected + # Normal source, music mediatype expected ( - BangOlufsenSource.TIDAL, - BangOlufsenSource.TIDAL, + TEST_SOURCE, MediaType.MUSIC, TEST_PLAYBACK_PROGRESS.progress, PlaybackContentMetadata(), ), - # URI source, url media type expected, no progress expected + # URI source, url media type expected ( - BangOlufsenSource.URI_STREAMER, BangOlufsenSource.URI_STREAMER, MediaType.URL, TEST_PLAYBACK_PROGRESS.progress, @@ -326,44 +377,17 @@ async def test_async_update_playback_state( # Line-In source,media type expected, progress 0 expected ( BangOlufsenSource.LINE_IN, - BangOlufsenSource.CHROMECAST, MediaType.MUSIC, 0, PlaybackContentMetadata(), ), - # Chromecast as source, but metadata says Line-In. - # Progress is not set to 0 as the source is Chromecast first - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.LINE_IN, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(title=BangOlufsenSource.LINE_IN.name), - ), - # Chromecast as source, but metadata says Bluetooth - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.BLUETOOTH, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(title=BangOlufsenSource.BLUETOOTH.name), - ), - # Chromecast as source, but metadata says Bluetooth in another way - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.BLUETOOTH, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(art=[]), - ), ], ) async def test_async_update_source_change( hass: HomeAssistant, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, - reported_source: Source, - real_source: Source, + source: Source, content_type: MediaType, progress: int, metadata: PlaybackContentMetadata, @@ -392,10 +416,10 @@ async def test_async_update_source_change( # Simulate metadata playback_metadata_callback(metadata) - source_change_callback(reported_source) + source_change_callback(source) assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name + assert states.attributes[ATTR_INPUT_SOURCE] == source.name assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type assert states.attributes[ATTR_MEDIA_POSITION] == progress @@ -493,11 +517,14 @@ async def test_async_update_beolink_line_in( assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes["group_members"] == [] - assert mock_mozart_client.get_beolink_listeners.call_count == 1 + # Called once during _initialize and once during _async_update_beolink + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + assert mock_mozart_client.get_beolink_peers.call_count == 2 async def test_async_update_beolink_listener( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: 
AsyncMock, mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -530,7 +557,56 @@ async def test_async_update_beolink_listener( TEST_MEDIA_PLAYER_ENTITY_ID, ] - assert mock_mozart_client.get_beolink_listeners.call_count == 0 + # Called once for each entity during _initialize + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + # Called once for each entity during _initialize and + # once more during _async_update_beolink for the entity that has the callback associated with it. + assert mock_mozart_client.get_beolink_peers.call_count == 3 + + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_update_name_and_beolink( + hass: HomeAssistant, + device_registry: DeviceRegistry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test _async_update_name_and_beolink.""" + # Change response to ensure device name is changed + mock_mozart_client.get_beolink_self.return_value = BeolinkSelf( + friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_1 + ) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + configuration_callback = ( + mock_mozart_client.get_notification_notifications.call_args[0][0] + ) + # Trigger callback + configuration_callback(WebsocketNotificationTag(value="configuration")) + + await hass.async_block_till_done() + + assert mock_mozart_client.get_beolink_self.call_count == 2 + assert mock_mozart_client.get_beolink_peers.call_count == 2 + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + + # Check that device name has been changed + assert mock_config_entry.unique_id + assert ( + device := device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + ) + assert device.name == TEST_FRIENDLY_NAME_2 async def test_async_mute_volume( @@ -669,10 +745,12 @@ async def test_async_media_next_track( @pytest.mark.parametrize( ("source", "expected_result", "seek_called_times"), [ - # Deezer source, seek expected - (BangOlufsenSource.DEEZER, does_not_raise(), 1), - # Non deezer source, seek shouldn't work - (BangOlufsenSource.TIDAL, pytest.raises(HomeAssistantError), 0), + # Seekable source, seek expected + (TEST_SOURCE, does_not_raise(), 1), + # Non seekable source, seek shouldn't work + (BangOlufsenSource.LINE_IN, pytest.raises(HomeAssistantError), 0), + # Malformed source, seek shouldn't work + (Source(), pytest.raises(HomeAssistantError), 0), ], ) async def test_async_media_seek( @@ -756,7 +834,7 @@ async def test_async_clear_playlist( # Invalid source ("Test source", pytest.raises(ServiceValidationError), 0, 0), # Valid audio source - (BangOlufsenSource.TIDAL.name, does_not_raise(), 1, 0), + (TEST_SOURCE.name, does_not_raise(), 1, 0), # Valid video source (TEST_VIDEO_SOURCES[0], does_not_raise(), 0, 1), ], @@ -1304,6 +1382,7 @@ async def test_async_browse_media( ) async def test_async_join_players( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -1325,11 +1404,11 @@ async def test_async_join_players( await hass.config_entries.async_setup(mock_config_entry_2.entry_id) # Set the source to a beolink expandable source - 
source_change_callback(BangOlufsenSource.TIDAL) + source_change_callback(TEST_SOURCE) await hass.services.async_call( - "media_player", - "join", + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_GROUP_MEMBERS: group_members, @@ -1340,6 +1419,14 @@ async def test_async_join_players( assert mock_mozart_client.post_beolink_expand.call_count == expand_count assert mock_mozart_client.join_latest_beolink_experience.call_count == join_count + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + @pytest.mark.parametrize( ("source", "group_members", "expected_result", "error_type"), @@ -1353,7 +1440,7 @@ async def test_async_join_players( ), # Invalid media_player entity ( - BangOlufsenSource.TIDAL, + TEST_SOURCE, [TEST_MEDIA_PLAYER_ENTITY_ID_3], pytest.raises(ServiceValidationError), "invalid_grouping_entity", @@ -1362,6 +1449,7 @@ async def test_async_join_players( ) async def test_async_join_players_invalid( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -1386,8 +1474,8 @@ async def test_async_join_players_invalid( with expected_result as exc_info: await hass.services.async_call( - "media_player", - "join", + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_GROUP_MEMBERS: group_members, @@ -1402,9 +1490,18 @@ async def test_async_join_players_invalid( assert mock_mozart_client.post_beolink_expand.call_count == 0 assert mock_mozart_client.join_latest_beolink_experience.call_count == 0 + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + async def test_async_unjoin_player( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: @@ -1414,10 +1511,270 @@ async def test_async_unjoin_player( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "unjoin", + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) mock_mozart_client.post_beolink_leave.assert_called_once() + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_join( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_beolink_join with defined JID.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_join", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + "beolink_jid": TEST_JID_2, + }, + blocking=True, + ) + + mock_mozart_client.join_beolink_peer.assert_called_once_with(jid=TEST_JID_2) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == 
snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ( + "parameter", + "parameter_value", + "expand_side_effect", + "log_messages", + "peers_call_count", + ), + [ + # All discovered + # Valid peers + ("all_discovered", True, None, [], 2), + # Invalid peers + ( + "all_discovered", + True, + NotFoundException(), + [f"Unable to expand to {TEST_JID_3}", f"Unable to expand to {TEST_JID_4}"], + 2, + ), + # Beolink JIDs + # Valid peer + ("beolink_jids", [TEST_JID_3, TEST_JID_4], None, [], 1), + # Invalid peer + ( + "beolink_jids", + [TEST_JID_3, TEST_JID_4], + NotFoundException(), + [ + f"Unable to expand to {TEST_JID_3}. Is the device available on the network?", + f"Unable to expand to {TEST_JID_4}. Is the device available on the network?", + ], + 1, + ), + ], +) +async def test_async_beolink_expand( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + parameter: str, + parameter_value: bool | list[str], + expand_side_effect: NotFoundException | None, + log_messages: list[str], + peers_call_count: int, +) -> None: + """Test async_beolink_expand.""" + mock_mozart_client.post_beolink_expand.side_effect = expand_side_effect + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + + # Set the source to a beolink expandable source + source_change_callback(TEST_SOURCE) + + await hass.services.async_call( + DOMAIN, + "beolink_expand", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + parameter: parameter_value, + }, + blocking=True, + ) + + # Check log messages + for log_message in log_messages: + assert log_message in caplog.text + + # Called once during _initialize and once during async_beolink_expand for all_discovered + assert mock_mozart_client.get_beolink_peers.call_count == peers_call_count + + assert mock_mozart_client.post_beolink_expand.call_count == len( + await mock_mozart_client.get_beolink_peers() + ) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_unexpand( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test test_async_beolink_unexpand.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_unexpand", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + "beolink_jids": [TEST_JID_3, TEST_JID_4], + }, + blocking=True, + ) + + assert mock_mozart_client.post_beolink_unexpand.call_count == 2 + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_allstandby( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_beolink_allstandby.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_allstandby", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_beolink_allstandby.assert_called_once() + + assert 
(states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ("repeat"), + [ + # Repeat all + (RepeatMode.ALL), + # Repeat track + (RepeatMode.ONE), + # Repeat none + (RepeatMode.OFF), + ], +) +async def test_async_set_repeat( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + repeat: RepeatMode, +) -> None: + """Test async_set_repeat.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert ATTR_MEDIA_REPEAT not in states.attributes + + # Set the return value of the repeat endpoint to match service call + mock_mozart_client.get_settings_queue.return_value = PlayQueueSettings( + repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_REPEAT: repeat, + }, + blocking=True, + ) + mock_mozart_client.set_settings_queue.assert_called_once_with( + play_queue_settings=PlayQueueSettings( + repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat] + ) + ) + + # Test the BANG_OLUFSEN_REPEAT_TO_HA dict by checking property value + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_MEDIA_REPEAT] == repeat + + +@pytest.mark.parametrize( + ("shuffle"), + [ + # Shuffle on + (True), + # Shuffle off + (False), + ], +) +async def test_async_set_shuffle( + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + shuffle: bool, +) -> None: + """Test async_set_shuffle.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert ATTR_MEDIA_SHUFFLE not in states.attributes + + # Set the return value of the shuffle endpoint to match service call + mock_mozart_client.get_settings_queue.return_value = PlayQueueSettings( + shuffle=shuffle + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_SHUFFLE: shuffle, + }, + blocking=True, + ) + mock_mozart_client.set_settings_queue.assert_called_once_with( + play_queue_settings=PlayQueueSettings(shuffle=shuffle) + ) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states.attributes[ATTR_MEDIA_SHUFFLE] == shuffle diff --git a/tests/components/blebox/test_cover.py b/tests/components/blebox/test_cover.py index 1900a6d6834dfa..2d9125b22065e1 100644 --- a/tests/components/blebox/test_cover.py +++ b/tests/components/blebox/test_cover.py @@ -11,12 +11,9 @@ ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, CoverDeviceClass, CoverEntityFeature, + CoverState, ) from homeassistant.const import ( ATTR_DEVICE_CLASS, @@ -212,7 +209,7 @@ def open_gate(): feature_mock.async_open = AsyncMock(side_effect=open_gate) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -221,7 +218,7 @@ def open_gate(): {"entity_id": entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_OPENING + assert 
hass.states.get(entity_id).state == CoverState.OPENING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -240,13 +237,13 @@ def close(): feature_mock.async_close = AsyncMock(side_effect=close) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN feature_mock.async_update = AsyncMock() await hass.services.async_call( "cover", SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING def opening_to_stop_feature_mock(feature_mock): @@ -270,13 +267,13 @@ async def test_stop(feature, hass: HomeAssistant) -> None: opening_to_stop_feature_mock(feature_mock) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING feature_mock.async_update = AsyncMock() await hass.services.async_call( "cover", SERVICE_STOP_COVER, {"entity_id": entity_id}, blocking=True ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -295,7 +292,7 @@ def initial_update(): state = hass.states.get(entity_id) assert state.attributes[ATTR_CURRENT_POSITION] == 71 # 100 - 29 - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -318,7 +315,7 @@ def set_position(position): feature_mock.async_set_position = AsyncMock(side_effect=set_position) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -327,7 +324,7 @@ def set_position(position): {"entity_id": entity_id, ATTR_POSITION: 1}, blocking=True, ) # almost closed - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING async def test_unknown_position(shutterbox, hass: HomeAssistant) -> None: @@ -344,7 +341,7 @@ def initial_update(): await async_setup_entity(hass, entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_CURRENT_POSITION not in state.attributes @@ -402,7 +399,7 @@ def initial_update(): feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -416,7 +413,7 @@ def initial_update(): feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING @pytest.mark.parametrize("feature", ALL_COVER_FIXTURES, indirect=["feature"]) @@ -430,7 +427,7 @@ def initial_update(): feature_mock.async_update = AsyncMock(side_effect=initial_update) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED async def test_tilt_position(shutterbox, hass: HomeAssistant) -> None: @@ -465,7 +462,7 @@ 
def set_tilt(tilt_position): feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt) await async_setup_entity(hass, entity_id) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED feature_mock.async_update = AsyncMock() await hass.services.async_call( @@ -474,7 +471,7 @@ def set_tilt(tilt_position): {"entity_id": entity_id, ATTR_TILT_POSITION: 80}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING async def test_open_tilt(shutterbox, hass: HomeAssistant) -> None: diff --git a/tests/components/blebox/test_init.py b/tests/components/blebox/test_init.py index f406df51bd4e3b..0cb5139336c523 100644 --- a/tests/components/blebox/test_init.py +++ b/tests/components/blebox/test_init.py @@ -5,7 +5,6 @@ import blebox_uniapi import pytest -from homeassistant.components.blebox.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -57,10 +56,10 @@ async def test_unload_config_entry(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.data[DOMAIN] + assert hasattr(entry, "runtime_data") await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) + assert not hasattr(entry, "runtime_data") assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/blink/test_init.py b/tests/components/blink/test_init.py index 3cd2cd51ebdb83..6d4a93e58ab887 100644 --- a/tests/components/blink/test_init.py +++ b/tests/components/blink/test_init.py @@ -66,18 +66,17 @@ async def test_setup_not_ready_authkey_required( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR -async def test_unload_entry_multiple( +async def test_unload_entry( hass: HomeAssistant, mock_blink_api: MagicMock, mock_blink_auth_api: MagicMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test being able to unload one of 2 entries.""" + """Test unload doesn't un-register services.""" mock_config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - hass.data[DOMAIN]["dummy"] = {1: 2} assert mock_config_entry.state is ConfigEntryState.LOADED assert await hass.config_entries.async_unload(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index 155d6b66e4ef6d..b4ee61dee5793f 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -1,71 +1,124 @@ """Common fixtures for the Bluesound tests.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator +from dataclasses import dataclass +import ipaddress +from typing import Any from unittest.mock import AsyncMock, patch -from pyblu import Status, SyncStatus +from pyblu import Input, Player, Preset, Status, SyncStatus import pytest from homeassistant.components.bluesound.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant +from .utils import LongPollingMock + from tests.common import MockConfigEntry -@pytest.fixture -def sync_status() -> SyncStatus: - """Return a sync status object.""" - return SyncStatus( - etag="etag", - 
id="1.1.1.1:11000", - mac="00:11:22:33:44:55", - name="player-name", - image="invalid_url", - initialized=True, - brand="brand", - model="model", - model_name="model-name", - volume_db=0.5, - volume=50, - group=None, - master=None, - slaves=None, - zone=None, - zone_master=None, - zone_slave=None, - mute_volume_db=None, - mute_volume=None, - ) +@dataclass +class PlayerMockData: + """Container for player mock data.""" + host: str + player: AsyncMock + status_long_polling_mock: LongPollingMock[Status] + sync_status_long_polling_mock: LongPollingMock[SyncStatus] -@pytest.fixture -def status() -> Status: - """Return a status object.""" - return Status( - etag="etag", - input_id=None, - service=None, - state="playing", - shuffle=False, - album=None, - artist=None, - name=None, - image=None, - volume=10, - volume_db=22.3, - mute=False, - mute_volume=None, - mute_volume_db=None, - seconds=2, - total_seconds=123.1, - can_seek=False, - sleep=0, - group_name=None, - group_volume=None, - indexing=False, - stream_url=None, - ) + @staticmethod + async def generate(host: str) -> "PlayerMockData": + """Generate player mock data.""" + host_ip = ipaddress.ip_address(host) + assert host_ip.version == 4 + mac_parts = [0xFF, 0xFF, *host_ip.packed] + mac = ":".join(f"{x:02X}" for x in mac_parts) + + player_name = f"player-name{host.replace('.', '')}" + + player = await AsyncMock(spec=Player)() + player.__aenter__.return_value = player + + status_long_polling_mock = LongPollingMock( + Status( + etag="etag", + input_id=None, + service=None, + state="play", + shuffle=False, + album="album", + artist="artist", + name="song", + image=None, + volume=10, + volume_db=22.3, + mute=False, + mute_volume=None, + mute_volume_db=None, + seconds=2, + total_seconds=123.1, + can_seek=False, + sleep=0, + group_name=None, + group_volume=None, + indexing=False, + stream_url=None, + ) + ) + + sync_status_long_polling_mock = LongPollingMock( + SyncStatus( + etag="etag", + id=f"{host}:11000", + mac=mac, + name=player_name, + image="invalid_url", + initialized=True, + brand="brand", + model="model", + model_name="model-name", + volume_db=0.5, + volume=50, + group=None, + master=None, + slaves=None, + zone=None, + zone_master=None, + zone_slave=None, + mute_volume_db=None, + mute_volume=None, + ) + ) + + player.status.side_effect = status_long_polling_mock.side_effect() + player.sync_status.side_effect = sync_status_long_polling_mock.side_effect() + + player.inputs = AsyncMock( + return_value=[ + Input("1", "input1", "image1", "url1"), + Input("2", "input2", "image2", "url2"), + ] + ) + player.presets = AsyncMock( + return_value=[ + Preset("preset1", "1", "url1", "image1", None), + Preset("preset2", "2", "url2", "image2", None), + ] + ) + + return PlayerMockData( + host, player, status_long_polling_mock, sync_status_long_polling_mock + ) + + +@dataclass +class PlayerMocks: + """Container for mocks.""" + + player_data: PlayerMockData + player_data_secondary: PlayerMockData + player_data_for_already_configured: PlayerMockData @pytest.fixture @@ -78,24 +131,76 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def config_entry() -> MockConfigEntry: """Return a mocked config entry.""" - mock_entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, data={ - CONF_HOST: "1.1.1.2", + CONF_HOST: "1.1.1.1", CONF_PORT: 11000, }, - unique_id="00:11:22:33:44:55-11000", + unique_id="ff:ff:01:01:01:01-11000", ) - mock_entry.add_to_hass(hass) - return mock_entry 
+ +@pytest.fixture +def config_entry_secondary() -> MockConfigEntry: + """Return a mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "2.2.2.2", + CONF_PORT: 11000, + }, + unique_id="ff:ff:02:02:02:02-11000", + ) + + +@pytest.fixture +async def setup_config_entry( + hass: HomeAssistant, config_entry: MockConfigEntry, player_mocks: PlayerMocks +) -> None: + """Set up the platform.""" + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() @pytest.fixture -def mock_player(status: Status) -> Generator[AsyncMock]: +async def setup_config_entry_secondary( + hass: HomeAssistant, + config_entry_secondary: MockConfigEntry, + player_mocks: PlayerMocks, +) -> None: + """Set up the platform.""" + config_entry_secondary.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry_secondary.entry_id) + await hass.async_block_till_done() + + +@pytest.fixture +async def player_mocks() -> AsyncGenerator[PlayerMocks]: """Mock the player.""" + player_mocks = PlayerMocks( + player_data=await PlayerMockData.generate("1.1.1.1"), + player_data_secondary=await PlayerMockData.generate("2.2.2.2"), + player_data_for_already_configured=await PlayerMockData.generate("1.1.1.2"), + ) + + # to simulate a player that is already configured + player_mocks.player_data_for_already_configured.sync_status_long_polling_mock.get().mac = player_mocks.player_data.sync_status_long_polling_mock.get().mac + + def select_player(*args: Any, **kwargs: Any) -> AsyncMock: + match args[0]: + case "1.1.1.1": + return player_mocks.player_data.player + case "2.2.2.2": + return player_mocks.player_data_secondary.player + case "1.1.1.2": + return player_mocks.player_data_for_already_configured.player + case _: + raise ValueError("Invalid player") + with ( patch( "homeassistant.components.bluesound.Player", autospec=True @@ -105,28 +210,6 @@ def mock_player(status: Status) -> Generator[AsyncMock]: new=mock_player, ), ): - player = mock_player.return_value - player.__aenter__.return_value = player - player.status.return_value = status - player.sync_status.return_value = SyncStatus( - etag="etag", - id="1.1.1.1:11000", - mac="00:11:22:33:44:55", - name="player-name", - image="invalid_url", - initialized=True, - brand="brand", - model="model", - model_name="model-name", - volume_db=0.5, - volume=50, - group=None, - master=None, - slaves=None, - zone=None, - zone_master=None, - zone_slave=None, - mute_volume_db=None, - mute_volume=None, - ) - yield player + mock_player.side_effect = select_player + + yield player_mocks diff --git a/tests/components/bluesound/snapshots/test_media_player.ambr b/tests/components/bluesound/snapshots/test_media_player.ambr new file mode 100644 index 00000000000000..3e644d3038a9d4 --- /dev/null +++ b/tests/components/bluesound/snapshots/test_media_player.ambr @@ -0,0 +1,31 @@ +# serializer version: 1 +# name: test_attributes_set + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'player-name1111', + 'is_volume_muted': False, + 'master': False, + 'media_album_name': 'album', + 'media_artist': 'artist', + 'media_content_type': , + 'media_duration': 123, + 'media_position': 2, + 'media_title': 'song', + 'shuffle': False, + 'source_list': list([ + 'input1', + 'input2', + 'preset1', + 'preset2', + ]), + 'supported_features': , + 'volume_level': 0.1, + }), + 'context': , + 'entity_id': 'media_player.player_name1111', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': 'playing', + }) +# --- diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py index 53cf40a8d465d5..63744cdf0ffc03 100644 --- a/tests/components/bluesound/test_config_flow.py +++ b/tests/components/bluesound/test_config_flow.py @@ -11,11 +11,13 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import PlayerMocks + from tests.common import MockConfigEntry async def test_user_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -33,15 +35,17 @@ async def test_user_flow_success( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "00:11:22:33:44:55-11000" + assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" mock_setup_entry.assert_called_once() async def test_user_flow_cannot_connect( - hass: HomeAssistant, mock_player: AsyncMock, mock_setup_entry: AsyncMock + hass: HomeAssistant, + player_mocks: PlayerMocks, + mock_setup_entry: AsyncMock, ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( @@ -49,7 +53,9 @@ async def test_user_flow_cannot_connect( context={"source": SOURCE_USER}, ) - mock_player.sync_status.side_effect = PlayerUnreachableError("Player not reachable") + player_mocks.player_data.sync_status_long_polling_mock.set_error( + PlayerUnreachableError("Player not reachable") + ) result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -61,7 +67,7 @@ async def test_user_flow_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} assert result["step_id"] == "user" - mock_player.sync_status.side_effect = None + player_mocks.player_data.sync_status_long_polling_mock.set_error(None) result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -70,7 +76,7 @@ async def test_user_flow_cannot_connect( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == { CONF_HOST: "1.1.1.1", CONF_PORT: 11000, @@ -81,10 +87,11 @@ async def test_user_flow_cannot_connect( async def test_user_flow_aleady_configured( hass: HomeAssistant, - mock_player: AsyncMock, - mock_config_entry: MockConfigEntry, + player_mocks: PlayerMocks, + config_entry: MockConfigEntry, ) -> None: """Test we handle already configured.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -93,7 +100,7 @@ async def test_user_flow_aleady_configured( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_HOST: "1.1.1.1", + CONF_HOST: "1.1.1.2", CONF_PORT: 11000, }, ) @@ -101,13 +108,13 @@ async def test_user_flow_aleady_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.2" - mock_player.sync_status.assert_called_once() + player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() async 
def test_import_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -117,19 +124,21 @@ async def test_import_flow_success( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "00:11:22:33:44:55-11000" + assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" mock_setup_entry.assert_called_once() - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() async def test_import_flow_cannot_connect( - hass: HomeAssistant, mock_player: AsyncMock + hass: HomeAssistant, player_mocks: PlayerMocks ) -> None: """Test we handle cannot connect error.""" - mock_player.sync_status.side_effect = PlayerUnreachableError("Player not reachable") + player_mocks.player_data.player.sync_status.side_effect = PlayerUnreachableError( + "Player not reachable" + ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, @@ -139,29 +148,30 @@ async def test_import_flow_cannot_connect( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() async def test_import_flow_already_configured( hass: HomeAssistant, - mock_player: AsyncMock, - mock_config_entry: MockConfigEntry, + player_mocks: PlayerMocks, + config_entry: MockConfigEntry, ) -> None: """Test we handle already configured.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + data={CONF_HOST: "1.1.1.2", CONF_PORT: 11000}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - mock_player.sync_status.assert_called_once() + player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() async def test_zeroconf_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, player_mocks: PlayerMocks ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -171,7 +181,7 @@ async def test_zeroconf_flow_success( ip_address="1.1.1.1", ip_addresses=["1.1.1.1"], port=11000, - hostname="player-name", + hostname="player-name1111", type="_musc._tcp.local.", name="player-name._musc._tcp.local.", properties={}, @@ -182,25 +192,27 @@ async def test_zeroconf_flow_success( assert result["step_id"] == "confirm" mock_setup_entry.assert_not_called() - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "player-name" + assert result["title"] == "player-name1111" assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} - assert result["result"].unique_id == "00:11:22:33:44:55-11000" + assert result["result"].unique_id == "ff:ff:01:01:01:01-11000" mock_setup_entry.assert_called_once() async def 
test_zeroconf_flow_cannot_connect( - hass: HomeAssistant, mock_player: AsyncMock + hass: HomeAssistant, player_mocks: PlayerMocks ) -> None: """Test we handle cannot connect error.""" - mock_player.sync_status.side_effect = PlayerUnreachableError("Player not reachable") + player_mocks.player_data.player.sync_status.side_effect = PlayerUnreachableError( + "Player not reachable" + ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, @@ -208,7 +220,7 @@ async def test_zeroconf_flow_cannot_connect( ip_address="1.1.1.1", ip_addresses=["1.1.1.1"], port=11000, - hostname="player-name", + hostname="player-name1111", type="_musc._tcp.local.", name="player-name._musc._tcp.local.", properties={}, @@ -218,23 +230,24 @@ async def test_zeroconf_flow_cannot_connect( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - mock_player.sync_status.assert_called_once() + player_mocks.player_data.player.sync_status.assert_called_once() async def test_zeroconf_flow_already_configured( hass: HomeAssistant, - mock_player: AsyncMock, - mock_config_entry: MockConfigEntry, + player_mocks: PlayerMocks, + config_entry: MockConfigEntry, ) -> None: """Test we handle already configured and update the host.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=ZeroconfServiceInfo( - ip_address="1.1.1.1", - ip_addresses=["1.1.1.1"], + ip_address="1.1.1.2", + ip_addresses=["1.1.1.2"], port=11000, - hostname="player-name", + hostname="player-name1112", type="_musc._tcp.local.", name="player-name._musc._tcp.local.", properties={}, @@ -244,6 +257,6 @@ async def test_zeroconf_flow_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.2" - mock_player.sync_status.assert_called_once() + player_mocks.player_data_for_already_configured.player.sync_status.assert_called_once() diff --git a/tests/components/bluesound/test_init.py b/tests/components/bluesound/test_init.py new file mode 100644 index 00000000000000..4178c27acadd57 --- /dev/null +++ b/tests/components/bluesound/test_init.py @@ -0,0 +1,46 @@ +"""Test bluesound integration.""" + +from pyblu.errors import PlayerUnreachableError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .conftest import PlayerMocks + +from tests.common import MockConfigEntry + + +async def test_setup_entry( + hass: HomeAssistant, setup_config_entry: None, config_entry: MockConfigEntry +) -> None: + """Test a successful setup entry.""" + assert hass.states.get("media_player.player_name1111").state == "playing" + assert config_entry.state is ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("media_player.player_name1111").state == "unavailable" + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_unload_entry_while_player_is_offline( + hass: HomeAssistant, + setup_config_entry: None, + config_entry: MockConfigEntry, + player_mocks: PlayerMocks, +) -> None: + """Test entries can be unloaded correctly while the player is offline.""" + player_mocks.player_data.player.status.side_effect = PlayerUnreachableError( + "Player not reachable" + ) + 
player_mocks.player_data.status_long_polling_mock.trigger() + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("media_player.player_name1111").state == "unavailable" + assert config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py new file mode 100644 index 00000000000000..0bf615de3da879 --- /dev/null +++ b/tests/components/bluesound/test_media_player.py @@ -0,0 +1,375 @@ +"""Tests for the Bluesound Media Player platform.""" + +import dataclasses +from unittest.mock import call + +from pyblu import PairedPlayer +from pyblu.errors import PlayerUnreachableError +import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.bluesound import DOMAIN as BLUESOUND_DOMAIN +from homeassistant.components.bluesound.const import ATTR_MASTER +from homeassistant.components.bluesound.services import ( + SERVICE_CLEAR_TIMER, + SERVICE_JOIN, + SERVICE_SET_TIMER, +) +from homeassistant.components.media_player import ( + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + MediaPlayerState, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from .conftest import PlayerMocks + + +@pytest.mark.parametrize( + ("service", "method"), + [ + (SERVICE_MEDIA_PAUSE, "pause"), + (SERVICE_MEDIA_PLAY, "play"), + (SERVICE_MEDIA_NEXT_TRACK, "skip"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "back"), + ], +) +async def test_simple_actions( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, + service: str, + method: str, +) -> None: + """Test the media player simple actions.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + service, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + getattr(player_mocks.player_data.player, method).assert_called_once_with() + + +async def test_volume_set( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume set.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: "media_player.player_name1111", ATTR_MEDIA_VOLUME_LEVEL: 0.5}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=50) + + +async def test_volume_mute( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume mute.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + {ATTR_ENTITY_ID: "media_player.player_name1111", "is_volume_muted": True}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(mute=True) + + +async def test_volume_up( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume up.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + 
blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=11) + + +async def test_volume_down( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player volume down.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=9) + + +async def test_attributes_set( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, + snapshot: SnapshotAssertion, +) -> None: + """Test the media player attributes set.""" + state = hass.states.get("media_player.player_name1111") + assert state == snapshot(exclude=props("media_position_updated_at")) + + +async def test_stop_maps_to_idle( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player stop maps to idle.""" + player_mocks.player_data.status_long_polling_mock.set( + dataclasses.replace( + player_mocks.player_data.status_long_polling_mock.get(), state="stop" + ) + ) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + assert ( + hass.states.get("media_player.player_name1111").state == MediaPlayerState.IDLE + ) + + +async def test_status_updated( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player status updated.""" + pre_state = hass.states.get("media_player.player_name1111") + assert pre_state.state == "playing" + assert pre_state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.1 + + status = player_mocks.player_data.status_long_polling_mock.get() + status = dataclasses.replace(status, state="pause", volume=50, etag="changed") + player_mocks.player_data.status_long_polling_mock.set(status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + post_state = hass.states.get("media_player.player_name1111") + + assert post_state.state == MediaPlayerState.PAUSED + assert post_state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.5 + + +async def test_unavailable_when_offline( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test that the media player goes unavailable when the player is unreachable.""" + pre_state = hass.states.get("media_player.player_name1111") + assert pre_state.state == "playing" + + player_mocks.player_data.status_long_polling_mock.set_error( + PlayerUnreachableError("Player not reachable") + ) + player_mocks.player_data.status_long_polling_mock.trigger() + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + post_state = hass.states.get("media_player.player_name1111") + + assert post_state.state == STATE_UNAVAILABLE + + +async def test_set_sleep_timer( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the set sleep timer action.""" + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_SET_TIMER, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.sleep_timer.assert_called_once() + + +async def test_clear_sleep_timer( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the clear sleep timer action.""" + + 
player_mocks.player_data.player.sleep_timer.side_effect = [15, 30, 45, 60, 90, 0] + + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_CLEAR_TIMER, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.sleep_timer.assert_has_calls([call()] * 6) + + +async def test_join_cannot_join_to_self( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test that joining to self is not allowed.""" + with pytest.raises(ServiceValidationError, match="Cannot join player to itself"): + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.player_name1111", + ATTR_MASTER: "media_player.player_name1111", + }, + blocking=True, + ) + + +async def test_join( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the join action.""" + await hass.services.async_call( + BLUESOUND_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.player_name1111", + ATTR_MASTER: "media_player.player_name2222", + }, + blocking=True, + ) + + player_mocks.player_data_secondary.player.add_slave.assert_called_once_with( + "1.1.1.1", 11000 + ) + + +async def test_unjoin( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the unjoin action.""" + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + master=PairedPlayer("2.2.2.2", 11000), + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + await hass.services.async_call( + BLUESOUND_DOMAIN, + "unjoin", + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data_secondary.player.remove_slave.assert_called_once_with( + "1.1.1.1", 11000 + ) + + +async def test_attr_master( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player master.""" + attr_master = hass.states.get("media_player.player_name1111").attributes[ + ATTR_MASTER + ] + assert attr_master is False + + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + slaves=[PairedPlayer("2.2.2.2", 11000)], + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + attr_master = hass.states.get("media_player.player_name1111").attributes[ + ATTR_MASTER + ] + + assert attr_master is True + + +async def test_attr_bluesound_group( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player grouping.""" + attr_bluesound_group = hass.states.get( + "media_player.player_name1111" + ).attributes.get("bluesound_group") + assert attr_bluesound_group is None + + updated_status = dataclasses.replace( + player_mocks.player_data.status_long_polling_mock.get(), + group_name="player-name1111+player-name2222", + ) + player_mocks.player_data.status_long_polling_mock.set(updated_status) + + # give the long polling loop a chance to update the state; this could be any async call + await 
hass.async_block_till_done() + + attr_bluesound_group = hass.states.get( + "media_player.player_name1111" + ).attributes.get("bluesound_group") + + assert attr_bluesound_group == ["player-name1111", "player-name2222"] + + +async def test_volume_up_from_6_to_7( + hass: HomeAssistant, + setup_config_entry: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player volume up from 6 to 7. + + This fails if if rounding is not done correctly. See https://github.com/home-assistant/core/issues/129956 for more details. + """ + player_mocks.player_data.status_long_polling_mock.set( + dataclasses.replace( + player_mocks.player_data.status_long_polling_mock.get(), volume=6 + ) + ) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data.player.volume.assert_called_once_with(level=7) diff --git a/tests/components/bluesound/utils.py b/tests/components/bluesound/utils.py new file mode 100644 index 00000000000000..112d077d7f5efa --- /dev/null +++ b/tests/components/bluesound/utils.py @@ -0,0 +1,70 @@ +"""Utils for bluesound tests.""" + +import asyncio +from typing import Protocol + + +class Etag(Protocol): + """Etag protocol.""" + + etag: str + + +class LongPollingMock[T: Etag]: + """Mock long polling methods(status, sync_status).""" + + def __init__(self, value: T) -> None: + """Store value and allows to wait for changes.""" + self._value = value + self._error: Exception | None = None + self._event = asyncio.Event() + self._event.set() + + def trigger(self): + """Trigger the event without changing the value.""" + self._event.set() + + def set(self, value: T): + """Set the value and notify all waiting.""" + self._value = value + self._event.set() + + def set_error(self, error: Exception | None): + """Set the error and notify all waiting.""" + self._error = error + self._event.set() + + def get(self) -> T: + """Get the value without waiting.""" + return self._value + + async def wait(self) -> T: + """Wait for the value or error to change.""" + await self._event.wait() + self._event.clear() + + return self._value + + def side_effect(self): + """Return the side_effect for mocking.""" + last_etag = None + + async def mock(*args, **kwargs) -> T: + nonlocal last_etag + if self._error is not None: + raise self._error + + etag = kwargs.get("etag") + if etag is None or etag != last_etag: + last_etag = self.get().etag + return self.get() + + value = await self.wait() + last_etag = value.etag + + if self._error is not None: + raise self._error + + return value + + return mock diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index 655955ff9aade3..4d280a1d0e5c8d 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -40,7 +40,7 @@ }, "options": {CONF_READ_ONLY: False}, "source": config_entries.SOURCE_USER, - "unique_id": f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_REGION]}", + "unique_id": f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_USERNAME]}", } diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index 2182ff2bb4894a..624b2c6007f88e 100644 --- 
a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -245,7 +245,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', @@ -259,7 +259,6 @@ # name: test_entity_state_attrs[sensor.i3_rex_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Charging target', 'unit_of_measurement': '%', }), @@ -894,7 +893,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', @@ -908,7 +907,6 @@ # name: test_entity_state_attrs[sensor.i4_edrive40_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Charging target', 'unit_of_measurement': '%', }), @@ -1900,7 +1898,7 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, 'original_name': 'Charging target', 'platform': 'bmw_connected_drive', @@ -1914,7 +1912,6 @@ # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Charging target', 'unit_of_measurement': '%', }), diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index f71730fcc17b65..f57f1a304ac010 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -4,8 +4,13 @@ from unittest.mock import patch from bimmer_connected.api.authentication import MyBMWAuthentication -from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError +from bimmer_connected.models import ( + MyBMWAPIError, + MyBMWAuthError, + MyBMWCaptchaMissingError, +) from httpx import RequestError +import pytest from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.config_flow import DOMAIN @@ -13,7 +18,7 @@ CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -193,6 +198,14 @@ async def test_reauth(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} + suggested_values = { + key: key.description.get("suggested_value") + for key in result["data_schema"].schema + } + assert suggested_values[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] + assert suggested_values[CONF_PASSWORD] == wrong_password + assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT ) @@ -203,3 +216,131 @@ async def test_reauth(hass: HomeAssistant) -> None: assert config_entry.data == FIXTURE_COMPLETE_ENTRY assert len(mock_setup_entry.mock_calls) == 2 + + +async def test_reauth_unique_id_abort(hass: HomeAssistant) -> None: + """Test aborting the reauth form if unique_id changes.""" + with patch( + "bimmer_connected.api.authentication.MyBMWAuthentication.login", + 
side_effect=login_sideeffect, + autospec=True, + ): + wrong_password = "wrong" + + config_entry_with_wrong_password = deepcopy(FIXTURE_CONFIG_ENTRY) + config_entry_with_wrong_password["data"][CONF_PASSWORD] = wrong_password + + config_entry = MockConfigEntry(**config_entry_with_wrong_password) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.data == config_entry_with_wrong_password["data"] + + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {**FIXTURE_USER_INPUT, CONF_REGION: "north_america"} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "account_mismatch" + assert config_entry.data == config_entry_with_wrong_password["data"] + + +async def test_reconfigure(hass: HomeAssistant) -> None: + """Test the reconfiguration form.""" + with patch( + "bimmer_connected.api.authentication.MyBMWAuthentication.login", + side_effect=login_sideeffect, + autospec=True, + ): + config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + suggested_values = { + key: key.description.get("suggested_value") + for key in result["data_schema"].schema + } + assert suggested_values[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] + assert suggested_values[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] + assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_USER_INPUT + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert config_entry.data == FIXTURE_COMPLETE_ENTRY + + +async def test_reconfigure_unique_id_abort(hass: HomeAssistant) -> None: + """Test aborting the reconfiguration form if unique_id changes.""" + with patch( + "bimmer_connected.api.authentication.MyBMWAuthentication.login", + side_effect=login_sideeffect, + autospec=True, + ): + config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {**FIXTURE_USER_INPUT, CONF_USERNAME: "somebody@email.com"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "account_mismatch" + assert config_entry.data == FIXTURE_COMPLETE_ENTRY + + +@pytest.mark.usefixtures("bmw_fixture") +async def test_captcha_flow_not_set(hass: HomeAssistant) -> None: + """Test the external flow with captcha failing once and succeeding the second time.""" + + TEST_REGION = "north_america" + + # Start flow and open form + # Start flow and 
open form + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # Add login data + with patch( + "bimmer_connected.api.authentication.MyBMWAuthentication._login_row_na", + side_effect=MyBMWCaptchaMissingError( + "Missing hCaptcha token for North America login" + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**FIXTURE_USER_INPUT, CONF_REGION: TEST_REGION}, + ) + assert result["errors"]["base"] == "missing_captcha" diff --git a/tests/components/bmw_connected_drive/test_coordinator.py b/tests/components/bmw_connected_drive/test_coordinator.py index b0f507bbfc2b29..774a85eb6da03c 100644 --- a/tests/components/bmw_connected_drive/test_coordinator.py +++ b/tests/components/bmw_connected_drive/test_coordinator.py @@ -1,13 +1,19 @@ """Test BMW coordinator.""" +from copy import deepcopy from datetime import timedelta from unittest.mock import patch -from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError +from bimmer_connected.models import ( + MyBMWAPIError, + MyBMWAuthError, + MyBMWCaptchaMissingError, +) from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.bmw_connected_drive import DOMAIN as BMW_DOMAIN +from homeassistant.const import CONF_REGION from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import issue_registry as ir @@ -122,3 +128,38 @@ async def test_init_reauth( f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}", ) assert reauth_issue.active is True + + +@pytest.mark.usefixtures("bmw_fixture") +async def test_captcha_reauth( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the reauth form.""" + TEST_REGION = "north_america" + + config_entry_fixure = deepcopy(FIXTURE_CONFIG_ENTRY) + config_entry_fixure["data"][CONF_REGION] = TEST_REGION + config_entry = MockConfigEntry(**config_entry_fixure) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + coordinator = config_entry.runtime_data.coordinator + + assert coordinator.last_update_success is True + + freezer.tick(timedelta(minutes=10, seconds=1)) + with patch( + "bimmer_connected.account.MyBMWAccount.get_vehicles", + side_effect=MyBMWCaptchaMissingError( + "Missing hCaptcha token for North America login" + ), + ): + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert coordinator.last_update_success is False + assert isinstance(coordinator.last_exception, ConfigEntryAuthFailed) is True + assert coordinator.last_exception.translation_key == "missing_captcha" diff --git a/tests/components/bond/test_button.py b/tests/components/bond/test_button.py index 8c8f38db72b242..c14bba0d01fa3b 100644 --- a/tests/components/bond/test_button.py +++ b/tests/components/bond/test_button.py @@ -57,6 +57,15 @@ def light(name: str): } +def motorized_shade(name: str): + """Create a motorized shade with a given name.""" + return { + "name": name, + "type": DeviceType.MOTORIZED_SHADES, + "actions": [Action.OPEN, Action.OPEN_NEXT, Action.CLOSE, Action.CLOSE_NEXT], + } + + async def test_entity_registry( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -180,3 +189,38 @@ async def test_press_button(hass: HomeAssistant) -> None: 
mock_action.assert_called_once_with( "test-device-id", Action(Action.START_DECREASING_BRIGHTNESS) ) + + +async def test_motorized_shade_actions(hass: HomeAssistant) -> None: + """Tests motorized shade open next and close next actions.""" + await setup_platform( + hass, + BUTTON_DOMAIN, + motorized_shade("name-1"), + bond_device_id="test-device-id", + ) + + assert hass.states.get("button.name_1_open_next") + assert hass.states.get("button.name_1_close_next") + + with patch_bond_action() as mock_action, patch_bond_device_state(): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.name_1_open_next"}, + blocking=True, + ) + await hass.async_block_till_done() + + mock_action.assert_called_once_with("test-device-id", Action(Action.OPEN_NEXT)) + + with patch_bond_action() as mock_action, patch_bond_device_state(): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.name_1_close_next"}, + blocking=True, + ) + await hass.async_block_till_done() + + mock_action.assert_called_once_with("test-device-id", Action(Action.CLOSE_NEXT)) diff --git a/tests/components/bond/test_cover.py b/tests/components/bond/test_cover.py index e438a830eb5db9..4dc8256be48e51 100644 --- a/tests/components/bond/test_cover.py +++ b/tests/components/bond/test_cover.py @@ -8,7 +8,7 @@ ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, - STATE_CLOSED, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -19,7 +19,6 @@ SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - STATE_OPEN, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -224,7 +223,7 @@ async def test_tilt_and_open(hass: HomeAssistant) -> None: await hass.async_block_till_done() mock_open.assert_called_once_with("test-device-id", Action.tilt_open()) - assert hass.states.get("cover.name_1").state == STATE_CLOSED + assert hass.states.get("cover.name_1").state == CoverState.CLOSED async def test_update_reports_open_cover(hass: HomeAssistant) -> None: @@ -280,7 +279,7 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(0)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN assert entity_state.attributes[ATTR_CURRENT_POSITION] == 100 with ( @@ -298,7 +297,7 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(100)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == STATE_CLOSED + assert entity_state.state == CoverState.CLOSED assert entity_state.attributes[ATTR_CURRENT_POSITION] == 0 with ( @@ -316,5 +315,5 @@ async def test_set_position_cover(hass: HomeAssistant) -> None: mock_hold.assert_called_once_with("test-device-id", Action.set_position(40)) entity_state = hass.states.get("cover.name_1") - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN assert entity_state.attributes[ATTR_CURRENT_POSITION] == 60 diff --git a/tests/components/bosch_shc/test_config_flow.py b/tests/components/bosch_shc/test_config_flow.py index eaabe1128075ac..63f7169b02659f 100644 --- a/tests/components/bosch_shc/test_config_flow.py +++ b/tests/components/bosch_shc/test_config_flow.py @@ -99,8 +99,8 @@ async def test_form_user(hass: HomeAssistant) -> None: assert result3["title"] == "shc012345" assert result3["data"] == { "host": 
"1.1.1.1", - "ssl_certificate": hass.config.path(DOMAIN, CONF_SHC_CERT), - "ssl_key": hass.config.path(DOMAIN, CONF_SHC_KEY), + "ssl_certificate": hass.config.path(DOMAIN, "test-mac", CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, "test-mac", CONF_SHC_KEY), "token": "abc:123", "hostname": "123", } @@ -549,8 +549,8 @@ async def test_zeroconf(hass: HomeAssistant) -> None: assert result3["title"] == "shc012345" assert result3["data"] == { "host": "1.1.1.1", - "ssl_certificate": hass.config.path(DOMAIN, CONF_SHC_CERT), - "ssl_key": hass.config.path(DOMAIN, CONF_SHC_KEY), + "ssl_certificate": hass.config.path(DOMAIN, "test-mac", CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, "test-mac", CONF_SHC_KEY), "token": "abc:123", "hostname": "123", } @@ -708,6 +708,7 @@ async def test_reauth(hass: HomeAssistant) -> None: async def test_tls_assets_writer(hass: HomeAssistant) -> None: """Test we write tls assets to correct location.""" + unique_id = "test-mac" assets = { "token": "abc:123", "cert": b"content_cert", @@ -719,14 +720,163 @@ async def test_tls_assets_writer(hass: HomeAssistant) -> None: "homeassistant.components.bosch_shc.config_flow.open", mock_open() ) as mocked_file, ): - write_tls_asset(hass, CONF_SHC_CERT, assets["cert"]) + write_tls_asset(hass, unique_id, CONF_SHC_CERT, assets["cert"]) mocked_file.assert_called_with( - hass.config.path(DOMAIN, CONF_SHC_CERT), "w", encoding="utf8" + hass.config.path(DOMAIN, unique_id, CONF_SHC_CERT), "w", encoding="utf8" ) mocked_file().write.assert_called_with("content_cert") - write_tls_asset(hass, CONF_SHC_KEY, assets["key"]) + write_tls_asset(hass, unique_id, CONF_SHC_KEY, assets["key"]) mocked_file.assert_called_with( - hass.config.path(DOMAIN, CONF_SHC_KEY), "w", encoding="utf8" + hass.config.path(DOMAIN, unique_id, CONF_SHC_KEY), "w", encoding="utf8" ) mocked_file().write.assert_called_with("content_key") + + +@pytest.mark.usefixtures("mock_zeroconf") +async def test_register_multiple_controllers(hass: HomeAssistant) -> None: + """Test register multiple controllers. + + Each registered controller must get its own key/certificate pair, + which must not get overwritten when a new controller is added. 
+ """ + + controller_1 = { + "hostname": "shc111111", + "mac": "test-mac1", + "host": "1.1.1.1", + "register": { + "token": "abc:shc111111", + "cert": b"content_cert1", + "key": b"content_key1", + }, + } + controller_2 = { + "hostname": "shc222222", + "mac": "test-mac2", + "host": "2.2.2.2", + "register": { + "token": "abc:shc222222", + "cert": b"content_cert2", + "key": b"content_key2", + }, + } + + # Set up controller 1 + ctrl_1_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with ( + patch( + "boschshcpy.session.SHCSession.mdns_info", + return_value=SHCInformation, + ), + patch( + "boschshcpy.information.SHCInformation.name", + new_callable=PropertyMock, + return_value=controller_1["hostname"], + ), + patch( + "boschshcpy.information.SHCInformation.unique_id", + new_callable=PropertyMock, + return_value=controller_1["mac"], + ), + ): + ctrl_1_result2 = await hass.config_entries.flow.async_configure( + ctrl_1_result["flow_id"], + {"host": controller_1["host"]}, + ) + + with ( + patch( + "boschshcpy.register_client.SHCRegisterClient.register", + return_value=controller_1["register"], + ), + patch("os.mkdir"), + patch("homeassistant.components.bosch_shc.config_flow.open"), + patch("boschshcpy.session.SHCSession.authenticate"), + patch( + "homeassistant.components.bosch_shc.async_setup_entry", + return_value=True, + ), + ): + ctrl_1_result3 = await hass.config_entries.flow.async_configure( + ctrl_1_result2["flow_id"], + {"password": "test"}, + ) + await hass.async_block_till_done() + + assert ctrl_1_result3["type"] is FlowResultType.CREATE_ENTRY + assert ctrl_1_result3["title"] == "shc111111" + assert ctrl_1_result3["context"]["unique_id"] == controller_1["mac"] + assert ctrl_1_result3["data"] == { + "host": "1.1.1.1", + "ssl_certificate": hass.config.path(DOMAIN, controller_1["mac"], CONF_SHC_CERT), + "ssl_key": hass.config.path(DOMAIN, controller_1["mac"], CONF_SHC_KEY), + "token": "abc:shc111111", + "hostname": "shc111111", + } + + # Set up controller 2 + ctrl_2_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with ( + patch( + "boschshcpy.session.SHCSession.mdns_info", + return_value=SHCInformation, + ), + patch( + "boschshcpy.information.SHCInformation.name", + new_callable=PropertyMock, + return_value=controller_2["hostname"], + ), + patch( + "boschshcpy.information.SHCInformation.unique_id", + new_callable=PropertyMock, + return_value=controller_2["mac"], + ), + ): + ctrl_2_result2 = await hass.config_entries.flow.async_configure( + ctrl_2_result["flow_id"], + {"host": controller_2["host"]}, + ) + + with ( + patch( + "boschshcpy.register_client.SHCRegisterClient.register", + return_value=controller_2["register"], + ), + patch("os.mkdir"), + patch("homeassistant.components.bosch_shc.config_flow.open"), + patch("boschshcpy.session.SHCSession.authenticate"), + patch( + "homeassistant.components.bosch_shc.async_setup_entry", + return_value=True, + ), + ): + ctrl_2_result3 = await hass.config_entries.flow.async_configure( + ctrl_2_result2["flow_id"], + {"password": "test"}, + ) + await hass.async_block_till_done() + + assert ctrl_2_result3["type"] is FlowResultType.CREATE_ENTRY + assert ctrl_2_result3["title"] == "shc222222" + assert ctrl_2_result3["context"]["unique_id"] == controller_2["mac"] + assert ctrl_2_result3["data"] == { + "host": "2.2.2.2", + "ssl_certificate": hass.config.path(DOMAIN, controller_2["mac"], CONF_SHC_CERT), + "ssl_key": 
hass.config.path(DOMAIN, controller_2["mac"], CONF_SHC_KEY), + "token": "abc:shc222222", + "hostname": "shc222222", + } + + # Check that each controller has its own key/certificate pair + assert ( + ctrl_1_result3["data"]["ssl_certificate"] + != ctrl_2_result3["data"]["ssl_certificate"] + ) + assert ctrl_1_result3["data"]["ssl_key"] != ctrl_2_result3["data"]["ssl_key"] diff --git a/tests/components/bring/fixtures/items_invitation.json b/tests/components/bring/fixtures/items_invitation.json new file mode 100644 index 00000000000000..82ef623e43953c --- /dev/null +++ b/tests/components/bring/fixtures/items_invitation.json @@ -0,0 +1,44 @@ +{ + "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", + "status": "INVITATION", + "purchase": [ + { + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "itemId": "Paprika", + "specification": "Rot", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + }, + { + "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", + "itemId": "Pouletbrüstli", + "specification": "Bio", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + } + ], + "recently": [ + { + "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", + "itemId": "Ananas", + "specification": "", + "attributes": [] + } + ] +} diff --git a/tests/components/bring/fixtures/items_shared.json b/tests/components/bring/fixtures/items_shared.json new file mode 100644 index 00000000000000..9ac999729d37b4 --- /dev/null +++ b/tests/components/bring/fixtures/items_shared.json @@ -0,0 +1,44 @@ +{ + "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", + "status": "SHARED", + "purchase": [ + { + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "itemId": "Paprika", + "specification": "Rot", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + }, + { + "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", + "itemId": "Pouletbrüstli", + "specification": "Bio", + "attributes": [ + { + "type": "PURCHASE_CONDITIONS", + "content": { + "urgent": true, + "convenient": true, + "discounted": true + } + } + ] + } + ], + "recently": [ + { + "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", + "itemId": "Ananas", + "specification": "", + "attributes": [] + } + ] +} diff --git a/tests/components/bring/snapshots/test_sensor.ambr b/tests/components/bring/snapshots/test_sensor.ambr index 08e554632e9511..97e1d1b4bd9bb8 100644 --- a/tests/components/bring/snapshots/test_sensor.ambr +++ b/tests/components/bring/snapshots/test_sensor.ambr @@ -46,6 +46,64 @@ 'state': '2', }) # --- +# name: test_setup[sensor.baumarkt_list_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.baumarkt_list_access', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'List access', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 
'00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd_list_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup[sensor.baumarkt_list_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Baumarkt List access', + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'context': , + 'entity_id': 'sensor.baumarkt_list_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'registered', + }) +# --- # name: test_setup[sensor.baumarkt_on_occasion-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -279,6 +337,64 @@ 'state': '2', }) # --- +# name: test_setup[sensor.einkauf_list_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.einkauf_list_access', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'List access', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5_list_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup[sensor.einkauf_list_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Einkauf List access', + 'options': list([ + 'registered', + 'shared', + 'invitation', + ]), + }), + 'context': , + 'entity_id': 'sensor.einkauf_list_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'registered', + }) +# --- # name: test_setup[sensor.einkauf_on_occasion-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/bring/test_sensor.py b/tests/components/bring/test_sensor.py index a36b01631652bc..974818ccedf42c 100644 --- a/tests/components/bring/test_sensor.py +++ b/tests/components/bring/test_sensor.py @@ -1,17 +1,18 @@ """Test for sensor platform of the Bring! 
integration.""" from collections.abc import Generator -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.bring.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform @pytest.fixture(autouse=True) @@ -42,3 +43,34 @@ async def test_setup( await snapshot_platform( hass, entity_registry, snapshot, bring_config_entry.entry_id ) + + +@pytest.mark.parametrize( + ("fixture", "entity_state"), + [ + ("items_invitation", "invitation"), + ("items_shared", "shared"), + ("items", "registered"), + ], +) +async def test_list_access_states( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, + fixture: str, + entity_state: str, +) -> None: + """Snapshot test states of list access sensor.""" + + mock_bring_client.get_list.return_value = load_json_object_fixture( + f"{fixture}.json", DOMAIN + ) + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + assert (state := hass.states.get("sensor.einkauf_list_access")) + assert state.state == entity_state diff --git a/tests/components/brother/test_config_flow.py b/tests/components/brother/test_config_flow.py index 0dc179061b4d4c..929e2f083e9216 100644 --- a/tests/components/brother/test_config_flow.py +++ b/tests/components/brother/test_config_flow.py @@ -261,7 +261,7 @@ async def test_reconfigure_successful( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -297,7 +297,7 @@ async def test_reconfigure_not_successful( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_brother_client.async_update.side_effect = exc @@ -307,7 +307,7 @@ async def test_reconfigure_not_successful( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": base_error} mock_brother_client.async_update.side_effect = None @@ -336,7 +336,7 @@ async def test_reconfigure_invalid_hostname( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -344,7 +344,7 @@ async def test_reconfigure_invalid_hostname( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {CONF_HOST: "wrong_host"} @@ -359,7 +359,7 @@ async def test_reconfigure_not_the_same_device( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is 
FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_brother_client.serial = "9876543210" @@ -369,5 +369,5 @@ async def test_reconfigure_not_the_same_device( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "another_device"} diff --git a/tests/components/bsblan/fixtures/state.json b/tests/components/bsblan/fixtures/state.json index 51d4cf2e136eeb..8c458e173d4227 100644 --- a/tests/components/bsblan/fixtures/state.json +++ b/tests/components/bsblan/fixtures/state.json @@ -97,5 +97,14 @@ "dataType": 1, "readonly": 1, "unit": "" + }, + "room1_temp_setpoint_boost": { + "name": "Room 1 Temp Setpoint Boost", + "error": 0, + "value": "22.5", + "desc": "Boost", + "dataType": 1, + "readonly": 1, + "unit": "°C" } } diff --git a/tests/components/bsblan/snapshots/test_diagnostics.ambr b/tests/components/bsblan/snapshots/test_diagnostics.ambr index c1d152056ec428..9fabd373205cfe 100644 --- a/tests/components/bsblan/snapshots/test_diagnostics.ambr +++ b/tests/components/bsblan/snapshots/test_diagnostics.ambr @@ -6,60 +6,103 @@ 'current_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temp 1 actual value', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '°C', - 'value': '18.6', + 'value': 18.6, }), 'outside_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Outside temp sensor local', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '6.1', + 'value': 6.1, }), }), 'state': dict({ 'current_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temp 1 actual value', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '°C', - 'value': '18.6', + 'value': 18.6, }), 'hvac_action': dict({ 'data_type': 1, 'desc': 'Raumtemp’begrenzung', + 'error': 0, 'name': 'Status heating circuit 1', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '', - 'value': '122', + 'value': 122, }), 'hvac_mode': dict({ 'data_type': 1, 'desc': 'Komfort', + 'error': 0, 'name': 'Operating mode', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', 'value': 'heat', }), 'hvac_mode2': dict({ 'data_type': 1, 'desc': 'Reduziert', + 'error': 0, 'name': 'Operating mode', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', - 'value': '2', + 'value': 2, + }), + 'room1_temp_setpoint_boost': dict({ + 'data_type': 1, + 'desc': 'Boost', + 'error': 0, + 'name': 'Room 1 Temp Setpoint Boost', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, + 'unit': '°C', + 'value': '22.5', }), 'room1_thermostat_mode': dict({ 'data_type': 1, 'desc': 'Kein Bedarf', + 'error': 0, 'name': 'Raumthermostat 1', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '', - 'value': '0', + 'value': 0, }), 'target_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temperature Comfort setpoint', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '18.5', + 'value': 18.5, }), }), }), @@ -73,21 +116,33 @@ 'controller_family': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Device family', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', - 'value': '211', + 'value': 211, }), 'controller_variant': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Device variant', + 'precision': None, + 'readonly': 0, + 
'readwrite': 0, 'unit': '', - 'value': '127', + 'value': 127, }), 'device_identification': dict({ 'data_type': 7, 'desc': '', + 'error': 0, 'name': 'Gerte-Identifikation', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', 'value': 'RVS21.831F/127', }), @@ -96,16 +151,24 @@ 'max_temp': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Summer/winter changeover temp heat circuit 1', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '20.0', + 'value': 20.0, }), 'min_temp': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temp frost protection setpoint', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '8.0', + 'value': 8.0, }), }), }) diff --git a/tests/components/cambridge_audio/conftest.py b/tests/components/cambridge_audio/conftest.py index f17ff0cca3fe43..33a9ded70e32f3 100644 --- a/tests/components/cambridge_audio/conftest.py +++ b/tests/components/cambridge_audio/conftest.py @@ -1,16 +1,25 @@ """Cambridge Audio tests configuration.""" from collections.abc import Generator -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch -from aiostreammagic.models import Info, NowPlaying, PlayState, Source, State +from aiostreammagic.models import ( + AudioOutput, + Display, + Info, + NowPlaying, + PlayState, + PresetList, + Source, + State, + Update, +) import pytest from homeassistant.components.cambridge_audio.const import DOMAIN from homeassistant.const import CONF_HOST from tests.common import MockConfigEntry, load_fixture, load_json_array_fixture -from tests.components.smhi.common import AsyncMock @pytest.fixture @@ -50,9 +59,17 @@ def mock_stream_magic_client() -> Generator[AsyncMock]: client.now_playing = NowPlaying.from_json( load_fixture("get_now_playing.json", DOMAIN) ) + client.display = Display.from_json(load_fixture("get_display.json", DOMAIN)) + client.update = Update.from_json(load_fixture("get_update.json", DOMAIN)) + client.preset_list = PresetList.from_json( + load_fixture("get_presets_list.json", DOMAIN) + ) + client.audio_output = AudioOutput.from_json( + load_fixture("get_audio_output.json", DOMAIN) + ) client.is_connected = Mock(return_value=True) client.position_last_updated = client.play_state.position - client.unregister_state_update_callbacks = AsyncMock(return_value=True) + client.unregister_state_update_callbacks.return_value = True yield client diff --git a/tests/components/cambridge_audio/fixtures/get_audio_output.json b/tests/components/cambridge_audio/fixtures/get_audio_output.json new file mode 100644 index 00000000000000..e38ae03730799b --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_audio_output.json @@ -0,0 +1,16 @@ +{ + "outputs": [ + { + "id": "speaker_a", + "name": "Speaker A" + }, + { + "id": "speaker_b", + "name": "Speaker B" + }, + { + "id": "headphones", + "name": "Headphones" + } + ] +} diff --git a/tests/components/cambridge_audio/fixtures/get_display.json b/tests/components/cambridge_audio/fixtures/get_display.json new file mode 100644 index 00000000000000..73cbf5a60b3c40 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_display.json @@ -0,0 +1,3 @@ +{ + "brightness": "bright" +} diff --git a/tests/components/cambridge_audio/fixtures/get_presets_list.json b/tests/components/cambridge_audio/fixtures/get_presets_list.json new file mode 100644 index 00000000000000..87d49e9fd306a5 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_presets_list.json @@ -0,0 +1,34 @@ +{ + 
"start": 1, + "end": 99, + "max_presets": 99, + "presettable": true, + "presets": [ + { + "id": 1, + "name": "Chicago House Radio", + "type": "Radio", + "class": "stream.radio", + "state": "OK", + "is_playing": false, + "art_url": "https://static.airable.io/43/68/432868.png", + "airable_radio_id": 5317566146608442 + }, + { + "id": 2, + "name": "Spotify: Good & Evil", + "type": "Spotify", + "class": "stream.service.spotify", + "state": "OK", + "is_playing": true, + "art_url": "https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59" + }, + { + "id": 3, + "name": "Unknown Preset Type", + "type": "Unknown", + "class": "stream.unknown", + "state": "OK" + } + ] +} diff --git a/tests/components/cambridge_audio/fixtures/get_update.json b/tests/components/cambridge_audio/fixtures/get_update.json new file mode 100644 index 00000000000000..a6fec6265c0f78 --- /dev/null +++ b/tests/components/cambridge_audio/fixtures/get_update.json @@ -0,0 +1,5 @@ +{ + "early_update": false, + "update_available": false, + "updating": false +} diff --git a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr index c554785006e4fc..1ba9c4093f66f7 100644 --- a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr +++ b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr @@ -1,51 +1,196 @@ # serializer version: 1 # name: test_entry_diagnostics dict({ + 'display': dict({ + 'brightness': 'bright', + }), 'info': dict({ - '__type': "", - 'repr': "Info(name='Cambridge Audio CXNv2', model='CXNv2', timezone='America/Chicago', locale='en_GB', udn='02680b5c-1320-4d54-9f7c-3cfe915ad4c3', unit_id='0020c2d8', api_version='1.8')", + 'api_version': '1.8', + 'locale': 'en_GB', + 'model': 'CXNv2', + 'name': 'Cambridge Audio CXNv2', + 'timezone': 'America/Chicago', + 'udn': '02680b5c-1320-4d54-9f7c-3cfe915ad4c3', + 'unit_id': '0020c2d8', + }), + 'now_playing': dict({ + 'controls': list([ + 'play_pause', + 'track_next', + 'track_previous', + ]), + }), + 'play_state': dict({ + 'metadata': dict({ + 'album': "Greatest Hits: God's Favorite Band", + 'art_url': 'http://192.168.20.218:80/album-art-2d89?id=1:246', + 'artist': 'Green Day', + 'bitrate': None, + 'class_name': 'md.track', + 'codec': 'ALAC', + 'duration': 232, + 'encoding': None, + 'lossless': True, + 'mqa': 'none', + 'name': 'AirPlay', + 'radio_id': None, + 'sample_format': None, + 'sample_rate': 44100, + 'signal': None, + 'source': 'AIRPLAY', + 'station': None, + 'title': 'Holiday', + }), + 'mode_repeat': 'off', + 'mode_shuffle': 'off', + 'position': 179, + 'presettable': False, + 'state': 'play', + }), + 'presets_list': dict({ + 'end': 99, + 'max_presets': 99, + 'presets': list([ + dict({ + 'airable_radio_id': 5317566146608442, + 'art_url': 'https://static.airable.io/43/68/432868.png', + 'is_playing': False, + 'name': 'Chicago House Radio', + 'preset_class': 'stream.radio', + 'preset_id': 1, + 'state': 'OK', + 'type': 'Radio', + }), + dict({ + 'airable_radio_id': None, + 'art_url': 'https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59', + 'is_playing': True, + 'name': 'Spotify: Good & Evil', + 'preset_class': 'stream.service.spotify', + 'preset_id': 2, + 'state': 'OK', + 'type': 'Spotify', + }), + dict({ + 'airable_radio_id': None, + 'art_url': None, + 'is_playing': False, + 'name': 'Unknown Preset Type', + 'preset_class': 'stream.unknown', + 'preset_id': 3, + 'state': 'OK', + 'type': 'Unknown', + }), + ]), + 'presettable': True, + 'start': 1, }), 'sources': 
list([ dict({ - '__type': "", - 'repr': "Source(id='IR', name='Internet Radio', default_name='Internet Radio', nameable=False, ui_selectable=False, description='Internet Radio', description_locale='Internet Radio', preferred_order=9)", + 'default_name': 'Internet Radio', + 'description': 'Internet Radio', + 'description_locale': 'Internet Radio', + 'id': 'IR', + 'name': 'Internet Radio', + 'nameable': False, + 'preferred_order': 9, + 'ui_selectable': False, }), dict({ - '__type': "", - 'repr': "Source(id='USB_AUDIO', name='USB Audio', default_name='USB Audio', nameable=True, ui_selectable=True, description='USB Audio', description_locale='USB Audio', preferred_order=1)", + 'default_name': 'USB Audio', + 'description': 'USB Audio', + 'description_locale': 'USB Audio', + 'id': 'USB_AUDIO', + 'name': 'USB Audio', + 'nameable': True, + 'preferred_order': 1, + 'ui_selectable': True, }), dict({ - '__type': "", - 'repr': "Source(id='SPDIF_COAX', name='D2', default_name='D2', nameable=True, ui_selectable=False, description='Digital Co-axial', description_locale='Digital Co-axial', preferred_order=3)", + 'default_name': 'D2', + 'description': 'Digital Co-axial', + 'description_locale': 'Digital Co-axial', + 'id': 'SPDIF_COAX', + 'name': 'D2', + 'nameable': True, + 'preferred_order': 3, + 'ui_selectable': False, }), dict({ - '__type': "", - 'repr': "Source(id='SPDIF_TOSLINK', name='D1', default_name='D1', nameable=True, ui_selectable=False, description='Digital Optical', description_locale='Digital Optical', preferred_order=2)", + 'default_name': 'D1', + 'description': 'Digital Optical', + 'description_locale': 'Digital Optical', + 'id': 'SPDIF_TOSLINK', + 'name': 'D1', + 'nameable': True, + 'preferred_order': 2, + 'ui_selectable': False, }), dict({ - '__type': "", - 'repr': "Source(id='MEDIA_PLAYER', name='Media Library', default_name='Media Library', nameable=False, ui_selectable=True, description='Media Player', description_locale='Media Player', preferred_order=10)", + 'default_name': 'Media Library', + 'description': 'Media Player', + 'description_locale': 'Media Player', + 'id': 'MEDIA_PLAYER', + 'name': 'Media Library', + 'nameable': False, + 'preferred_order': 10, + 'ui_selectable': True, }), dict({ - '__type': "", - 'repr': "Source(id='AIRPLAY', name='AirPlay', default_name='AirPlay', nameable=False, ui_selectable=True, description='AirPlay', description_locale='AirPlay', preferred_order=11)", + 'default_name': 'AirPlay', + 'description': 'AirPlay', + 'description_locale': 'AirPlay', + 'id': 'AIRPLAY', + 'name': 'AirPlay', + 'nameable': False, + 'preferred_order': 11, + 'ui_selectable': True, }), dict({ - '__type': "", - 'repr': "Source(id='SPOTIFY', name='Spotify', default_name='Spotify', nameable=False, ui_selectable=True, description='Spotify', description_locale='Spotify', preferred_order=6)", + 'default_name': 'Spotify', + 'description': 'Spotify', + 'description_locale': 'Spotify', + 'id': 'SPOTIFY', + 'name': 'Spotify', + 'nameable': False, + 'preferred_order': 6, + 'ui_selectable': True, }), dict({ - '__type': "", - 'repr': "Source(id='CAST', name='Chromecast built-in', default_name='Chromecast built-in', nameable=False, ui_selectable=True, description='Chromecast built-in', description_locale='Chromecast built-in', preferred_order=8)", + 'default_name': 'Chromecast built-in', + 'description': 'Chromecast built-in', + 'description_locale': 'Chromecast built-in', + 'id': 'CAST', + 'name': 'Chromecast built-in', + 'nameable': False, + 'preferred_order': 8, + 'ui_selectable': True, }), 
dict({ - '__type': "", - 'repr': "Source(id='ROON', name='Roon Ready', default_name='Roon Ready', nameable=False, ui_selectable=False, description='Roon Ready', description_locale='Roon Ready', preferred_order=5)", + 'default_name': 'Roon Ready', + 'description': 'Roon Ready', + 'description_locale': 'Roon Ready', + 'id': 'ROON', + 'name': 'Roon Ready', + 'nameable': False, + 'preferred_order': 5, + 'ui_selectable': False, }), dict({ - '__type': "", - 'repr': "Source(id='TIDAL', name='TIDAL Connect', default_name='TIDAL Connect', nameable=False, ui_selectable=False, description='TIDAL', description_locale='TIDAL', preferred_order=7)", + 'default_name': 'TIDAL Connect', + 'description': 'TIDAL', + 'description_locale': 'TIDAL', + 'id': 'TIDAL', + 'name': 'TIDAL Connect', + 'nameable': False, + 'preferred_order': 7, + 'ui_selectable': False, }), ]), + 'update': dict({ + 'early_update': False, + 'update_available': False, + 'updating': False, + }), }) # --- diff --git a/tests/components/cambridge_audio/snapshots/test_select.ambr b/tests/components/cambridge_audio/snapshots/test_select.ambr new file mode 100644 index 00000000000000..b40c8a8d5c4bac --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_select.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# name: test_all_entities[select.cambridge_audio_cxnv2_audio_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Speaker A', + 'Speaker B', + 'Headphones', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.cambridge_audio_cxnv2_audio_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Audio output', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'audio_output', + 'unique_id': '0020c2d8-audio_output', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[select.cambridge_audio_cxnv2_audio_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Audio output', + 'options': list([ + 'Speaker A', + 'Speaker B', + 'Headphones', + ]), + }), + 'context': , + 'entity_id': 'select.cambridge_audio_cxnv2_audio_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[select.cambridge_audio_cxnv2_display_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'bright', + 'dim', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.cambridge_audio_cxnv2_display_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display brightness', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'display_brightness', + 'unique_id': '0020c2d8-display_brightness', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_all_entities[select.cambridge_audio_cxnv2_display_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Display brightness', + 'options': list([ + 'bright', + 'dim', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.cambridge_audio_cxnv2_display_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'bright', + }) +# --- diff --git a/tests/components/cambridge_audio/snapshots/test_switch.ambr b/tests/components/cambridge_audio/snapshots/test_switch.ambr new file mode 100644 index 00000000000000..9bfcd7c6da72f6 --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_switch.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_all_entities[switch.cambridge_audio_cxnv2_early_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.cambridge_audio_cxnv2_early_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Early update', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'early_update', + 'unique_id': '0020c2d8-early_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.cambridge_audio_cxnv2_early_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Early update', + }), + 'context': , + 'entity_id': 'switch.cambridge_audio_cxnv2_early_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[switch.cambridge_audio_cxnv2_pre_amp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.cambridge_audio_cxnv2_pre_amp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pre-Amp', + 'platform': 'cambridge_audio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pre_amp', + 'unique_id': '0020c2d8-pre_amp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.cambridge_audio_cxnv2_pre_amp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cambridge Audio CXNv2 Pre-Amp', + }), + 'context': , + 'entity_id': 'switch.cambridge_audio_cxnv2_pre_amp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/cambridge_audio/test_init.py b/tests/components/cambridge_audio/test_init.py index 7dea193d9fd743..4a8c1b668e202a 100644 --- a/tests/components/cambridge_audio/test_init.py +++ b/tests/components/cambridge_audio/test_init.py @@ -2,9 +2,11 @@ from unittest.mock import AsyncMock +from aiostreammagic import StreamMagicError from syrupy import SnapshotAssertion from homeassistant.components.cambridge_audio.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState from 
homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -13,6 +15,20 @@ from tests.common import MockConfigEntry +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test the Cambridge Audio configuration entry not ready.""" + mock_stream_magic_client.connect = AsyncMock(side_effect=StreamMagicError()) + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + mock_stream_magic_client.connect = AsyncMock(return_value=True) + + async def test_device_info( hass: HomeAssistant, snapshot: SnapshotAssertion, diff --git a/tests/components/cambridge_audio/test_media_player.py b/tests/components/cambridge_audio/test_media_player.py index 391cdd868ec953..b857e61c235a20 100644 --- a/tests/components/cambridge_audio/test_media_player.py +++ b/tests/components/cambridge_audio/test_media_player.py @@ -11,10 +11,14 @@ import pytest from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_REPEAT, ATTR_MEDIA_SEEK_POSITION, ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as MP_DOMAIN, + SERVICE_PLAY_MEDIA, MediaPlayerEntityFeature, RepeatMode, ) @@ -31,6 +35,9 @@ SERVICE_SHUFFLE_SET, SERVICE_TURN_OFF, SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, STATE_BUFFERING, STATE_IDLE, STATE_OFF, @@ -40,6 +47,7 @@ STATE_STANDBY, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from . import setup_integration from .const import ENTITY_ID @@ -49,9 +57,8 @@ async def mock_state_update(client: AsyncMock) -> None: """Trigger a callback in the media player.""" - await client.register_state_update_callbacks.call_args[0][0]( - client, CallbackType.STATE - ) + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, CallbackType.STATE) async def test_entity_supported_features( @@ -216,12 +223,12 @@ async def test_media_next_previous_track( mock_stream_magic_client.previous_track.assert_called_once() -async def test_shuffle_repeat( +async def test_shuffle_repeat_set( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_stream_magic_client: AsyncMock, ) -> None: - """Test shuffle and repeat service.""" + """Test shuffle and repeat set service.""" await setup_integration(hass, mock_config_entry) mock_stream_magic_client.now_playing.controls = [ @@ -264,6 +271,36 @@ async def test_shuffle_repeat( mock_stream_magic_client.set_repeat.assert_called_with(CambridgeRepeatMode.ALL) +async def test_shuffle_repeat_get( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test shuffle and repeat get service.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.play_state.mode_shuffle = None + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False + + mock_stream_magic_client.play_state.mode_shuffle = ShuffleMode.ALL + + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_MEDIA_SHUFFLE] is True + + mock_stream_magic_client.play_state.mode_repeat = CambridgeRepeatMode.ALL + + await mock_state_update(mock_stream_magic_client) + await hass.async_block_till_done() + + state = 
hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ALL + + async def test_power_service( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -302,3 +339,160 @@ async def test_media_seek( ) mock_stream_magic_client.media_seek.assert_called_once_with(100) + + +async def test_media_volume( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test volume service.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.state.pre_amp_mode = True + + # Test volume up + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: ENTITY_ID}, + ) + + mock_stream_magic_client.volume_up.assert_called_once() + + # Test volume down + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: ENTITY_ID}, + ) + + mock_stream_magic_client.volume_down.assert_called_once() + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: 0.30}, + ) + + mock_stream_magic_client.set_volume.assert_called_once_with(30) + + +async def test_play_media_preset_item_id( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with a preset item id.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "preset", + ATTR_MEDIA_CONTENT_ID: "1", + }, + blocking=True, + ) + assert mock_stream_magic_client.recall_preset.call_count == 1 + assert mock_stream_magic_client.recall_preset.call_args_list[0].args[0] == 1 + + with pytest.raises(ServiceValidationError, match="Missing preset for media_id: 10"): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "preset", + ATTR_MEDIA_CONTENT_ID: "10", + }, + blocking=True, + ) + + with pytest.raises( + ServiceValidationError, match="Preset must be an integer, got: UNKNOWN_PRESET" + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "preset", + ATTR_MEDIA_CONTENT_ID: "UNKNOWN_PRESET", + }, + blocking=True, + ) + + +async def test_play_media_airable_radio_id( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with an airable radio id.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "airable", + ATTR_MEDIA_CONTENT_ID: "12345678", + }, + blocking=True, + ) + assert mock_stream_magic_client.play_radio_airable.call_count == 1 + call_args = mock_stream_magic_client.play_radio_airable.call_args_list[0].args + assert call_args[0] == "Radio" + assert call_args[1] == 12345678 + + +async def test_play_media_internet_radio( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with a url.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "internet_radio", + ATTR_MEDIA_CONTENT_ID: "https://example.com", + }, + blocking=True, + ) + assert 
mock_stream_magic_client.play_radio_url.call_count == 1 + call_args = mock_stream_magic_client.play_radio_url.call_args_list[0].args + assert call_args[0] == "Radio" + assert call_args[1] == "https://example.com" + + +async def test_play_media_unknown_type( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stream_magic_client: AsyncMock, +) -> None: + """Test playing media with an unsupported content type.""" + await setup_integration(hass, mock_config_entry) + + with pytest.raises( + HomeAssistantError, + match="Unsupported media type for Cambridge Audio device: unsupported_content_type", + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_MEDIA_CONTENT_TYPE: "unsupported_content_type", + ATTR_MEDIA_CONTENT_ID: "1", + }, + blocking=True, + ) diff --git a/tests/components/cambridge_audio/test_select.py b/tests/components/cambridge_audio/test_select.py new file mode 100644 index 00000000000000..473c4027163dc7 --- /dev/null +++ b/tests/components/cambridge_audio/test_select.py @@ -0,0 +1,64 @@ +"""Tests for the Cambridge Audio select platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.cambridge_audio.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.cambridge_audio_cxnv2_display_brightness", + ATTR_OPTION: "dim", + }, + blocking=True, + ) + mock_stream_magic_client.set_display_brightness.assert_called_once_with("dim") + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.cambridge_audio_cxnv2_audio_output", + ATTR_OPTION: "Speaker A", + }, + blocking=True, + ) + mock_stream_magic_client.set_audio_output.assert_called_once_with("speaker_a") diff --git a/tests/components/cambridge_audio/test_switch.py b/tests/components/cambridge_audio/test_switch.py new file mode 100644 index 00000000000000..3192f198d1f4dd --- /dev/null +++ b/tests/components/cambridge_audio/test_switch.py @@ -0,0 +1,60 @@ +"""Tests for the Cambridge Audio switch platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_ON +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, Platform +from homeassistant.core import 
HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.cambridge_audio.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.cambridge_audio_cxnv2_early_update", + }, + blocking=True, + ) + mock_stream_magic_client.set_early_update.assert_called_once_with(True) + mock_stream_magic_client.set_early_update.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.cambridge_audio_cxnv2_early_update", + }, + blocking=True, + ) + mock_stream_magic_client.set_early_update.assert_called_once_with(False) diff --git a/tests/components/camera/common.py b/tests/components/camera/common.py index f7dcf46db01ac1..569756c26401e8 100644 --- a/tests/components/camera/common.py +++ b/tests/components/camera/common.py @@ -6,6 +6,16 @@ from unittest.mock import Mock +from webrtc_models import RTCIceCandidate + +from homeassistant.components.camera import ( + Camera, + CameraWebRTCProvider, + WebRTCAnswer, + WebRTCSendMessage, +) +from homeassistant.core import callback + EMPTY_8_6_JPEG = b"empty_8_6" WEBRTC_ANSWER = "a=sendonly" STREAM_SOURCE = "rtsp://127.0.0.1/stream" @@ -23,3 +33,43 @@ def mock_turbo_jpeg( mocked_turbo_jpeg.scale_with_quality.return_value = EMPTY_8_6_JPEG mocked_turbo_jpeg.encode.return_value = EMPTY_8_6_JPEG return mocked_turbo_jpeg + + +class SomeTestProvider(CameraWebRTCProvider): + """Test provider.""" + + def __init__(self) -> None: + """Initialize the provider.""" + self._is_supported = True + + @property + def domain(self) -> str: + """Return the integration domain of the provider.""" + return "some_test" + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + return self._is_supported + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback. + + Return value determines if the offer was handled successfully. 
+ """ + send_message(WebRTCAnswer(answer="answer")) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle the WebRTC candidate.""" + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" diff --git a/tests/components/camera/conftest.py b/tests/components/camera/conftest.py index 5eda2f1eb55656..f0c418711c7565 100644 --- a/tests/components/camera/conftest.py +++ b/tests/components/camera/conftest.py @@ -1,18 +1,30 @@ """Test helpers for camera.""" from collections.abc import AsyncGenerator, Generator -from unittest.mock import AsyncMock, PropertyMock, patch +from unittest.mock import AsyncMock, Mock, PropertyMock, patch import pytest +from webrtc_models import RTCIceCandidate from homeassistant.components import camera from homeassistant.components.camera.const import StreamType +from homeassistant.components.camera.webrtc import WebRTCAnswer, WebRTCSendMessage +from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.setup import async_setup_component -from .common import STREAM_SOURCE, WEBRTC_ANSWER +from .common import STREAM_SOURCE, WEBRTC_ANSWER, SomeTestProvider + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) @pytest.fixture(autouse=True) @@ -56,23 +68,37 @@ def mock_camera_hls_fixture(mock_camera: None) -> Generator[None]: yield -@pytest.fixture(name="mock_camera_web_rtc") -async def mock_camera_web_rtc_fixture(hass: HomeAssistant) -> AsyncGenerator[None]: +@pytest.fixture +async def mock_camera_webrtc_frontendtype_only( + hass: HomeAssistant, +) -> AsyncGenerator[None]: """Initialize a demo camera platform with WebRTC.""" assert await async_setup_component( hass, "camera", {camera.DOMAIN: {"platform": "demo"}} ) await hass.async_block_till_done() - with ( - patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.WEB_RTC), - ), - patch( - "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", - return_value=WEBRTC_ANSWER, - ), + with patch( + "homeassistant.components.camera.Camera.frontend_stream_type", + new_callable=PropertyMock(return_value=StreamType.WEB_RTC), + ): + yield + + +@pytest.fixture +async def mock_camera_webrtc( + mock_camera_webrtc_frontendtype_only: None, +) -> AsyncGenerator[None]: + """Initialize a demo camera platform with WebRTC.""" + + async def async_handle_async_webrtc_offer( + offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + send_message(WebRTCAnswer(WEBRTC_ANSWER)) + + with patch( + "homeassistant.components.camera.Camera.async_handle_async_webrtc_offer", + side_effect=async_handle_async_webrtc_offer, ): yield @@ -127,3 +153,100 @@ def mock_stream_source_fixture() -> Generator[AsyncMock]: return_value=STREAM_SOURCE, ) as mock_stream_source: yield mock_stream_source + + +@pytest.fixture +async def mock_test_webrtc_cameras(hass: HomeAssistant) -> None: + """Initialize test WebRTC cameras with native RTC support.""" + + # Cannot use the fixture mock_camera_web_rtc as it's mocking Camera.async_handle_web_rtc_offer + # and native support is checked by verify the function "async_handle_web_rtc_offer" was + # overwritten(implemented) or not + class 
BaseCamera(camera.Camera): + """Base Camera.""" + + _attr_supported_features: camera.CameraEntityFeature = ( + camera.CameraEntityFeature.STREAM + ) + _attr_frontend_stream_type: camera.StreamType = camera.StreamType.WEB_RTC + + async def stream_source(self) -> str | None: + return STREAM_SOURCE + + class SyncCamera(BaseCamera): + """Mock Camera with native sync WebRTC support.""" + + _attr_name = "Sync" + + async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: + return WEBRTC_ANSWER + + class AsyncCamera(BaseCamera): + """Mock Camera with native async WebRTC support.""" + + _attr_name = "Async" + + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + send_message(WebRTCAnswer(WEBRTC_ANSWER)) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle a WebRTC candidate.""" + # Do nothing + + domain = "test" + + entry = MockConfigEntry(domain=domain) + entry.add_to_hass(hass) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [camera.DOMAIN] + ) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + await hass.config_entries.async_forward_entry_unload( + config_entry, camera.DOMAIN + ) + return True + + mock_integration( + hass, + MockModule( + domain, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + setup_test_component_platform( + hass, camera.DOMAIN, [SyncCamera(), AsyncCamera()], from_config_entry=True + ) + mock_platform(hass, f"{domain}.config_flow", Mock()) + + with mock_config_flow(domain, ConfigFlow): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + +@pytest.fixture +async def register_test_provider( + hass: HomeAssistant, +) -> AsyncGenerator[SomeTestProvider]: + """Add WebRTC test provider.""" + await async_setup_component(hass, "camera", {}) + + provider = SomeTestProvider() + unsub = camera.async_register_webrtc_provider(hass, provider) + await hass.async_block_till_done() + yield provider + unsub() diff --git a/tests/components/camera/snapshots/test_init.ambr b/tests/components/camera/snapshots/test_init.ambr new file mode 100644 index 00000000000000..eae1c481cc0c3c --- /dev/null +++ b/tests/components/camera/snapshots/test_init.ambr @@ -0,0 +1,127 @@ +# serializer version: 1 +# name: test_record_service[/test/recording_{{ entity_id }}.mpg-/test/recording_.mpg-expected_issues1] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.record', + }), + }) +# --- +# name: test_record_service[/test/recording_{{ entity_id.entity_id }}.mpg-/test/recording_camera.demo_camera.mpg-expected_issues3] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 
'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.record', + }), + }) +# --- +# name: test_record_service[/test/recording_{{ entity_id.name }}.mpg-/test/recording_Demo camera.mpg-expected_issues2] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_record', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.record', + }), + }) +# --- +# name: test_snapshot_service[/test/snapshot_{{ entity_id }}.jpg-/test/snapshot_.jpg-expected_issues1] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.snapshot', + }), + }) +# --- +# name: test_snapshot_service[/test/snapshot_{{ entity_id.entity_id }}.jpg-/test/snapshot_camera.demo_camera.jpg-expected_issues3] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.snapshot', + }), + }) +# --- +# name: test_snapshot_service[/test/snapshot_{{ entity_id.name }}.jpg-/test/snapshot_Demo camera.jpg-expected_issues2] + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.6.0', + 'created': , + 'data': None, + 'dismissed_version': None, + 'domain': 'camera', + 'is_fixable': True, + 'is_persistent': False, + 'issue_domain': None, + 'issue_id': 'deprecated_filename_template_camera.demo_camera_snapshot', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'deprecated_filename_template', + 'translation_placeholders': dict({ + 'entity_id': 'camera.demo_camera', + 'service': 'camera.snapshot', + }), + }) +# --- diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 2b90d621329531..32024694b7ea0d 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -1,33 +1,43 @@ """The tests for the camera component.""" -from collections.abc import Generator from http import HTTPStatus import io from types import ModuleType -from unittest.mock import AsyncMock, Mock, PropertyMock, mock_open, patch +from unittest.mock import ANY, AsyncMock, Mock, PropertyMock, mock_open, patch import pytest +from 
syrupy.assertion import SnapshotAssertion +from webrtc_models import RTCIceCandidate from homeassistant.components import camera +from homeassistant.components.camera import ( + Camera, + CameraWebRTCProvider, + WebRTCAnswer, + WebRTCSendMessage, + async_register_webrtc_provider, +) from homeassistant.components.camera.const import ( DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM, + StreamType, ) +from homeassistant.components.camera.helper import get_camera_from_entity_id from homeassistant.components.websocket_api import TYPE_RESULT -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_STARTED, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from .common import EMPTY_8_6_JPEG, STREAM_SOURCE, WEBRTC_ANSWER, mock_turbo_jpeg +from .common import EMPTY_8_6_JPEG, STREAM_SOURCE, mock_turbo_jpeg from tests.common import ( async_fire_time_changed, @@ -36,9 +46,6 @@ ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -HLS_STREAM_SOURCE = "http://127.0.0.1/example.m3u" -WEBRTC_OFFER = "v=0\r\n" - @pytest.fixture(name="image_mock_url") async def image_mock_url_fixture(hass: HomeAssistant) -> None: @@ -49,34 +56,6 @@ async def image_mock_url_fixture(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.fixture(name="mock_hls_stream_source") -async def mock_hls_stream_source_fixture() -> Generator[AsyncMock]: - """Fixture to create an HLS stream source.""" - with patch( - "homeassistant.components.camera.Camera.stream_source", - return_value=HLS_STREAM_SOURCE, - ) as mock_hls_stream_source: - yield mock_hls_stream_source - - -async def provide_web_rtc_answer(stream_source: str, offer: str, stream_id: str) -> str: - """Simulate an rtsp to webrtc provider.""" - assert stream_source == STREAM_SOURCE - assert offer == WEBRTC_OFFER - return WEBRTC_ANSWER - - -@pytest.fixture(name="mock_rtsp_to_web_rtc") -def mock_rtsp_to_web_rtc_fixture(hass: HomeAssistant) -> Generator[Mock]: - """Fixture that registers a mock rtsp to web_rtc provider.""" - mock_provider = Mock(side_effect=provide_web_rtc_answer) - unsub = camera.async_register_rtsp_to_web_rtc_provider( - hass, "mock_domain", mock_provider - ) - yield mock_provider - unsub() - - @pytest.mark.usefixtures("image_mock_url") async def test_get_image_from_camera(hass: HomeAssistant) -> None: """Grab an image from camera entity.""" @@ -226,7 +205,38 @@ async def test_get_image_fails(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("mock_camera") -async def test_snapshot_service(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("filename_template", "expected_filename", "expected_issues"), + [ + ( + "/test/snapshot.jpg", + "/test/snapshot.jpg", + [], + ), + ( + "/test/snapshot_{{ entity_id }}.jpg", + "/test/snapshot_.jpg", + ["deprecated_filename_template_camera.demo_camera_snapshot"], + ), + ( + "/test/snapshot_{{ entity_id.name }}.jpg", + "/test/snapshot_Demo camera.jpg", + ["deprecated_filename_template_camera.demo_camera_snapshot"], + ), + ( + "/test/snapshot_{{ entity_id.entity_id }}.jpg", + 
"/test/snapshot_camera.demo_camera.jpg", + ["deprecated_filename_template_camera.demo_camera_snapshot"], + ), + ], +) +async def test_snapshot_service( + hass: HomeAssistant, + filename_template: str, + expected_filename: str, + expected_issues: list, + snapshot: SnapshotAssertion, +) -> None: """Test snapshot service.""" mopen = mock_open() @@ -242,16 +252,25 @@ async def test_snapshot_service(hass: HomeAssistant) -> None: camera.SERVICE_SNAPSHOT, { ATTR_ENTITY_ID: "camera.demo_camera", - camera.ATTR_FILENAME: "/test/snapshot.jpg", + camera.ATTR_FILENAME: filename_template, }, blocking=True, ) + mopen.assert_called_once_with(expected_filename, "wb") + mock_write = mopen().write assert len(mock_write.mock_calls) == 1 assert mock_write.mock_calls[0][1][0] == b"Test" + issue_registry = ir.async_get(hass) + assert len(issue_registry.issues) == 1 + len(expected_issues) + for expected_issue in expected_issues: + issue = issue_registry.async_get_issue(DOMAIN, expected_issue) + assert issue is not None + assert issue == snapshot + @pytest.mark.usefixtures("mock_camera") async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: @@ -263,7 +282,30 @@ async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: patch( "homeassistant.components.camera.os.makedirs", ), - pytest.raises(HomeAssistantError, match="/test/snapshot.jpg"), + pytest.raises( + HomeAssistantError, + match="Cannot write `/test/snapshot.jpg`, no access to path", + ), + ): + await hass.services.async_call( + camera.DOMAIN, + camera.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "camera.demo_camera", + camera.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_camera") +async def test_snapshot_service_os_error( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test snapshot service with os error.""" + with ( + patch.object(hass.config, "is_allowed_path", return_value=True), + patch("homeassistant.components.camera.os.makedirs", side_effect=OSError), ): await hass.services.async_call( camera.DOMAIN, @@ -275,6 +317,8 @@ async def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: blocking=True, ) + assert "Can't write image to file:" in caplog.text + @pytest.mark.usefixtures("mock_camera", "mock_stream") async def test_websocket_stream_no_source( @@ -557,7 +601,34 @@ async def test_record_service_invalid_path(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("mock_camera", "mock_stream") -async def test_record_service(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("filename_template", "expected_filename", "expected_issues"), + [ + ("/test/recording.mpg", "/test/recording.mpg", []), + ( + "/test/recording_{{ entity_id }}.mpg", + "/test/recording_.mpg", + ["deprecated_filename_template_camera.demo_camera_record"], + ), + ( + "/test/recording_{{ entity_id.name }}.mpg", + "/test/recording_Demo camera.mpg", + ["deprecated_filename_template_camera.demo_camera_record"], + ), + ( + "/test/recording_{{ entity_id.entity_id }}.mpg", + "/test/recording_camera.demo_camera.mpg", + ["deprecated_filename_template_camera.demo_camera_record"], + ), + ], +) +async def test_record_service( + hass: HomeAssistant, + filename_template: str, + expected_filename: str, + expected_issues: list, + snapshot: SnapshotAssertion, +) -> None: """Test record service.""" with ( patch( @@ -573,12 +644,24 @@ async def test_record_service(hass: HomeAssistant) -> None: await hass.services.async_call( camera.DOMAIN, camera.SERVICE_RECORD, 
- {ATTR_ENTITY_ID: "camera.demo_camera", camera.CONF_FILENAME: "/my/path"}, + { + ATTR_ENTITY_ID: "camera.demo_camera", + camera.ATTR_FILENAME: filename_template, + }, blocking=True, ) # So long as we call stream.record, the rest should be covered # by those tests. - assert mock_record.called + mock_record.assert_called_once_with( + ANY, expected_filename, duration=30, lookback=0 + ) + + issue_registry = ir.async_get(hass) + assert len(issue_registry.issues) == 1 + len(expected_issues) + for expected_issue in expected_issues: + issue = issue_registry.async_get_issue(DOMAIN, expected_issue) + assert issue is not None + assert issue == snapshot @pytest.mark.usefixtures("mock_camera") @@ -600,148 +683,6 @@ async def test_camera_proxy_stream(hass_client: ClientSessionGenerator) -> None: assert response.status == HTTPStatus.BAD_GATEWAY -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test initiating a WebRTC stream with offer and answer.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert response["success"] - assert response["result"]["answer"] == WEBRTC_ANSWER - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_invalid_entity( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC with a camera entity that does not exist.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.does_not_exist", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_missing_offer( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC stream with missing required fields.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "invalid_format" - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_failure( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC stream that fails handling the offer.""" - client = await hass_ws_client(hass) - - with patch( - "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", - side_effect=HomeAssistantError("offer failed"), - ): - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "web_rtc_offer_failed" - assert response["error"]["message"] == "offer failed" - - -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_websocket_web_rtc_offer_timeout( - hass: 
HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC stream with timeout handling the offer.""" - client = await hass_ws_client(hass) - - with patch( - "homeassistant.components.camera.Camera.async_handle_web_rtc_offer", - side_effect=TimeoutError(), - ): - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "web_rtc_offer_failed" - assert response["error"]["message"] == "Timeout handling WebRTC offer" - - -@pytest.mark.usefixtures("mock_camera") -async def test_websocket_web_rtc_offer_invalid_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test WebRTC initiating for a camera with a different stream_type.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response["id"] == 9 - assert response["type"] == TYPE_RESULT - assert not response["success"] - assert response["error"]["code"] == "web_rtc_offer_failed" - - @pytest.mark.usefixtures("mock_camera") async def test_state_streaming(hass: HomeAssistant) -> None: """Camera state.""" @@ -803,144 +744,6 @@ async def test_stream_unavailable( assert demo_camera.state == camera.CameraState.STREAMING -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_web_rtc_offer( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - mock_rtsp_to_web_rtc: Mock, -) -> None: - """Test creating a web_rtc offer from an rstp provider.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 9, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response.get("id") == 9 - assert response.get("type") == TYPE_RESULT - assert response.get("success") - assert "result" in response - assert response["result"] == {"answer": WEBRTC_ANSWER} - - assert mock_rtsp_to_web_rtc.called - - -@pytest.mark.usefixtures( - "mock_camera", - "mock_hls_stream_source", # Not an RTSP stream source - "mock_rtsp_to_web_rtc", -) -async def test_unsupported_rtsp_to_web_rtc_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test rtsp-to-webrtc is not registered for non-RTSP streams.""" - client = await hass_ws_client(hass) - await client.send_json( - { - "id": 10, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - - assert response.get("id") == 10 - assert response.get("type") == TYPE_RESULT - assert "success" in response - assert not response["success"] - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_web_rtc_provider_unregistered( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test creating a web_rtc offer from an rstp provider.""" - mock_provider = Mock(side_effect=provide_web_rtc_answer) - unsub = camera.async_register_rtsp_to_web_rtc_provider( - hass, "mock_domain", mock_provider - ) - - client = await hass_ws_client(hass) - - # Registered provider can handle the WebRTC offer - await client.send_json( - { - 
"id": 11, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["id"] == 11 - assert response["type"] == TYPE_RESULT - assert response["success"] - assert response["result"]["answer"] == WEBRTC_ANSWER - - assert mock_provider.called - mock_provider.reset_mock() - - # Unregister provider, then verify the WebRTC offer cannot be handled - unsub() - await client.send_json( - { - "id": 12, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response.get("id") == 12 - assert response.get("type") == TYPE_RESULT - assert "success" in response - assert not response["success"] - - assert not mock_provider.called - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_rtsp_to_web_rtc_offer_not_accepted( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test a provider that can't satisfy the rtsp to webrtc offer.""" - - async def provide_none(stream_source: str, offer: str) -> str: - """Simulate a provider that can't accept the offer.""" - return None - - mock_provider = Mock(side_effect=provide_none) - unsub = camera.async_register_rtsp_to_web_rtc_provider( - hass, "mock_domain", mock_provider - ) - client = await hass_ws_client(hass) - - # Registered provider can handle the WebRTC offer - await client.send_json( - { - "id": 11, - "type": "camera/web_rtc_offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["id"] == 11 - assert response.get("type") == TYPE_RESULT - assert "success" in response - assert not response["success"] - - assert mock_provider.called - - unsub() - - @pytest.mark.usefixtures("mock_camera") async def test_use_stream_for_stills( hass: HomeAssistant, hass_client: ClientSessionGenerator @@ -1092,3 +895,162 @@ async def test_entity_picture_url_changes_on_token_update(hass: HomeAssistant) - new_entity_picture = camera_state.attributes["entity_picture"] assert new_entity_picture != original_picture assert "token=" in new_entity_picture + + +async def _test_capabilities( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entity_id: str, + expected_stream_types: set[StreamType], + expected_stream_types_with_webrtc_provider: set[StreamType], +) -> None: + """Test camera capabilities.""" + await async_setup_component(hass, "camera", {}) + await hass.async_block_till_done() + + async def test(expected_types: set[StreamType]) -> None: + camera_obj = get_camera_from_entity_id(hass, entity_id) + capabilities = camera_obj.camera_capabilities + assert capabilities == camera.CameraCapabilities(expected_types) + + # Request capabilities through WebSocket + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/capabilities", "entity_id": entity_id} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == {"frontend_stream_types": ANY} + assert sorted(msg["result"]["frontend_stream_types"]) == sorted(expected_types) + + await test(expected_stream_types) + + # Test with WebRTC provider + + class SomeTestProvider(CameraWebRTCProvider): + """Test provider.""" + + @property + def domain(self) -> str: + """Return domain.""" + return "test" + + @callback + def async_is_supported(self, stream_source: str) -> bool: + 
"""Determine if the provider supports the stream source.""" + return True + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback.""" + send_message(WebRTCAnswer("answer")) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle the WebRTC candidate.""" + + provider = SomeTestProvider() + async_register_webrtc_provider(hass, provider) + await hass.async_block_till_done() + await test(expected_stream_types_with_webrtc_provider) + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_camera_capabilities_hls( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test HLS camera capabilities.""" + await _test_capabilities( + hass, + hass_ws_client, + "camera.demo_camera", + {StreamType.HLS}, + {StreamType.HLS, StreamType.WEB_RTC}, + ) + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_camera_capabilities_webrtc( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test WebRTC camera capabilities.""" + + await _test_capabilities( + hass, hass_ws_client, "camera.sync", {StreamType.WEB_RTC}, {StreamType.WEB_RTC} + ) + + +@pytest.mark.parametrize( + ("entity_id", "expect_native_async_webrtc"), + [("camera.sync", False), ("camera.async", True)], +) +@pytest.mark.usefixtures("mock_test_webrtc_cameras", "register_test_provider") +async def test_webrtc_provider_not_added_for_native_webrtc( + hass: HomeAssistant, entity_id: str, expect_native_async_webrtc: bool +) -> None: + """Test that a WebRTC provider is not added to a camera when the camera has native WebRTC support.""" + camera_obj = get_camera_from_entity_id(hass, entity_id) + assert camera_obj + assert camera_obj._webrtc_provider is None + assert camera_obj._supports_native_sync_webrtc is not expect_native_async_webrtc + assert camera_obj._supports_native_async_webrtc is expect_native_async_webrtc + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_camera_capabilities_changing_non_native_support( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test WebRTC camera capabilities.""" + cam = get_camera_from_entity_id(hass, "camera.demo_camera") + assert ( + cam.supported_features + == camera.CameraEntityFeature.ON_OFF | camera.CameraEntityFeature.STREAM + ) + + await _test_capabilities( + hass, + hass_ws_client, + cam.entity_id, + {StreamType.HLS}, + {StreamType.HLS, StreamType.WEB_RTC}, + ) + + cam._attr_supported_features = camera.CameraEntityFeature(0) + cam.async_write_ha_state() + await hass.async_block_till_done() + + await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.parametrize(("entity_id"), ["camera.sync", "camera.async"]) +async def test_camera_capabilities_changing_native_support( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entity_id: str, +) -> None: + """Test WebRTC camera capabilities.""" + cam = get_camera_from_entity_id(hass, entity_id) + assert cam.supported_features == camera.CameraEntityFeature.STREAM + + await _test_capabilities( + hass, hass_ws_client, cam.entity_id, {StreamType.WEB_RTC}, {StreamType.WEB_RTC} + ) + + cam._attr_supported_features = camera.CameraEntityFeature(0) + cam.async_write_ha_state() + await 
hass.async_block_till_done() + + await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) diff --git a/tests/components/camera/test_media_source.py b/tests/components/camera/test_media_source.py index 0780ecc2a9ceb0..85f876d4e818d7 100644 --- a/tests/components/camera/test_media_source.py +++ b/tests/components/camera/test_media_source.py @@ -65,8 +65,8 @@ async def test_browsing_mjpeg(hass: HomeAssistant) -> None: assert item.children[0].title == "Demo camera without stream" -@pytest.mark.usefixtures("mock_camera_web_rtc") -async def test_browsing_web_rtc(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_browsing_webrtc(hass: HomeAssistant) -> None: """Test browsing WebRTC camera media source.""" # 3 cameras: # one only supports WebRTC (no stream source) diff --git a/tests/components/camera/test_webrtc.py b/tests/components/camera/test_webrtc.py index d304c7e5fb0a11..ba5cf35c52f35d 100644 --- a/tests/components/camera/test_webrtc.py +++ b/tests/components/camera/test_webrtc.py @@ -1,23 +1,143 @@ """Test camera WebRTC.""" +from collections.abc import AsyncGenerator, Generator +import logging +from typing import Any +from unittest.mock import AsyncMock, Mock, patch + import pytest +from webrtc_models import RTCIceCandidate, RTCIceServer -from homeassistant.components.camera import Camera -from homeassistant.components.camera.const import StreamType -from homeassistant.components.camera.helper import get_camera_from_entity_id -from homeassistant.components.camera.webrtc import ( +from homeassistant.components.camera import ( DATA_ICE_SERVERS, + DOMAIN as CAMERA_DOMAIN, + Camera, + CameraEntityFeature, CameraWebRTCProvider, - RTCIceServer, + StreamType, + WebRTCAnswer, + WebRTCCandidate, + WebRTCError, + WebRTCMessage, + WebRTCSendMessage, + async_get_supported_legacy_provider, + async_register_ice_servers, + async_register_rtsp_to_web_rtc_provider, async_register_webrtc_provider, - register_ice_server, + get_camera_from_entity_id, ) from homeassistant.components.websocket_api import TYPE_RESULT -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component +from .common import STREAM_SOURCE, WEBRTC_ANSWER, SomeTestProvider + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) from tests.typing import WebSocketGenerator +WEBRTC_OFFER = "v=0\r\n" +HLS_STREAM_SOURCE = "http://127.0.0.1/example.m3u" +TEST_INTEGRATION_DOMAIN = "test" + + +class Go2RTCProvider(SomeTestProvider): + """go2rtc provider.""" + + @property + def domain(self) -> str: + """Return the integration domain of the provider.""" + return "go2rtc" + + +class MockCamera(Camera): + """Mock Camera Entity.""" + + _attr_name = "Test" + _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + _attr_frontend_stream_type: StreamType = StreamType.WEB_RTC + + def __init__(self) -> None: + """Initialize the mock entity.""" + super().__init__() + self._sync_answer: str | None | Exception = WEBRTC_ANSWER + + def set_sync_answer(self, value: str | None | Exception) -> None: + """Set sync offer answer.""" + self._sync_answer = 
value + + async def async_handle_web_rtc_offer(self, offer_sdp: str) -> str | None: + """Handle the WebRTC offer and return the answer.""" + if isinstance(self._sync_answer, Exception): + raise self._sync_answer + return self._sync_answer + + async def stream_source(self) -> str | None: + """Return the source of the stream. + + This is used by cameras with CameraEntityFeature.STREAM + and StreamType.HLS. + """ + return "rtsp://stream" + + +@pytest.fixture +async def init_test_integration( + hass: HomeAssistant, +) -> MockCamera: + """Initialize components.""" + + entry = MockConfigEntry(domain=TEST_INTEGRATION_DOMAIN) + entry.add_to_hass(hass) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [CAMERA_DOMAIN] + ) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + await hass.config_entries.async_forward_entry_unload( + config_entry, CAMERA_DOMAIN + ) + return True + + mock_integration( + hass, + MockModule( + TEST_INTEGRATION_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + test_camera = MockCamera() + setup_test_component_platform( + hass, CAMERA_DOMAIN, [test_camera], from_config_entry=True + ) + mock_platform(hass, f"{TEST_INTEGRATION_DOMAIN}.config_flow", Mock()) + + with mock_config_flow(TEST_INTEGRATION_DOMAIN, ConfigFlow): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + return test_camera + @pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") async def test_async_register_webrtc_provider( @@ -29,36 +149,21 @@ async def test_async_register_webrtc_provider( camera = get_camera_from_entity_id(hass, "camera.demo_camera") assert camera.frontend_stream_type is StreamType.HLS - stream_supported = True - - class TestProvider(CameraWebRTCProvider): - """Test provider.""" - - async def async_is_supported(self, stream_source: str) -> bool: - """Determine if the provider supports the stream source.""" - nonlocal stream_supported - return stream_supported - - async def async_handle_web_rtc_offer( - self, camera: Camera, offer_sdp: str - ) -> str | None: - """Handle the WebRTC offer and return an answer.""" - return "answer" - - unregister = async_register_webrtc_provider(hass, TestProvider()) + provider = SomeTestProvider() + unregister = async_register_webrtc_provider(hass, provider) await hass.async_block_till_done() assert camera.frontend_stream_type is StreamType.WEB_RTC # Mark stream as unsupported - stream_supported = False + provider._is_supported = False # Manually refresh the provider await camera.async_refresh_providers() assert camera.frontend_stream_type is StreamType.HLS - # Mark stream as unsupported - stream_supported = True + # Mark stream as supported + provider._is_supported = True # Manually refresh the provider await camera.async_refresh_providers() assert camera.frontend_stream_type is StreamType.WEB_RTC @@ -72,51 +177,19 @@ async def async_handle_web_rtc_offer( @pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") async def test_async_register_webrtc_provider_twice( hass: HomeAssistant, + register_test_provider: SomeTestProvider, ) -> None: """Test registering a WebRTC provider twice should raise.""" - await async_setup_component(hass, "camera", {}) - - class 
TestProvider(CameraWebRTCProvider): - """Test provider.""" - - async def async_is_supported(self, stream_source: str) -> bool: - """Determine if the provider supports the stream source.""" - return True - - async def async_handle_web_rtc_offer( - self, camera: Camera, offer_sdp: str - ) -> str | None: - """Handle the WebRTC offer and return an answer.""" - return "answer" - - provider = TestProvider() - async_register_webrtc_provider(hass, provider) - await hass.async_block_till_done() - with pytest.raises(ValueError, match="Provider already registered"): - async_register_webrtc_provider(hass, provider) + async_register_webrtc_provider(hass, register_test_provider) async def test_async_register_webrtc_provider_camera_not_loaded( hass: HomeAssistant, ) -> None: """Test registering a WebRTC provider when camera is not loaded.""" - - class TestProvider(CameraWebRTCProvider): - """Test provider.""" - - async def async_is_supported(self, stream_source: str) -> bool: - """Determine if the provider supports the stream source.""" - return True - - async def async_handle_web_rtc_offer( - self, camera: Camera, offer_sdp: str - ) -> str | None: - """Handle the WebRTC offer and return an answer.""" - return "answer" - with pytest.raises(ValueError, match="Unexpected state, camera not loaded"): - async_register_webrtc_provider(hass, TestProvider()) + async_register_webrtc_provider(hass, SomeTestProvider()) @pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") @@ -131,37 +204,48 @@ async def test_async_register_ice_server( called = 0 - async def get_ice_server() -> RTCIceServer: + @callback + def get_ice_servers() -> list[RTCIceServer]: nonlocal called called += 1 - return RTCIceServer(urls="stun:example.com") + return [ + RTCIceServer(urls="stun:example.com"), + RTCIceServer(urls="turn:example.com"), + ] - unregister = register_ice_server(hass, get_ice_server) + unregister = async_register_ice_servers(hass, get_ice_servers) assert not called camera = get_camera_from_entity_id(hass, "camera.demo_camera") - config = await camera.async_get_webrtc_client_configuration() + config = camera.async_get_webrtc_client_configuration() - assert config.configuration.ice_servers == [RTCIceServer(urls="stun:example.com")] + assert config.configuration.ice_servers == [ + RTCIceServer(urls="stun:example.com"), + RTCIceServer(urls="turn:example.com"), + ] assert called == 1 # register another ICE server called_2 = 0 - async def get_ice_server_2() -> RTCIceServer: + @callback + def get_ice_servers_2() -> list[RTCIceServer]: nonlocal called_2 called_2 += 1 - return RTCIceServer( - urls=["stun:example2.com", "turn:example2.com"], - username="user", - credential="pass", - ) + return [ + RTCIceServer( + urls=["stun:example2.com", "turn:example2.com"], + username="user", + credential="pass", + ) + ] - unregister_2 = register_ice_server(hass, get_ice_server_2) + unregister_2 = async_register_ice_servers(hass, get_ice_servers_2) - config = await camera.async_get_webrtc_client_configuration() + config = camera.async_get_webrtc_client_configuration() assert config.configuration.ice_servers == [ RTCIceServer(urls="stun:example.com"), + RTCIceServer(urls="turn:example.com"), RTCIceServer( urls=["stun:example2.com", "turn:example2.com"], username="user", @@ -175,7 +259,7 @@ async def get_ice_server_2() -> RTCIceServer: unregister() - config = await camera.async_get_webrtc_client_configuration() + config = camera.async_get_webrtc_client_configuration() assert config.configuration.ice_servers == [ RTCIceServer( 
urls=["stun:example2.com", "turn:example2.com"], @@ -189,11 +273,11 @@ async def get_ice_server_2() -> RTCIceServer: # unregister the second ICE server unregister_2() - config = await camera.async_get_webrtc_client_configuration() + config = camera.async_get_webrtc_client_configuration() assert config.configuration.ice_servers == [] -@pytest.mark.usefixtures("mock_camera_web_rtc") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_get_client_config( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -210,7 +294,106 @@ async def test_ws_get_client_config( assert msg["type"] == TYPE_RESULT assert msg["success"] assert msg["result"] == { - "configuration": {"iceServers": [{"urls": "stun:stun.l.google.com:19302"}]} + "configuration": { + "iceServers": [ + { + "urls": [ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + }, + ], + }, + "getCandidatesUpfront": False, + } + + @callback + def get_ice_server() -> list[RTCIceServer]: + return [ + RTCIceServer( + urls=["stun:example2.com", "turn:example2.com"], + username="user", + credential="pass", + ) + ] + + async_register_ice_servers(hass, get_ice_server) + + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == { + "configuration": { + "iceServers": [ + { + "urls": [ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + }, + { + "urls": ["stun:example2.com", "turn:example2.com"], + "username": "user", + "credential": "pass", + }, + ], + }, + "getCandidatesUpfront": False, + } + + +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_get_client_config_sync_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test get WebRTC client config, when camera is supporting sync offer.""" + await async_setup_component(hass, "camera", {}) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.sync"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == { + "configuration": {}, + "getCandidatesUpfront": True, + } + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_ws_get_client_config_custom_config( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test get WebRTC client config.""" + await async_process_ha_core_config( + hass, + {"webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}}, + ) + + await async_setup_component(hass, "camera", {}) + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == { + "configuration": {"iceServers": [{"urls": ["stun:custom_stun_server:3478"]}]}, + "getCandidatesUpfront": False, } @@ -231,6 +414,803 @@ async def test_ws_get_client_config_no_rtc_camera( assert msg["type"] == TYPE_RESULT assert not msg["success"] assert msg["error"] == { - "code": "web_rtc_offer_failed", + "code": "webrtc_get_client_config_failed", + "message": "Camera does not support 
WebRTC, frontend_stream_type=hls", + } + + +async def provide_webrtc_answer(stream_source: str, offer: str, stream_id: str) -> str: + """Simulate an rtsp to webrtc provider.""" + assert stream_source == STREAM_SOURCE + assert offer == WEBRTC_OFFER + return WEBRTC_ANSWER + + +@pytest.fixture(name="mock_rtsp_to_webrtc") +def mock_rtsp_to_webrtc_fixture(hass: HomeAssistant) -> Generator[Mock]: + """Fixture that registers a mock rtsp to webrtc provider.""" + mock_provider = Mock(side_effect=provide_webrtc_answer) + unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) + yield mock_provider + unsub() + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_websocket_webrtc_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test initiating a WebRTC stream with offer and answer.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": WEBRTC_ANSWER, + } + + # Unsubscribe/Close session + await client.send_json_auto_id( + { + "type": "unsubscribe_events", + "subscription": subscription_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + + +@pytest.mark.parametrize( + ("message", "expected_frontend_message"), + [ + ( + WebRTCCandidate(RTCIceCandidate("candidate")), + {"type": "candidate", "candidate": "candidate"}, + ), + ( + WebRTCError("webrtc_offer_failed", "error"), + {"type": "error", "code": "webrtc_offer_failed", "message": "error"}, + ), + (WebRTCAnswer("answer"), {"type": "answer", "answer": "answer"}), + ], + ids=["candidate", "error", "answer"], +) +@pytest.mark.usefixtures("mock_stream_source", "mock_camera") +async def test_websocket_webrtc_offer_webrtc_provider( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, + message: WebRTCMessage, + expected_frontend_message: dict[str, Any], +) -> None: + """Test initiating a WebRTC stream with a webrtc provider.""" + client = await hass_ws_client(hass) + with ( + patch.object( + register_test_provider, "async_handle_async_webrtc_offer", autospec=True + ) as mock_async_handle_async_webrtc_offer, + patch.object( + register_test_provider, "async_close_session", autospec=True + ) as mock_async_close_session, + ): + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + mock_async_handle_async_webrtc_offer.assert_called_once() + assert mock_async_handle_async_webrtc_offer.call_args[0][1] == WEBRTC_OFFER + send_message: WebRTCSendMessage = ( + mock_async_handle_async_webrtc_offer.call_args[0][3] + ) + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" 
+ assert response["event"]["type"] == "session" + session_id = response["event"]["session_id"] + + send_message(message) + + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == expected_frontend_message + + # Unsubscribe/Close session + await client.send_json_auto_id( + { + "type": "unsubscribe_events", + "subscription": subscription_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + mock_async_close_session.assert_called_once_with(session_id) + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_websocket_webrtc_offer_invalid_entity( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC with a camera entity that does not exist.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.does_not_exist", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Camera not found", + } + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_websocket_webrtc_offer_missing_offer( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC stream with missing required fields.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "invalid_format" + + +@pytest.mark.parametrize( + ("error", "expected_message"), + [ + (ValueError("value error"), "value error"), + (HomeAssistantError("offer failed"), "offer failed"), + (TimeoutError(), "Timeout handling WebRTC offer"), + ], +) +@pytest.mark.usefixtures("mock_camera_webrtc_frontendtype_only") +async def test_websocket_webrtc_offer_failure( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_test_integration: MockCamera, + error: Exception, + expected_message: str, +) -> None: + """Test WebRTC stream that fails handling the offer.""" + client = await hass_ws_client(hass) + init_test_integration.set_sync_answer(error) + + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.test", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Error + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": expected_message, + } + + +async def test_websocket_webrtc_offer_sync( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_test_integration: MockCamera, +) -> None: + """Test sync WebRTC stream offer.""" + client = await hass_ws_client(hass) + init_test_integration.set_sync_answer(WEBRTC_ANSWER) + + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.test", + "offer": WEBRTC_OFFER, + 
} + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == {"type": "answer", "answer": WEBRTC_ANSWER} + + +async def test_websocket_webrtc_offer_sync_no_answer( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + init_test_integration: MockCamera, +) -> None: + """Test sync WebRTC stream offer with no answer.""" + client = await hass_ws_client(hass) + init_test_integration.set_sync_answer(None) + + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.test", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "No answer on WebRTC offer", + } + assert ( + "homeassistant.components.camera", + logging.ERROR, + "Error handling WebRTC offer: No answer", + ) in caplog.record_tuples + + +@pytest.mark.usefixtures("mock_camera") +async def test_websocket_webrtc_offer_invalid_stream_type( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test WebRTC initiating for a camera with a different stream_type.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC, frontend_stream_type=hls", + } + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_webrtc_offer( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_rtsp_to_webrtc: Mock, +) -> None: + """Test creating a webrtc offer from an rtsp provider.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": WEBRTC_ANSWER, + } + + assert mock_rtsp_to_webrtc.called + + +@pytest.fixture(name="mock_hls_stream_source") +async def 
mock_hls_stream_source_fixture() -> AsyncGenerator[AsyncMock]: + """Fixture to create an HLS stream source.""" + with patch( + "homeassistant.components.camera.Camera.stream_source", + return_value=HLS_STREAM_SOURCE, + ) as mock_hls_stream_source: + yield mock_hls_stream_source + + +@pytest.mark.usefixtures( + "mock_camera", + "mock_hls_stream_source", # Not an RTSP stream source + "mock_camera_webrtc_frontendtype_only", +) +async def test_unsupported_rtsp_to_webrtc_stream_type( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test rtsp-to-webrtc is not registered for non-RTSP streams.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC", + } + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_webrtc_provider_unregistered( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test creating a webrtc offer from an rtsp provider.""" + mock_provider = Mock(side_effect=provide_webrtc_answer) + unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) + + client = await hass_ws_client(hass) + + # Registered provider can handle the WebRTC offer + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": WEBRTC_ANSWER, + } + + assert mock_provider.called + mock_provider.reset_mock() + + # Unregister provider, then verify the WebRTC offer cannot be handled + unsub() + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response.get("type") == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC, frontend_stream_type=hls", + } + + assert not mock_provider.called + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_rtsp_to_webrtc_offer_not_accepted( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test a provider that can't satisfy the rtsp to webrtc offer.""" + + async def provide_none( + stream_source: str, offer: str, stream_id: str + ) -> str | None: + """Simulate a provider that can't 
accept the offer.""" + return None + + mock_provider = Mock(side_effect=provide_none) + unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) + client = await hass_ws_client(hass) + + # Registered provider can handle the WebRTC offer + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.demo_camera", + "offer": WEBRTC_OFFER, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC", + } + + assert mock_provider.called + + unsub() + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_ws_webrtc_candidate( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws webrtc candidate command.""" + client = await hass_ws_client(hass) + session_id = "session_id" + candidate = "candidate" + with patch( + "homeassistant.components.camera.Camera.async_on_webrtc_candidate" + ) as mock_on_webrtc_candidate: + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.demo_camera", + "session_id": session_id, + "candidate": candidate, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + mock_on_webrtc_candidate.assert_called_once_with( + session_id, RTCIceCandidate(candidate) + ) + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_ws_webrtc_candidate_not_supported( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws webrtc candidate command is raising if not supported.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.demo_camera", + "session_id": "session_id", + "candidate": "candidate", + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Cannot handle WebRTC candidate", + } + + +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +async def test_ws_webrtc_candidate_webrtc_provider( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, +) -> None: + """Test ws webrtc candidate command with WebRTC provider.""" + with patch.object( + register_test_provider, "async_on_webrtc_candidate" + ) as mock_on_webrtc_candidate: + client = await hass_ws_client(hass) + session_id = "session_id" + candidate = "candidate" + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.demo_camera", + "session_id": session_id, + "candidate": candidate, + } + ) + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + mock_on_webrtc_candidate.assert_called_once_with( + session_id, RTCIceCandidate(candidate) + ) + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_ws_webrtc_candidate_invalid_entity( + hass: HomeAssistant, 
hass_ws_client: WebSocketGenerator +) -> None: + """Test ws WebRTC candidate command with a camera entity that does not exist.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.does_not_exist", + "session_id": "session_id", + "candidate": "candidate", + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Camera not found", + } + + +@pytest.mark.usefixtures("mock_camera_webrtc") +async def test_ws_webrtc_candidate_missing_candidate( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws WebRTC candidate command with missing required fields.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.demo_camera", + "session_id": "session_id", + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"]["code"] == "invalid_format" + + +@pytest.mark.usefixtures("mock_camera") +async def test_ws_webrtc_candidate_invalid_stream_type( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test ws WebRTC candidate command for a camera with a different stream_type.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.demo_camera", + "session_id": "session_id", + "candidate": "candidate", + } + ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "webrtc_candidate_failed", "message": "Camera does not support WebRTC, frontend_stream_type=hls", } + + +async def test_webrtc_provider_optional_interface(hass: HomeAssistant) -> None: + """Test optional interface for WebRTC provider.""" + + class OnlyRequiredInterfaceProvider(CameraWebRTCProvider): + """Test provider.""" + + @property + def domain(self) -> str: + """Return the domain of the provider.""" + return "test" + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Determine if the provider supports the stream source.""" + return True + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback. + + Return value determines if the offer was handled successfully. 
+ """ + send_message(WebRTCAnswer(answer="answer")) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle the WebRTC candidate.""" + + provider = OnlyRequiredInterfaceProvider() + # Call all interface methods + assert provider.async_is_supported("stream_source") is True + await provider.async_handle_async_webrtc_offer( + Mock(), "offer_sdp", "session_id", Mock() + ) + await provider.async_on_webrtc_candidate("session_id", RTCIceCandidate("candidate")) + provider.async_close_session("session_id") + + +@pytest.mark.usefixtures("mock_camera") +async def test_repair_issue_legacy_provider( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test repair issue created for legacy provider.""" + # Ensure no issue if no provider is registered + assert not issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + + # Register a legacy provider + legacy_provider = Mock(side_effect=provide_webrtc_answer) + unsub_legacy_provider = async_register_rtsp_to_web_rtc_provider( + hass, "mock_domain", legacy_provider + ) + await hass.async_block_till_done() + + # Ensure no issue if only legacy provider is registered + assert not issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + + provider = Go2RTCProvider() + unsub_go2rtc_provider = async_register_webrtc_provider(hass, provider) + await hass.async_block_till_done() + + # Ensure issue when legacy and builtin provider are registered + issue = issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + assert issue + assert issue.is_fixable is False + assert issue.is_persistent is False + assert issue.issue_domain == "mock_domain" + assert issue.learn_more_url == "https://www.home-assistant.io/integrations/go2rtc/" + assert issue.severity == ir.IssueSeverity.WARNING + assert issue.issue_id == "legacy_webrtc_provider_mock_domain" + assert issue.translation_key == "legacy_webrtc_provider" + assert issue.translation_placeholders == { + "legacy_integration": "mock_domain", + "builtin_integration": "go2rtc", + } + + unsub_legacy_provider() + unsub_go2rtc_provider() + + +@pytest.mark.usefixtures("mock_camera", "register_test_provider", "mock_rtsp_to_webrtc") +async def test_no_repair_issue_without_new_provider( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test repair issue not created if no go2rtc provider exists.""" + assert not issue_registry.async_get_issue( + "camera", "legacy_webrtc_provider_mock_domain" + ) + + +@pytest.mark.usefixtures("mock_camera", "mock_rtsp_to_webrtc") +async def test_registering_same_legacy_provider( + hass: HomeAssistant, +) -> None: + """Test registering the same legacy provider twice.""" + legacy_provider = Mock(side_effect=provide_webrtc_answer) + with pytest.raises(ValueError, match="Provider already registered"): + async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", legacy_provider) + + +@pytest.mark.usefixtures("mock_hls_stream_source", "mock_camera", "mock_rtsp_to_webrtc") +async def test_get_not_supported_legacy_provider(hass: HomeAssistant) -> None: + """Test getting a not supported legacy provider.""" + camera = get_camera_from_entity_id(hass, "camera.demo_camera") + assert await async_get_supported_legacy_provider(hass, camera) is None diff --git a/tests/components/canary/test_alarm_control_panel.py b/tests/components/canary/test_alarm_control_panel.py index 83e801d67c4514..a194621b0d9055 100644 --- 
a/tests/components/canary/test_alarm_control_panel.py +++ b/tests/components/canary/test_alarm_control_panel.py @@ -4,17 +4,16 @@ from canary.const import LOCATION_MODE_AWAY, LOCATION_MODE_HOME, LOCATION_MODE_NIGHT -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.components.canary import DOMAIN from homeassistant.const import ( SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -67,7 +66,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED assert state.attributes["private"] type(mocked_location).is_private = PropertyMock(return_value=False) @@ -82,7 +81,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME # test armed away type(mocked_location).mode = PropertyMock( @@ -94,7 +93,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY # test armed night type(mocked_location).mode = PropertyMock( @@ -106,7 +105,7 @@ async def test_alarm_control_panel( state = hass.states.get(entity_id) assert state - assert state.state == STATE_ALARM_ARMED_NIGHT + assert state.state == AlarmControlPanelState.ARMED_NIGHT async def test_alarm_control_panel_services(hass: HomeAssistant, canary) -> None: diff --git a/tests/components/cast/test_home_assistant_cast.py b/tests/components/cast/test_home_assistant_cast.py index c9e311bb02434f..2fc348fd008012 100644 --- a/tests/components/cast/test_home_assistant_cast.py +++ b/tests/components/cast/test_home_assistant_cast.py @@ -5,8 +5,8 @@ import pytest from homeassistant.components.cast import DOMAIN, home_assistant_cast -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry, async_mock_signal diff --git a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index 513f32b1ad69cf..b2ce60e9393020 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -27,13 +27,13 @@ MediaClass, MediaPlayerEntityFeature, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, CAST_APP_ID_HOMEASSISTANT_LOVELACE, EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er, network from homeassistant.helpers.dispatcher import ( diff --git a/tests/components/chacon_dio/test_cover.py b/tests/components/chacon_dio/test_cover.py index 24e6e8581d8930..9e9f403ed0b254 100644 --- a/tests/components/chacon_dio/test_cover.py +++ 
b/tests/components/chacon_dio/test_cover.py @@ -13,9 +13,7 @@ SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.const import ATTR_ENTITY_ID @@ -73,7 +71,7 @@ async def test_update( state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 51 - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async def test_cover_actions( @@ -95,7 +93,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -105,7 +103,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -115,7 +113,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING await hass.services.async_call( COVER_DOMAIN, @@ -125,7 +123,7 @@ async def test_cover_actions( ) await hass.async_block_till_done() state = hass.states.get(COVER_ENTITY_ID) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_cover_callbacks( @@ -161,19 +159,19 @@ async def _callback_device_state_function(open_level: int, movement: str) -> Non state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 79 - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await _callback_device_state_function(90, "up") state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 90 - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING await _callback_device_state_function(60, "down") state = hass.states.get(COVER_ENTITY_ID) assert state assert state.attributes.get(ATTR_CURRENT_POSITION) == 60 - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_no_cover_found( diff --git a/tests/components/climate/test_intent.py b/tests/components/climate/test_intent.py index 54e2e4ff1a65ed..d17f3a1747dea4 100644 --- a/tests/components/climate/test_intent.py +++ b/tests/components/climate/test_intent.py @@ -371,7 +371,7 @@ async def test_not_exposed( {"name": {"value": climate_1.name}}, assistant=conversation.DOMAIN, ) - assert err.value.result.no_match_reason == intent.MatchFailedReason.NAME + assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT # Expose first, hide second async_expose_entity(hass, conversation.DOMAIN, climate_1.entity_id, True) diff --git a/tests/components/cloud/conftest.py b/tests/components/cloud/conftest.py index 2edd9571bdd3ff..7002f7c39ec877 100644 --- a/tests/components/cloud/conftest.py +++ b/tests/components/cloud/conftest.py @@ -3,13 +3,14 @@ from collections.abc import AsyncGenerator, Callable, Coroutine, Generator from pathlib import Path from typing import Any -from unittest.mock import DEFAULT, MagicMock, PropertyMock, patch +from unittest.mock import DEFAULT, AsyncMock, MagicMock, PropertyMock, patch from hass_nabucasa import Cloud from hass_nabucasa.auth import CognitoAuth from 
hass_nabucasa.cloudhooks import Cloudhooks from hass_nabucasa.const import DEFAULT_SERVERS, DEFAULT_VALUES, STATE_CONNECTED from hass_nabucasa.google_report_state import GoogleReportState +from hass_nabucasa.ice_servers import IceServers from hass_nabucasa.iot import CloudIoT from hass_nabucasa.remote import RemoteUI from hass_nabucasa.voice import Voice @@ -68,6 +69,12 @@ async def cloud_fixture() -> AsyncGenerator[MagicMock]: ) mock_cloud.voice = MagicMock(spec=Voice) mock_cloud.started = None + mock_cloud.ice_servers = MagicMock( + spec=IceServers, + async_register_ice_servers_listener=AsyncMock( + return_value=lambda: "mock-unregister" + ), + ) def set_up_mock_cloud( cloud_client: CloudClient, mode: str, **kwargs: Any diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 7af163cc49d85e..43eccc5ef9cebe 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -1,5 +1,6 @@ """Test the cloud.iot module.""" +from collections.abc import Callable, Coroutine from datetime import timedelta from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch @@ -183,6 +184,59 @@ async def test_handler_google_actions_disabled( assert resp["payload"] == response_payload +async def test_handler_ice_servers( + hass: HomeAssistant, + cloud: MagicMock, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], +) -> None: + """Test handler ICE servers.""" + assert await async_setup_component(hass, "cloud", {"cloud": {}}) + await hass.async_block_till_done() + # make sure that preferences will not be reset + await cloud.client.prefs.async_set_username(cloud.username) + await set_cloud_prefs( + { + "alexa_enabled": False, + "google_enabled": False, + } + ) + + await cloud.login("test-user", "test-pass") + await cloud.client.cloud_connected() + + assert cloud.client._cloud_ice_servers_listener is not None + assert cloud.client._cloud_ice_servers_listener() == "mock-unregister" + + +async def test_handler_ice_servers_disabled( + hass: HomeAssistant, + cloud: MagicMock, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], +) -> None: + """Test handler ICE servers when user has disabled it.""" + assert await async_setup_component(hass, "cloud", {"cloud": {}}) + await hass.async_block_till_done() + # make sure that preferences will not be reset + await cloud.client.prefs.async_set_username(cloud.username) + await set_cloud_prefs( + { + "alexa_enabled": False, + "google_enabled": False, + } + ) + + await cloud.login("test-user", "test-pass") + await cloud.client.cloud_connected() + + await set_cloud_prefs( + { + "cloud_ice_servers_enabled": False, + } + ) + + assert cloud.client._cloud_ice_servers_listener is None + + async def test_webhook_msg( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -475,13 +529,16 @@ async def test_logged_out( await cloud.client.cloud_connected() await hass.async_block_till_done() + assert cloud.client._cloud_ice_servers_listener is not None + # Simulate logged out await cloud.logout() await hass.async_block_till_done() - # Check we clean up Alexa and Google + # Check we clean up Alexa, Google and ICE servers assert cloud.client._alexa_config is None assert cloud.client._google_config is None + assert cloud.client._cloud_ice_servers_listener is None google_config_mock.async_deinitialize.assert_called_once_with() alexa_config_mock.async_deinitialize.assert_called_once_with() diff --git 
a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index 15339f43dae7b4..216fc77db4800c 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -784,6 +784,7 @@ async def test_websocket_status( "google_report_state": True, "remote_allow_remote_enable": True, "remote_enabled": False, + "cloud_ice_servers_enabled": True, "tts_default_voice": ["en-US", "JennyNeural"], }, "alexa_entities": { @@ -903,6 +904,7 @@ async def test_websocket_update_preferences( assert cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin is None assert cloud.client.prefs.remote_allow_remote_enable is True + assert cloud.client.prefs.cloud_ice_servers_enabled is True client = await hass_ws_client(hass) @@ -914,6 +916,7 @@ async def test_websocket_update_preferences( "google_secure_devices_pin": "1234", "tts_default_voice": ["en-GB", "RyanNeural"], "remote_allow_remote_enable": False, + "cloud_ice_servers_enabled": False, } ) response = await client.receive_json() @@ -923,6 +926,7 @@ async def test_websocket_update_preferences( assert not cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin == "1234" assert cloud.client.prefs.remote_allow_remote_enable is False + assert cloud.client.prefs.cloud_ice_servers_enabled is False assert cloud.client.prefs.tts_default_voice == ("en-GB", "RyanNeural") diff --git a/tests/components/cloud/test_system_health.py b/tests/components/cloud/test_system_health.py index 60b23e47fecd34..6293f44067dff1 100644 --- a/tests/components/cloud/test_system_health.py +++ b/tests/components/cloud/test_system_health.py @@ -50,7 +50,12 @@ async def test_cloud_system_health( await cloud.client.async_system_message({"region": "xx-earth-616"}) await set_cloud_prefs( - {"alexa_enabled": True, "google_enabled": False, "remote_enabled": True} + { + "alexa_enabled": True, + "google_enabled": False, + "remote_enabled": True, + "cloud_ice_servers_enabled": True, + } ) info = await get_system_health_info(hass, "cloud") @@ -70,6 +75,7 @@ async def test_cloud_system_health( "remote_server": "us-west-1", "alexa_enabled": True, "google_enabled": False, + "cloud_ice_servers_enabled": True, "can_reach_cert_server": "ok", "can_reach_cloud_auth": {"type": "failed", "error": "unreachable"}, "can_reach_cloud": "ok", diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py index 50ea5e87d82112..499981c643d005 100644 --- a/tests/components/cloud/test_tts.py +++ b/tests/components/cloud/test_tts.py @@ -25,9 +25,9 @@ DOMAIN as TTS_DOMAIN, get_engine_instance, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component diff --git a/tests/components/co2signal/test_config_flow.py b/tests/components/co2signal/test_config_flow.py index 92d9450b6703eb..f8f94d44126480 100644 --- a/tests/components/co2signal/test_config_flow.py +++ b/tests/components/co2signal/test_config_flow.py @@ -44,7 +44,7 @@ async def test_form_home(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "CO2 Signal" + assert 
result2["title"] == "Electricity Maps" assert result2["data"] == { "api_key": "api_key", } @@ -185,7 +185,7 @@ async def test_form_error_handling( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "CO2 Signal" + assert result["title"] == "Electricity Maps" assert result["data"] == { "api_key": "api_key", } diff --git a/tests/components/comelit/const.py b/tests/components/comelit/const.py index 998c12c09b701e..92fdfebfa1d29b 100644 --- a/tests/components/comelit/const.py +++ b/tests/components/comelit/const.py @@ -1,6 +1,19 @@ """Common stuff for Comelit SimpleHome tests.""" -from aiocomelit.const import VEDO +from aiocomelit import ComelitVedoAreaObject, ComelitVedoZoneObject +from aiocomelit.api import ComelitSerialBridgeObject +from aiocomelit.const import ( + CLIMATE, + COVER, + IRRIGATION, + LIGHT, + OTHER, + SCENARIO, + VEDO, + WATT, + AlarmAreaState, + AlarmZoneState, +) from homeassistant.components.comelit.const import DOMAIN from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE @@ -27,3 +40,67 @@ MOCK_USER_VEDO_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][1] FAKE_PIN = 5678 + +BRIDGE_DEVICE_QUERY = { + CLIMATE: {}, + COVER: { + 0: ComelitSerialBridgeObject( + index=0, + name="Cover0", + status=0, + human_status="closed", + type="cover", + val=0, + protected=0, + zone="Open space", + power=0.0, + power_unit=WATT, + ) + }, + LIGHT: { + 0: ComelitSerialBridgeObject( + index=0, + name="Light0", + status=0, + human_status="off", + type="light", + val=0, + protected=0, + zone="Bathroom", + power=0.0, + power_unit=WATT, + ) + }, + OTHER: {}, + IRRIGATION: {}, + SCENARIO: {}, +} + +VEDO_DEVICE_QUERY = { + "aree": { + 0: ComelitVedoAreaObject( + index=0, + name="Area0", + p1=True, + p2=False, + ready=False, + armed=False, + alarm=False, + alarm_memory=False, + sabotage=False, + anomaly=False, + in_time=False, + out_time=False, + human_status=AlarmAreaState.UNKNOWN, + ) + }, + "zone": { + 0: ComelitVedoZoneObject( + index=0, + name="Zone0", + status_api="0x000", + status=0, + human_status=AlarmZoneState.REST, + ) + }, +} diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000000..58ce74035f93db --- /dev/null +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -0,0 +1,144 @@ +# serializer version: 1 +# name: test_entry_diagnostics_bridge + dict({ + 'device_info': dict({ + 'devices': list([ + dict({ + 'clima': list([ + ]), + }), + dict({ + 'shutter': list([ + dict({ + '0': dict({ + 'human_status': 'closed', + 'name': 'Cover0', + 'power': 0.0, + 'power_unit': 'W', + 'protected': 0, + 'status': 0, + 'val': 0, + 'zone': 'Open space', + }), + }), + ]), + }), + dict({ + 'light': list([ + dict({ + '0': dict({ + 'human_status': 'off', + 'name': 'Light0', + 'power': 0.0, + 'power_unit': 'W', + 'protected': 0, + 'status': 0, + 'val': 0, + 'zone': 'Bathroom', + }), + }), + ]), + }), + dict({ + 'other': list([ + ]), + }), + dict({ + 'irrigation': list([ + ]), + }), + dict({ + 'scenario': list([ + ]), + }), + ]), + 'last_exception': 'None', + 'last_update success': True, + }), + 'entry': dict({ + 'data': dict({ + 'host': 'fake_host', + 'pin': '**REDACTED**', + 'port': 80, + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'comelit', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 
'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'type': 'Serial bridge', + }) +# --- +# name: test_entry_diagnostics_vedo + dict({ + 'device_info': dict({ + 'devices': list([ + dict({ + 'aree': list([ + dict({ + '0': dict({ + 'alarm': False, + 'alarm_memory': False, + 'anomaly': False, + 'armed': False, + 'human_status': 'unknown', + 'in_time': False, + 'name': 'Area0', + 'out_time': False, + 'p1': True, + 'p2': False, + 'ready': False, + 'sabotage': False, + }), + }), + ]), + }), + dict({ + 'zone': list([ + dict({ + '0': dict({ + 'human_status': 'rest', + 'name': 'Zone0', + 'status': 0, + 'status_api': '0x000', + }), + }), + ]), + }), + ]), + 'last_exception': 'None', + 'last_update success': True, + }), + 'entry': dict({ + 'data': dict({ + 'host': 'fake_vedo_host', + 'pin': '**REDACTED**', + 'port': 8080, + 'type': 'Vedo system', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'comelit', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'type': 'Vedo system', + }) +# --- diff --git a/tests/components/comelit/test_diagnostics.py b/tests/components/comelit/test_diagnostics.py new file mode 100644 index 00000000000000..39d75af1152d3f --- /dev/null +++ b/tests/components/comelit/test_diagnostics.py @@ -0,0 +1,81 @@ +"""Tests for Comelit Simplehome diagnostics platform.""" + +from __future__ import annotations + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.comelit.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .const import ( + BRIDGE_DEVICE_QUERY, + MOCK_USER_BRIDGE_DATA, + MOCK_USER_VEDO_DATA, + VEDO_DEVICE_QUERY, +) + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics_bridge( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test Bridge config entry diagnostics.""" + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiocomelit.api.ComeliteSerialBridgeApi.login"), + patch( + "aiocomelit.api.ComeliteSerialBridgeApi.get_all_devices", + return_value=BRIDGE_DEVICE_QUERY, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props( + "entry_id", + "created_at", + "modified_at", + ) + ) + + +async def test_entry_diagnostics_vedo( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test Vedo System config entry diagnostics.""" + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_VEDO_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiocomelit.api.ComelitVedoApi.login"), + patch( + "aiocomelit.api.ComelitVedoApi.get_all_areas_and_zones", + return_value=VEDO_DEVICE_QUERY, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert await get_diagnostics_for_config_entry(hass, 
hass_client, entry) == snapshot( + exclude=props( + "entry_id", + "created_at", + "modified_at", + ) + ) diff --git a/tests/components/command_line/test_cover.py b/tests/components/command_line/test_cover.py index b81d915c6d520d..da9d86ba8a5182 100644 --- a/tests/components/command_line/test_cover.py +++ b/tests/components/command_line/test_cover.py @@ -14,7 +14,11 @@ from homeassistant import setup from homeassistant.components.command_line import DOMAIN from homeassistant.components.command_line.cover import CommandCover -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, SCAN_INTERVAL +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SCAN_INTERVAL, + CoverState, +) from homeassistant.components.homeassistant import ( DOMAIN as HA_DOMAIN, SERVICE_UPDATE_ENTITY, @@ -24,7 +28,6 @@ SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, - STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -389,7 +392,7 @@ async def test_availability( entity_state = hass.states.get("cover.test") assert entity_state - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN hass.states.async_set("sensor.input1", "off") await hass.async_block_till_done() diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 6fac86b6c81c5d..b96aa9ae006b97 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -393,6 +393,10 @@ async def test_available_flows( ############################ +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.error.Should be unique."], +) async def test_initialize_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can initialize a flow.""" mock_platform(hass, "test.config_flow", None) @@ -500,6 +504,10 @@ async def async_step_user(self, user_input=None): assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.abort.bla"], +) async def test_abort(hass: HomeAssistant, client: TestClient) -> None: """Test a flow that aborts.""" mock_platform(hass, "test.config_flow", None) @@ -768,6 +776,10 @@ async def test_get_progress_index_unauth( assert response["error"]["code"] == "unauthorized" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.error.Should be unique."], +) async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can query the API for same result as we get from init a flow.""" mock_platform(hass, "test.config_flow", None) @@ -800,6 +812,10 @@ async def async_step_user(self, user_input=None): assert data == data2 +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.error.Should be unique."], +) async def test_get_progress_flow_unauth( hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: @@ -2351,6 +2367,10 @@ async def async_step_user(self, user_input=None): } +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.config.abort.reconfigure_successful"], +) @pytest.mark.usefixtures("enable_custom_integrations", "freezer") async def test_supports_reconfigure( hass: HomeAssistant, diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 5ac9ba8ec6cbb1..5535ec3b976825 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -5,13 +5,29 @@ from collections.abc import Callable, Generator from 
importlib.util import find_spec from pathlib import Path +import string from typing import TYPE_CHECKING, Any -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch - +from unittest.mock import AsyncMock, MagicMock, patch + +from aiohasupervisor.models import ( + Discovery, + Repository, + ResolutionInfo, + StoreAddon, + StoreInfo, +) import pytest +from homeassistant.config_entries import ( + DISCOVERY_SOURCES, + ConfigEntriesFlowManager, + FlowResult, + OptionsFlowManager, +) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType +from homeassistant.helpers.translation import async_get_translations if TYPE_CHECKING: from homeassistant.components.hassio import AddonManager @@ -185,7 +201,9 @@ def mock_legacy_device_tracker_setup() -> Callable[[HomeAssistant, MockScanner], @pytest.fixture(name="addon_manager") -def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: +def addon_manager_fixture( + hass: HomeAssistant, supervisor_client: AsyncMock +) -> AddonManager: """Return an AddonManager instance.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_manager @@ -194,12 +212,9 @@ def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: @pytest.fixture(name="discovery_info") -def discovery_info_fixture() -> Any: +def discovery_info_fixture() -> list[Discovery]: """Return the discovery info from the supervisor.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_discovery_info - - return mock_discovery_info() + return [] @pytest.fixture(name="discovery_info_side_effect") @@ -210,13 +225,29 @@ def discovery_info_side_effect_fixture() -> Any | None: @pytest.fixture(name="get_addon_discovery_info") def get_addon_discovery_info_fixture( - discovery_info: dict[str, Any], discovery_info_side_effect: Any | None -) -> Generator[AsyncMock]: + supervisor_client: AsyncMock, + discovery_info: list[Discovery], + discovery_info_side_effect: Any | None, +) -> AsyncMock: """Mock get add-on discovery info.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_get_addon_discovery_info + supervisor_client.discovery.list.return_value = discovery_info + supervisor_client.discovery.list.side_effect = discovery_info_side_effect + return supervisor_client.discovery.list + - yield from mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect) +@pytest.fixture(name="get_discovery_message_side_effect") +def get_discovery_message_side_effect_fixture() -> Any | None: + """Side effect for getting a discovery message by uuid.""" + return None + + +@pytest.fixture(name="get_discovery_message") +def get_discovery_message_fixture( + supervisor_client: AsyncMock, get_discovery_message_side_effect: Any | None +) -> AsyncMock: + """Mock getting a discovery message by uuid.""" + supervisor_client.discovery.get.side_effect = get_discovery_message_side_effect + return supervisor_client.discovery.get @pytest.fixture(name="addon_store_info_side_effect") @@ -227,13 +258,14 @@ def addon_store_info_side_effect_fixture() -> Any | None: @pytest.fixture(name="addon_store_info") def addon_store_info_fixture( + supervisor_client: AsyncMock, addon_store_info_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock Supervisor add-on store info.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_store_info - yield from 
mock_addon_store_info(addon_store_info_side_effect) + return mock_addon_store_info(supervisor_client, addon_store_info_side_effect) @pytest.fixture(name="addon_info_side_effect") @@ -245,12 +277,12 @@ def addon_info_side_effect_fixture() -> Any | None: @pytest.fixture(name="addon_info") def addon_info_fixture( supervisor_client: AsyncMock, addon_info_side_effect: Any | None -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock Supervisor add-on info.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_info - yield from mock_addon_info(supervisor_client, addon_info_side_effect) + return mock_addon_info(supervisor_client, addon_info_side_effect) @pytest.fixture(name="addon_not_installed") @@ -300,13 +332,12 @@ def install_addon_side_effect_fixture( @pytest.fixture(name="install_addon") def install_addon_fixture( + supervisor_client: AsyncMock, install_addon_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock install add-on.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_install_addon - - yield from mock_install_addon(install_addon_side_effect) + supervisor_client.store.install_addon.side_effect = install_addon_side_effect + return supervisor_client.store.install_addon @pytest.fixture(name="start_addon_side_effect") @@ -354,10 +385,7 @@ def stop_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: @pytest.fixture(name="addon_options") def addon_options_fixture(addon_info: AsyncMock) -> dict[str, Any]: """Mock add-on options.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_addon_options - - return mock_addon_options(addon_info) + return addon_info.return_value.options @pytest.fixture(name="set_addon_options_side_effect") @@ -373,13 +401,14 @@ def set_addon_options_side_effect_fixture( @pytest.fixture(name="set_addon_options") def set_addon_options_fixture( + supervisor_client: AsyncMock, set_addon_options_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock set add-on options.""" - # pylint: disable-next=import-outside-toplevel - from .hassio.common import mock_set_addon_options - - yield from mock_set_addon_options(set_addon_options_side_effect) + supervisor_client.addons.set_addon_options.side_effect = ( + set_addon_options_side_effect + ) + return supervisor_client.addons.set_addon_options @pytest.fixture(name="uninstall_addon") @@ -398,12 +427,77 @@ def create_backup_fixture() -> Generator[AsyncMock]: @pytest.fixture(name="update_addon") -def update_addon_fixture() -> Generator[AsyncMock]: +def update_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock: """Mock update add-on.""" + return supervisor_client.store.update_addon + + +@pytest.fixture(name="store_addons") +def store_addons_fixture() -> list[StoreAddon]: + """Mock store addons list.""" + return [] + + +@pytest.fixture(name="store_repositories") +def store_repositories_fixture() -> list[Repository]: + """Mock store repositories list.""" + return [] + + +@pytest.fixture(name="store_info") +def store_info_fixture( + supervisor_client: AsyncMock, + store_addons: list[StoreAddon], + store_repositories: list[Repository], +) -> AsyncMock: + """Mock store info.""" + supervisor_client.store.info.return_value = StoreInfo( + addons=store_addons, repositories=store_repositories + ) + return supervisor_client.store.info + + +@pytest.fixture(name="addon_stats") +def addon_stats_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock addon stats info.""" # pylint: 
disable-next=import-outside-toplevel - from .hassio.common import mock_update_addon + from .hassio.common import mock_addon_stats + + return mock_addon_stats(supervisor_client) + - yield from mock_update_addon() +@pytest.fixture(name="addon_changelog") +def addon_changelog_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock addon changelog.""" + supervisor_client.store.addon_changelog.return_value = "" + return supervisor_client.store.addon_changelog + + +@pytest.fixture(name="supervisor_is_connected") +def supervisor_is_connected_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock supervisor is connected.""" + supervisor_client.supervisor.ping.return_value = None + return supervisor_client.supervisor.ping + + +@pytest.fixture(name="resolution_info") +def resolution_info_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock resolution info from supervisor.""" + supervisor_client.resolution.info.return_value = ResolutionInfo( + suggestions=[], + unsupported=[], + unhealthy=[], + issues=[], + checks=[], + ) + return supervisor_client.resolution.info + + +@pytest.fixture(name="resolution_suggestions_for_issue") +def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock suggestions by issue from supervisor resolution.""" + supervisor_client.resolution.suggestions_for_issue.return_value = [] + return supervisor_client.resolution.suggestions_for_issue @pytest.fixture(name="supervisor_client") @@ -411,6 +505,11 @@ def supervisor_client() -> Generator[AsyncMock]: """Mock the supervisor client.""" supervisor_client = AsyncMock() supervisor_client.addons = AsyncMock() + supervisor_client.discovery = AsyncMock() + supervisor_client.homeassistant = AsyncMock() + supervisor_client.os = AsyncMock() + supervisor_client.resolution = AsyncMock() + supervisor_client.supervisor = AsyncMock() with ( patch( "homeassistant.components.hassio.get_supervisor_client", @@ -425,8 +524,164 @@ def supervisor_client() -> Generator[AsyncMock]: return_value=supervisor_client, ), patch( - "homeassistant.components.hassio.handler.HassIO.client", - new=PropertyMock(return_value=supervisor_client), + "homeassistant.components.hassio.discovery.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.coordinator.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.issues.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.repairs.get_supervisor_client", + return_value=supervisor_client, ), ): yield supervisor_client + + +def _validate_translation_placeholders( + full_key: str, + translation: str, + description_placeholders: dict[str, str] | None, +) -> str | None: + """Raise if translation exists with missing placeholders.""" + tuples = list(string.Formatter().parse(translation)) + for _, placeholder, _, _ in tuples: + if placeholder is None: + continue + if ( + description_placeholders is None + or placeholder not in description_placeholders + ): + pytest.fail( + f"Description not found for placeholder `{placeholder}` in {full_key}" + ) + + +async def _ensure_translation_exists( + hass: HomeAssistant, + ignore_translations: dict[str, StoreInfo], + category: str, + component: str, + key: str, + description_placeholders: dict[str, str] | None, + *, + translation_required: bool = True, +) -> None: + """Raise if translation doesn't exist.""" + full_key = f"component.{component}.{category}.{key}" + 
translations = await async_get_translations(hass, "en", category, [component]) + if (translation := translations.get(full_key)) is not None: + _validate_translation_placeholders( + full_key, translation, description_placeholders + ) + return + + if not translation_required: + return + + if full_key in ignore_translations: + ignore_translations[full_key] = "used" + return + + pytest.fail( + f"Translation not found for {component}: `{category}.{key}`. " + f"Please add to homeassistant/components/{component}/strings.json" + ) + + +@pytest.fixture +def ignore_translations() -> str | list[str]: + """Ignore specific translations. + + Override or parametrize this fixture with a fixture that returns, + a list of translation that should be ignored. + """ + return [] + + +@pytest.fixture(autouse=True) +def check_config_translations(ignore_translations: str | list[str]) -> Generator[None]: + """Ensure config_flow translations are available.""" + if not isinstance(ignore_translations, list): + ignore_translations = [ignore_translations] + + _ignore_translations = {k: "unused" for k in ignore_translations} + _original = FlowManager._async_handle_step + + async def _async_handle_step( + self: FlowManager, flow: FlowHandler, *args + ) -> FlowResult: + result = await _original(self, flow, *args) + if isinstance(self, ConfigEntriesFlowManager): + category = "config" + component = flow.handler + elif isinstance(self, OptionsFlowManager): + category = "options" + component = flow.hass.config_entries.async_get_entry(flow.handler).domain + else: + return result + + # Check if this flow has been seen before + # Gets set to False on first run, and to True on subsequent runs + setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) + + if result["type"] is FlowResultType.FORM: + if step_id := result.get("step_id"): + # neither title nor description are required + # - title defaults to integration name + # - description is optional + for header in ("title", "description"): + await _ensure_translation_exists( + flow.hass, + _ignore_translations, + category, + component, + f"step.{step_id}.{header}", + result["description_placeholders"], + translation_required=False, + ) + if errors := result.get("errors"): + for error in errors.values(): + await _ensure_translation_exists( + flow.hass, + _ignore_translations, + category, + component, + f"error.{error}", + result["description_placeholders"], + ) + return result + + if result["type"] is FlowResultType.ABORT: + # We don't need translations for a discovery flow which immediately + # aborts, since such flows won't be seen by users + if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES: + return result + await _ensure_translation_exists( + flow.hass, + _ignore_translations, + category, + component, + f"abort.{result["reason"]}", + result["description_placeholders"], + ) + + return result + + with patch( + "homeassistant.data_entry_flow.FlowManager._async_handle_step", + _async_handle_step, + ): + yield + + unused_ignore = [k for k, v in _ignore_translations.items() if v == "unused"] + if unused_ignore: + pytest.fail( + f"Unused ignore translations: {', '.join(unused_ignore)}. " + "Please remove them from the ignore_translations fixture." 
+ ) diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index 051613f03002a9..b1f2ea0db75e4a 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -168,7 +168,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, kitchen light is not exposed', }), }), }), @@ -358,7 +358,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, kitchen light is not exposed', }), }), }), diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index fd02646df48331..08aca43aba520a 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -23,7 +23,6 @@ 'fa', 'fi', 'fr', - 'fr-CA', 'gl', 'gu', 'he', @@ -55,6 +54,7 @@ 'sv', 'sw', 'te', + 'th', 'tr', 'uk', 'ur', diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index cf9d575ebe0a64..9f54671d8a1b17 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -14,6 +14,7 @@ from homeassistant.components import conversation, cover, media_player from homeassistant.components.conversation import default_agent from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY +from homeassistant.components.conversation.default_agent import METADATA_CUSTOM_SENTENCE from homeassistant.components.conversation.models import ConversationInput from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.homeassistant.exposed_entities import ( @@ -417,6 +418,44 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None: assert len(callback.mock_calls) == 0 +@pytest.mark.parametrize( + ("language", "expected"), + [("en", "English done"), ("de", "German done"), ("not_translated", "Done")], +) +@pytest.mark.usefixtures("init_components") +async def test_trigger_sentence_response_translation( + hass: HomeAssistant, language: str, expected: str +) -> None: + """Test translation of default response 'done'.""" + hass.config.language = language + + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + translations = { + "en": {"component.conversation.conversation.agent.done": "English done"}, + "de": {"component.conversation.conversation.agent.done": "German done"}, + "not_translated": {}, + } + + with patch( + "homeassistant.components.conversation.default_agent.translation.async_get_translations", + return_value=translations.get(language), + ): + unregister = agent.register_trigger( + ["test sentence"], AsyncMock(return_value=None) + ) + result = await conversation.async_converse( + hass, "test sentence", None, Context() + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech == { + "plain": {"speech": expected, "extra_data": None} + } + + unregister() + + @pytest.mark.usefixtures("init_components", "sl_setup") async def test_shopping_list_add_item(hass: HomeAssistant) -> None: """Test adding an item to the shopping list through the default agent.""" @@ -430,7 +469,7 @@ async def 
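A usage note on the translation check introduced in the tests/components/conftest.py changes above: the autouse check_config_translations fixture fails a test whenever a config or options flow returns a form error or abort reason with no entry in the integration's strings.json, and the ignore_translations fixture is the escape hatch. The sketch below mirrors the parametrize pattern used in tests/components/config/test_config_entries.py above; the integration name and translation key are hypothetical.

# Sketch: opting a single test out of the autouse config-flow translation check.
# "my_integration" and the abort reason below are placeholders, not real keys.
import pytest

from homeassistant.core import HomeAssistant


@pytest.mark.parametrize(
    "ignore_translations",
    ["component.my_integration.config.abort.not_yet_translated"],
)
async def test_flow_aborts_without_translation(hass: HomeAssistant) -> None:
    """Exercise a flow whose abort reason has no strings.json entry yet."""
    # ... start the config flow here; without the parametrize above, the
    # autouse fixture would fail the test for the missing translation.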
test_shopping_list_add_item(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("init_components") -async def test_nevermind_item(hass: HomeAssistant) -> None: +async def test_nevermind_intent(hass: HomeAssistant) -> None: """Test HassNevermind intent through the default agent.""" result = await conversation.async_converse(hass, "nevermind", None, Context()) assert result.response.intent is not None @@ -440,6 +479,17 @@ async def test_nevermind_item(hass: HomeAssistant) -> None: assert not result.response.speech + +@pytest.mark.usefixtures("init_components") +async def test_respond_intent(hass: HomeAssistant) -> None: + """Test HassRespond intent through the default agent.""" + result = await conversation.async_converse(hass, "hello", None, Context()) + assert result.response.intent is not None + assert result.response.intent.intent_type == intent.INTENT_RESPOND + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech["plain"]["speech"] == "Hello from Home Assistant." + + @pytest.mark.usefixtures("init_components") async def test_device_area_context( hass: HomeAssistant, @@ -581,7 +631,7 @@ async def test_device_area_context( @pytest.mark.usefixtures("init_components") async def test_error_no_device(hass: HomeAssistant) -> None: - """Test error message when device/entity is missing.""" + """Test error message when device/entity doesn't exist.""" result = await conversation.async_converse( hass, "turn on missing entity", None, Context(), None ) @@ -594,9 +644,27 @@ async def test_error_no_device(hass: HomeAssistant) -> None: ) +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_exposed(hass: HomeAssistant) -> None: + """Test error message when device/entity exists but is not exposed.""" + hass.states.async_set("light.kitchen_light", "off") + expose_entity(hass, "light.kitchen_light", False) + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, kitchen light is not exposed" + ) + + @pytest.mark.usefixtures("init_components") async def test_error_no_area(hass: HomeAssistant) -> None: - """Test error message when area is missing.""" + """Test error message when area doesn't exist.""" result = await conversation.async_converse( hass, "turn on the lights in missing area", None, Context(), None ) @@ -611,7 +679,7 @@ async def test_error_no_area(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("init_components") async def test_error_no_floor(hass: HomeAssistant) -> None: - """Test error message when floor is missing.""" + """Test error message when floor doesn't exist.""" result = await conversation.async_converse( hass, "turn on all the lights on missing floor", None, Context(), None ) @@ -628,7 +696,7 @@ async def test_error_no_device_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry ) -> None: - """Test error message when area is missing a device/entity.""" + """Test error message when area exists but does not contain a device/entity.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") result = await conversation.async_converse( @@ -643,6 +711,119 @@ async def 
test_error_no_device_in_area( ) + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_on_floor( + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when floor exists but does not contain a device/entity.""" + floor_registry.async_create("ground") + result = await conversation.async_converse( + hass, "turn on missing entity on ground floor", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any device called missing entity on ground floor" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_on_floor_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when a device/entity exists on a floor but isn't exposed.""" + floor_ground = floor_registry.async_create("ground") + + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update( + area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id + ) + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + name="test light", + area_id=area_kitchen.id, + ) + hass.states.async_set( + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, + ) + expose_entity(hass, kitchen_light.entity_id, False) + await hass.async_block_till_done() + + # We don't have a sentence for turning on devices by floor + name = MatchEntity(name="name", value=kitchen_light.name, text=kitchen_light.name) + floor = MatchEntity(name="floor", value=floor_ground.name, text=floor_ground.name) + recognize_result = RecognizeResult( + intent=Intent("HassTurnOn"), + intent_data=IntentData([]), + entities={"name": name, "floor": floor}, + entities_list=[name, floor], + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], + ): + result = await conversation.async_converse( + hass, "turn on test light on the ground floor", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert ( + result.response.error_code + == intent.IntentResponseErrorCode.NO_VALID_TARGETS + ) + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, test light in the ground floor is not exposed" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_in_area_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, +) -> None: + """Test error message when a device/entity exists in an area but isn't exposed.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + name="test light", + area_id=area_kitchen.id, + ) + hass.states.async_set( + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, + ) + expose_entity(hass, kitchen_light.entity_id, False) + await hass.async_block_till_done() + + result = 
await conversation.async_converse( + hass, "turn on test light in the kitchen", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, test light in the kitchen area is not exposed" + ) + + @pytest.mark.usefixtures("init_components") async def test_error_no_domain(hass: HomeAssistant) -> None: """Test error message when no devices/entities exist for a domain.""" @@ -675,6 +856,38 @@ async def test_error_no_domain(hass: HomeAssistant) -> None: ) +@pytest.mark.usefixtures("init_components") +async def test_error_no_domain_exposed(hass: HomeAssistant) -> None: + """Test error message when devices/entities exist for a domain but are not exposed.""" + hass.states.async_set("fan.test_fan", "off") + expose_entity(hass, "fan.test_fan", False) + await hass.async_block_till_done() + + # We don't have a sentence for turning on all fans + fan_domain = MatchEntity(name="domain", value="fan", text="fans") + recognize_result = RecognizeResult( + intent=Intent("HassTurnOn"), + intent_data=IntentData([]), + entities={"domain": fan_domain}, + entities_list=[fan_domain], + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], + ): + result = await conversation.async_converse( + hass, "turn on the fans", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert ( + result.response.error_code + == intent.IntentResponseErrorCode.NO_VALID_TARGETS + ) + assert result.response.speech["plain"]["speech"] == "Sorry, no fan is exposed" + + @pytest.mark.usefixtures("init_components") async def test_error_no_domain_in_area( hass: HomeAssistant, area_registry: ar.AreaRegistry @@ -695,7 +908,43 @@ async def test_error_no_domain_in_area( @pytest.mark.usefixtures("init_components") -async def test_error_no_domain_in_floor( +async def test_error_no_domain_in_area_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, +) -> None: + """Test error message when devices/entities for a domain exist in an area but are not exposed.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + name="test light", + area_id=area_kitchen.id, + ) + hass.states.async_set( + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, + ) + expose_entity(hass, kitchen_light.entity_id, False) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on the lights in the kitchen", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, no light in the kitchen area is exposed" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_domain_on_floor( hass: HomeAssistant, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, @@ -717,45 +966,244 @@ async def test_error_no_domain_in_floor( == "Sorry, I am not aware of any light on the 
ground floor" ) - # Add a new floor/area to trigger registry event handlers - floor_upstairs = floor_registry.async_create("upstairs") + # Add a new floor/area to trigger registry event handlers + floor_upstairs = floor_registry.async_create("upstairs") + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update( + area_bedroom.id, name="bedroom", floor_id=floor_upstairs.floor_id + ) + + result = await conversation.async_converse( + hass, "turn on all lights upstairs", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any light on the upstairs floor" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_domain_on_floor_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when devices/entities for a domain exist on a floor but are not exposed.""" + floor_ground = floor_registry.async_create("ground") + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update( + area_kitchen.id, name="kitchen", floor_id=floor_ground.floor_id + ) + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + name="test light", + area_id=area_kitchen.id, + ) + hass.states.async_set( + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: kitchen_light.name}, + ) + expose_entity(hass, kitchen_light.entity_id, False) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on all lights on the ground floor", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, no light in the ground floor is exposed" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_class(hass: HomeAssistant) -> None: + """Test error message when no entities of a device class exist.""" + # Create a cover entity that is not a window. + # This ensures that the filtering below won't exit early because there are + # no entities in the cover domain. 
+ hass.states.async_set( + "cover.garage_door", + STATE_CLOSED, + attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.GARAGE}, + ) + + # We don't have a sentence for opening all windows + cover_domain = MatchEntity(name="domain", value="cover", text="cover") + window_class = MatchEntity(name="device_class", value="window", text="windows") + recognize_result = RecognizeResult( + intent=Intent("HassTurnOn"), + intent_data=IntentData([]), + entities={"domain": cover_domain, "device_class": window_class}, + entities_list=[cover_domain, window_class], + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], + ): + result = await conversation.async_converse( + hass, "open the windows", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert ( + result.response.error_code + == intent.IntentResponseErrorCode.NO_VALID_TARGETS + ) + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any window" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_class_exposed(hass: HomeAssistant) -> None: + """Test error message when entities of a device class exist but aren't exposed.""" + # Create a cover entity that is not a window. + # This ensures that the filtering below won't exit early because there are + # no entities in the cover domain. + hass.states.async_set( + "cover.garage_door", + STATE_CLOSED, + attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.GARAGE}, + ) + + # Create a window and ensure it's not exposed + hass.states.async_set( + "cover.test_window", + STATE_CLOSED, + attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW}, + ) + expose_entity(hass, "cover.test_window", False) + + # We don't have a sentence for opening all windows + cover_domain = MatchEntity(name="domain", value="cover", text="cover") + window_class = MatchEntity(name="device_class", value="window", text="windows") + recognize_result = RecognizeResult( + intent=Intent("HassTurnOn"), + intent_data=IntentData([]), + entities={"domain": cover_domain, "device_class": window_class}, + entities_list=[cover_domain, window_class], + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], + ): + result = await conversation.async_converse( + hass, "open all the windows", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert ( + result.response.error_code + == intent.IntentResponseErrorCode.NO_VALID_TARGETS + ) + assert ( + result.response.speech["plain"]["speech"] == "Sorry, no window is exposed" + ) + + +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_class_in_area( + hass: HomeAssistant, area_registry: ar.AreaRegistry +) -> None: + """Test error message when no entities of a device class exist in an area.""" + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") + result = await conversation.async_converse( + hass, "open bedroom windows", None, Context(), None + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS + assert ( + result.response.speech["plain"]["speech"] + == "Sorry, I am not aware of any window in the bedroom area" + ) + + +@pytest.mark.usefixtures("init_components") +async def 
test_error_no_device_class_in_area_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, +) -> None: + """Test error message when entities of a device class exist in an area but are not exposed.""" area_bedroom = area_registry.async_get_or_create("bedroom_id") - area_bedroom = area_registry.async_update( - area_bedroom.id, name="bedroom", floor_id=floor_upstairs.floor_id + area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") + bedroom_window = entity_registry.async_get_or_create("cover", "demo", "1234") + bedroom_window = entity_registry.async_update_entity( + bedroom_window.entity_id, + name="test cover", + area_id=area_bedroom.id, + ) + hass.states.async_set( + bedroom_window.entity_id, + "off", + attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW}, ) + expose_entity(hass, bedroom_window.entity_id, False) + await hass.async_block_till_done() result = await conversation.async_converse( - hass, "turn on all lights upstairs", None, Context(), None + hass, "open bedroom windows", None, Context(), None ) assert result.response.response_type == intent.IntentResponseType.ERROR assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS assert ( result.response.speech["plain"]["speech"] - == "Sorry, I am not aware of any light on the upstairs floor" + == "Sorry, no window in the bedroom area is exposed" ) @pytest.mark.usefixtures("init_components") -async def test_error_no_device_class(hass: HomeAssistant) -> None: - """Test error message when no entities of a device class exist.""" - # Create a cover entity that is not a window. - # This ensures that the filtering below won't exit early because there are - # no entities in the cover domain. +async def test_error_no_device_class_on_floor_exposed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test error message when entities of a device class exist on a floor but are not exposed.""" + floor_ground = floor_registry.async_create("ground") + + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update( + area_bedroom.id, name="bedroom", floor_id=floor_ground.floor_id + ) + bedroom_window = entity_registry.async_get_or_create("cover", "demo", "1234") + bedroom_window = entity_registry.async_update_entity( + bedroom_window.entity_id, + name="test cover", + area_id=area_bedroom.id, + ) hass.states.async_set( - "cover.garage_door", - STATE_CLOSED, - attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.GARAGE}, + bedroom_window.entity_id, + "off", + attributes={ATTR_DEVICE_CLASS: cover.CoverDeviceClass.WINDOW}, ) + expose_entity(hass, bedroom_window.entity_id, False) + await hass.async_block_till_done() - # We don't have a sentence for opening all windows + # We don't have a sentence for opening all windows on a floor cover_domain = MatchEntity(name="domain", value="cover", text="cover") window_class = MatchEntity(name="device_class", value="window", text="windows") + floor = MatchEntity(name="floor", value=floor_ground.name, text=floor_ground.name) recognize_result = RecognizeResult( intent=Intent("HassTurnOn"), intent_data=IntentData([]), - entities={"domain": cover_domain, "device_class": window_class}, - entities_list=[cover_domain, window_class], + entities={"domain": cover_domain, "device_class": window_class, "floor": floor}, + entities_list=[cover_domain, window_class, floor], ) with patch(
@@ -763,7 +1211,7 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None: return_value=[recognize_result], ): result = await conversation.async_converse( - hass, "open the windows", None, Context(), None + hass, "open ground floor windows", None, Context(), None ) assert result.response.response_type == intent.IntentResponseType.ERROR @@ -773,29 +1221,10 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None: ) assert ( result.response.speech["plain"]["speech"] - == "Sorry, I am not aware of any window" + == "Sorry, no window in the ground floor is exposed" ) -@pytest.mark.usefixtures("init_components") -async def test_error_no_device_class_in_area( - hass: HomeAssistant, area_registry: ar.AreaRegistry -) -> None: - """Test error message when no entities of a device class exist in an area.""" - area_bedroom = area_registry.async_get_or_create("bedroom_id") - area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") - result = await conversation.async_converse( - hass, "open bedroom windows", None, Context(), None - ) - - assert result.response.response_type == intent.IntentResponseType.ERROR - assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS - assert ( - result.response.speech["plain"]["speech"] - == "Sorry, I am not aware of any window in the bedroom area" - ) - - @pytest.mark.usefixtures("init_components") async def test_error_no_intent(hass: HomeAssistant) -> None: """Test response with an intent match failure.""" @@ -870,12 +1299,48 @@ async def test_error_duplicate_names( @pytest.mark.usefixtures("init_components") -async def test_error_duplicate_names_in_area( +async def test_duplicate_names_but_one_is_exposed( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test when multiple devices have the same name (or alias), but only one of them is exposed.""" + kitchen_light_1 = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light_2 = entity_registry.async_get_or_create("light", "demo", "5678") + + # Same name and alias + for light in (kitchen_light_1, kitchen_light_2): + light = entity_registry.async_update_entity( + light.entity_id, + name="kitchen light", + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + + # Only expose one + expose_entity(hass, kitchen_light_1.entity_id, True) + expose_entity(hass, kitchen_light_2.entity_id, False) + + # Check name and alias + async_mock_service(hass, "light", "turn_on") + for name in ("kitchen light", "overhead light"): + # command + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == kitchen_light_1.entity_id + + +@pytest.mark.usefixtures("init_components") +async def test_error_duplicate_names_same_area( hass: HomeAssistant, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, ) -> None: - """Test error message when multiple devices have the same name (or alias).""" + """Test error message when multiple devices have the same name (or alias) in the same area.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") @@ -927,6 +1392,127 @@ async def test_error_duplicate_names_in_area( ) +@pytest.mark.usefixtures("init_components") +async def 
test_duplicate_names_same_area_but_one_is_exposed( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test when multiple devices have the same name (or alias) in the same area but only one is exposed.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + kitchen_light_1 = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light_2 = entity_registry.async_get_or_create("light", "demo", "5678") + + # Same name and alias + for light in (kitchen_light_1, kitchen_light_2): + light = entity_registry.async_update_entity( + light.entity_id, + name="kitchen light", + area_id=area_kitchen.id, + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + + # Only expose one + expose_entity(hass, kitchen_light_1.entity_id, True) + expose_entity(hass, kitchen_light_2.entity_id, False) + + # Check name and alias + async_mock_service(hass, "light", "turn_on") + for name in ("kitchen light", "overhead light"): + # command + result = await conversation.async_converse( + hass, f"turn on {name} in {area_kitchen.name}", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == kitchen_light_1.entity_id + + +@pytest.mark.usefixtures("init_components") +async def test_duplicate_names_different_areas( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test preferred area when multiple devices have the same name (or alias) in different areas.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") + + area_bedroom = area_registry.async_get_or_create("bedroom_id") + area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, area_id=area_kitchen.id + ) + bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") + bedroom_light = entity_registry.async_update_entity( + bedroom_light.entity_id, area_id=area_bedroom.id + ) + + # Same name and alias + for light in (kitchen_light, bedroom_light): + light = entity_registry.async_update_entity( + light.entity_id, + name="test light", + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + + # Add a satellite in the kitchen and bedroom + kitchen_entry = MockConfigEntry() + kitchen_entry.add_to_hass(hass) + device_kitchen = device_registry.async_get_or_create( + config_entry_id=kitchen_entry.entry_id, + connections=set(), + identifiers={("demo", "device-kitchen")}, + ) + device_registry.async_update_device(device_kitchen.id, area_id=area_kitchen.id) + + bedroom_entry = MockConfigEntry() + bedroom_entry.add_to_hass(hass) + device_bedroom = device_registry.async_get_or_create( + config_entry_id=bedroom_entry.entry_id, + connections=set(), + identifiers={("demo", "device-bedroom")}, + ) + device_registry.async_update_device(device_bedroom.id, area_id=area_bedroom.id) + + # Check name and alias + async_mock_service(hass, "light", "turn_on") + for name in ("test 
light", "overhead light"): + # Should fail without a preferred area + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ERROR + + # Target kitchen light by using kitchen device + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None, device_id=device_kitchen.id + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == kitchen_light.entity_id + + # Target bedroom light by using bedroom device + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), None, device_id=device_bedroom.id + ) + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.matched_states[0].entity_id == bedroom_light.entity_id + + @pytest.mark.usefixtures("init_components") async def test_error_wrong_state(hass: HomeAssistant) -> None: """Test error message when no entities are in the correct state.""" @@ -2015,13 +2601,15 @@ async def test_light_area_same_name( device_registry.async_update_device(device.id, area_id=kitchen_area.id) kitchen_light = entity_registry.async_get_or_create( - "light", "demo", "1234", original_name="kitchen light" + "light", "demo", "1234", original_name="light in the kitchen" ) entity_registry.async_update_entity( kitchen_light.entity_id, area_id=kitchen_area.id ) hass.states.async_set( - kitchen_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} + kitchen_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: "light in the kitchen"}, ) ceiling_light = entity_registry.async_get_or_create( @@ -2034,12 +2622,19 @@ async def test_light_area_same_name( ceiling_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "ceiling light"} ) + bathroom_light = entity_registry.async_get_or_create( + "light", "demo", "9012", original_name="light" + ) + hass.states.async_set( + bathroom_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "light"} + ) + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") await hass.services.async_call( "conversation", "process", - {conversation.ATTR_TEXT: "turn on kitchen light"}, + {conversation.ATTR_TEXT: "turn on light in the kitchen"}, ) await hass.async_block_till_done() @@ -2113,7 +2708,10 @@ async def test_config_sentences_priority( hass_admin_user: MockUser, snapshot: SnapshotAssertion, ) -> None: - """Test that user intents from configuration.yaml have priority over builtin intents/sentences.""" + """Test that user intents from configuration.yaml have priority over builtin intents/sentences. + + Also test that they follow proper selection logic. + """ # Add a custom sentence that would match a builtin sentence. # Custom sentences have priority. 
assert await async_setup_component(hass, "homeassistant", {}) @@ -2121,13 +2719,36 @@ async def test_config_sentences_priority( assert await async_setup_component( hass, "conversation", - {"conversation": {"intents": {"CustomIntent": ["turn on the lamp"]}}}, + { + "conversation": { + "intents": { + "CustomIntent": ["turn on "], + "WorseCustomIntent": ["turn on the lamp"], + "FakeCustomIntent": ["turn on "], + } + } + }, ) + + # Fake intent not being custom + intents = ( + await conversation.async_get_agent(hass).async_get_or_load_intents( + hass.config.language + ) + ).intents.intents + intents["FakeCustomIntent"].data[0].metadata[METADATA_CUSTOM_SENTENCE] = False + assert await async_setup_component(hass, "light", {}) assert await async_setup_component( hass, "intent_script", - {"intent_script": {"CustomIntent": {"speech": {"text": "custom response"}}}}, + { + "intent_script": { + "CustomIntent": {"speech": {"text": "custom response"}}, + "WorseCustomIntent": {"speech": {"text": "worse custom response"}}, + "FakeCustomIntent": {"speech": {"text": "fake custom response"}}, + } + }, ) # Ensure that a "lamp" exists so that we can verify the custom intent @@ -2144,3 +2765,71 @@ async def test_config_sentences_priority( data = result.as_dict() assert data["response"]["response_type"] == "action_done" assert data["response"]["speech"]["plain"]["speech"] == "custom response" + + +async def test_query_same_name_different_areas( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test asking a question about entities with the same name in different areas.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + kitchen_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(kitchen_device.id, area_id=kitchen_area.id) + + kitchen_light = entity_registry.async_get_or_create( + "light", + "demo", + "1234", + ) + entity_registry.async_update_entity( + kitchen_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + kitchen_light.entity_id, + "on", + attributes={ATTR_FRIENDLY_NAME: "overhead light"}, + ) + + bedroom_area = area_registry.async_create("bedroom") + bedroom_light = entity_registry.async_get_or_create( + "light", + "demo", + "5678", + ) + entity_registry.async_update_entity( + bedroom_light.entity_id, area_id=bedroom_area.id + ) + hass.states.async_set( + bedroom_light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: "overhead light"}, + ) + + # Should fail without a preferred area (duplicate name) + result = await conversation.async_converse( + hass, "is the overhead light on?", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ERROR + + # Succeeds using area from device (kitchen) + result = await conversation.async_converse( + hass, + "is the overhead light on?", + None, + Context(), + None, + device_id=kitchen_device.id, + ) + assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(result.response.matched_states) == 1 + assert result.response.matched_states[0].entity_id == kitchen_light.entity_id diff --git a/tests/components/cover/common.py b/tests/components/cover/common.py index d9f67e73f17b8a..b4a0cdb06d411e 100644 --- a/tests/components/cover/common.py +++ 
b/tests/components/cover/common.py @@ -2,8 +2,7 @@ from typing import Any -from homeassistant.components.cover import CoverEntity, CoverEntityFeature -from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING +from homeassistant.components.cover import CoverEntity, CoverEntityFeature, CoverState from tests.common import MockEntity @@ -26,7 +25,7 @@ def __init__( @property def is_closed(self): """Return if the cover is closed or not.""" - if "state" in self._values and self._values["state"] == STATE_CLOSED: + if "state" in self._values and self._values["state"] == CoverState.CLOSED: return True return self.current_cover_position == 0 @@ -35,7 +34,7 @@ def is_closed(self): def is_opening(self): """Return if the cover is opening or not.""" if "state" in self._values: - return self._values["state"] == STATE_OPENING + return self._values["state"] == CoverState.OPENING return False @@ -43,28 +42,28 @@ def is_opening(self): def is_closing(self): """Return if the cover is closing or not.""" if "state" in self._values: - return self._values["state"] == STATE_CLOSING + return self._values["state"] == CoverState.CLOSING return False def open_cover(self, **kwargs) -> None: """Open cover.""" if self._reports_opening_closing: - self._values["state"] = STATE_OPENING + self._values["state"] = CoverState.OPENING else: - self._values["state"] = STATE_OPEN + self._values["state"] = CoverState.OPEN def close_cover(self, **kwargs) -> None: """Close cover.""" if self._reports_opening_closing: - self._values["state"] = STATE_CLOSING + self._values["state"] = CoverState.CLOSING else: - self._values["state"] = STATE_CLOSED + self._values["state"] = CoverState.CLOSED def stop_cover(self, **kwargs) -> None: """Stop cover.""" assert CoverEntityFeature.STOP in self.supported_features - self._values["state"] = STATE_CLOSED if self.is_closed else STATE_OPEN + self._values["state"] = CoverState.CLOSED if self.is_closed else CoverState.OPEN @property def current_cover_position(self): diff --git a/tests/components/cover/test_device_condition.py b/tests/components/cover/test_device_condition.py index 8c1d2d1c9a742c..aa5f150172cf05 100644 --- a/tests/components/cover/test_device_condition.py +++ b/tests/components/cover/test_device_condition.py @@ -4,17 +4,9 @@ from pytest_unordered import unordered from homeassistant.components import automation -from homeassistant.components.cover import DOMAIN, CoverEntityFeature +from homeassistant.components.cover import DOMAIN, CoverEntityFeature, CoverState from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - CONF_PLATFORM, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.const import CONF_PLATFORM, STATE_UNAVAILABLE, EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -365,7 +357,7 @@ async def test_if_state( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) assert await async_setup_component( hass, @@ -469,21 +461,21 @@ async def test_if_state( assert len(service_calls) == 1 assert service_calls[0].data["some"] == "is_open - event - test_event1" - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, 
CoverState.CLOSED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() assert len(service_calls) == 2 assert service_calls[1].data["some"] == "is_closed - event - test_event2" - hass.states.async_set(entry.entity_id, STATE_OPENING) + hass.states.async_set(entry.entity_id, CoverState.OPENING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event3") await hass.async_block_till_done() assert len(service_calls) == 3 assert service_calls[2].data["some"] == "is_opening - event - test_event3" - hass.states.async_set(entry.entity_id, STATE_CLOSING) + hass.states.async_set(entry.entity_id, CoverState.CLOSING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event4") await hass.async_block_till_done() @@ -508,7 +500,7 @@ async def test_if_state_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) assert await async_setup_component( hass, @@ -675,7 +667,7 @@ async def test_if_position( assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 45} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() @@ -688,7 +680,7 @@ async def test_if_position( assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 90} ) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") @@ -835,7 +827,7 @@ async def test_if_tilt_position( assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 45} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() @@ -848,7 +840,7 @@ async def test_if_tilt_position( assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 90} ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() diff --git a/tests/components/cover/test_device_trigger.py b/tests/components/cover/test_device_trigger.py index 5eb8cd484b2dc2..e6021d2232671e 100644 --- a/tests/components/cover/test_device_trigger.py +++ b/tests/components/cover/test_device_trigger.py @@ -6,16 +6,9 @@ from pytest_unordered import unordered from homeassistant.components import automation -from homeassistant.components.cover import DOMAIN, CoverEntityFeature +from homeassistant.components.cover import DOMAIN, CoverEntityFeature, CoverState from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import ( - CONF_PLATFORM, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - EntityCategory, -) +from homeassistant.const import CONF_PLATFORM, EntityCategory from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider @@ -387,7 +380,7 @@ async def 
test_if_fires_on_state_change( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) assert await async_setup_component( hass, @@ -487,7 +480,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is opened. - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -496,7 +489,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is closed. - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) await hass.async_block_till_done() assert len(service_calls) == 2 assert ( @@ -505,7 +498,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is opening. - hass.states.async_set(entry.entity_id, STATE_OPENING) + hass.states.async_set(entry.entity_id, CoverState.OPENING) await hass.async_block_till_done() assert len(service_calls) == 3 assert ( @@ -514,7 +507,7 @@ async def test_if_fires_on_state_change( ) # Fake that the entity is closing. - hass.states.async_set(entry.entity_id, STATE_CLOSING) + hass.states.async_set(entry.entity_id, CoverState.CLOSING) await hass.async_block_till_done() assert len(service_calls) == 4 assert ( @@ -540,7 +533,7 @@ async def test_if_fires_on_state_change_legacy( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) assert await async_setup_component( hass, @@ -574,7 +567,7 @@ async def test_if_fires_on_state_change_legacy( ) # Fake that the entity is opened. - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) await hass.async_block_till_done() assert len(service_calls) == 1 assert ( @@ -600,7 +593,7 @@ async def test_if_fires_on_state_change_with_for( DOMAIN, "test", "5678", device_id=device_entry.id ) - hass.states.async_set(entry.entity_id, STATE_CLOSED) + hass.states.async_set(entry.entity_id, CoverState.CLOSED) assert await async_setup_component( hass, @@ -635,7 +628,7 @@ async def test_if_fires_on_state_change_with_for( await hass.async_block_till_done() assert len(service_calls) == 0 - hass.states.async_set(entry.entity_id, STATE_OPEN) + hass.states.async_set(entry.entity_id, CoverState.OPEN) await hass.async_block_till_done() assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) @@ -754,12 +747,14 @@ async def test_if_fires_on_position( ] }, ) - hass.states.async_set(ent.entity_id, STATE_OPEN, attributes={"current_position": 1}) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 95} + ent.entity_id, CoverState.OPEN, attributes={"current_position": 1} + ) + hass.states.async_set( + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 95} ) hass.states.async_set( - ent.entity_id, STATE_OPEN, attributes={"current_position": 50} + ent.entity_id, CoverState.OPEN, attributes={"current_position": 50} ) await hass.async_block_till_done() assert len(service_calls) == 3 @@ -781,11 +776,11 @@ async def test_if_fires_on_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 95} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 95} ) await hass.async_block_till_done() hass.states.async_set( - 
ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 45} ) await hass.async_block_till_done() assert len(service_calls) == 4 @@ -795,7 +790,7 @@ async def test_if_fires_on_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_position": 90} ) await hass.async_block_till_done() assert len(service_calls) == 5 @@ -912,13 +907,13 @@ async def test_if_fires_on_tilt_position( }, ) hass.states.async_set( - ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 1} + ent.entity_id, CoverState.OPEN, attributes={"current_tilt_position": 1} ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 95} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 95} ) hass.states.async_set( - ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 50} + ent.entity_id, CoverState.OPEN, attributes={"current_tilt_position": 50} ) await hass.async_block_till_done() assert len(service_calls) == 3 @@ -940,11 +935,11 @@ async def test_if_fires_on_tilt_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 95} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 95} ) await hass.async_block_till_done() hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 45} ) await hass.async_block_till_done() assert len(service_calls) == 4 @@ -954,7 +949,7 @@ async def test_if_fires_on_tilt_position( ) hass.states.async_set( - ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} + ent.entity_id, CoverState.CLOSED, attributes={"current_tilt_position": 90} ) await hass.async_block_till_done() assert len(service_calls) == 5 diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index d1d84ffad6c0db..6b80dd1ab9af7a 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -5,15 +5,8 @@ import pytest from homeassistant.components import cover -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_PLATFORM, - SERVICE_TOGGLE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, -) +from homeassistant.components.cover import CoverState +from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM, SERVICE_TOGGLE from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers.entity import Entity from homeassistant.setup import async_setup_component @@ -106,15 +99,17 @@ async def test_services( assert is_closing(hass, ent6) # Without STOP but still reports opening/closing has a 4th possible toggle state - set_state(ent6, STATE_CLOSED) + set_state(ent6, CoverState.CLOSED) await call_service(hass, SERVICE_TOGGLE, ent6) assert is_opening(hass, ent6) # After the unusual state transition: closing -> fully open, toggle should close - set_state(ent5, STATE_OPEN) + set_state(ent5, CoverState.OPEN) await call_service(hass, SERVICE_TOGGLE, ent5) # Start closing assert is_closing(hass, ent5) - set_state(ent5, STATE_OPEN) # Unusual state transition from closing -> fully open + set_state( + ent5, CoverState.OPEN + ) # Unusual state transition from closing -> fully open set_cover_position(ent5, 100) await call_service(hass, SERVICE_TOGGLE, ent5) # Should close, not open assert is_closing(hass, ent5) 
@@ -139,22 +134,22 @@ def set_state(ent, state) -> None: def is_open(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_OPEN) + return hass.states.is_state(ent.entity_id, CoverState.OPEN) def is_opening(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_OPENING) + return hass.states.is_state(ent.entity_id, CoverState.OPENING) def is_closed(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_CLOSED) + return hass.states.is_state(ent.entity_id, CoverState.CLOSED) def is_closing(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" - return hass.states.is_state(ent.entity_id, STATE_CLOSING) + return hass.states.is_state(ent.entity_id, CoverState.CLOSING) def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: diff --git a/tests/components/cover/test_intent.py b/tests/components/cover/test_intent.py index 1cf23c4c3df985..383a55e2a72104 100644 --- a/tests/components/cover/test_intent.py +++ b/tests/components/cover/test_intent.py @@ -10,9 +10,9 @@ SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, + CoverState, intent as cover_intent, ) -from homeassistant.const import STATE_CLOSED, STATE_OPEN from homeassistant.core import HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -32,7 +32,9 @@ async def test_open_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> await cover_intent.async_setup_intents(hass) hass.states.async_set( - f"{DOMAIN}.garage_door", STATE_CLOSED, attributes={"device_class": "garage"} + f"{DOMAIN}.garage_door", + CoverState.CLOSED, + attributes={"device_class": "garage"}, ) calls = async_mock_service(hass, DOMAIN, SERVICE_OPEN_COVER) @@ -61,7 +63,7 @@ async def test_close_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> await cover_intent.async_setup_intents(hass) hass.states.async_set( - f"{DOMAIN}.garage_door", STATE_OPEN, attributes={"device_class": "garage"} + f"{DOMAIN}.garage_door", CoverState.OPEN, attributes={"device_class": "garage"} ) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_COVER) @@ -95,7 +97,7 @@ async def test_set_cover_position(hass: HomeAssistant, slots: dict[str, Any]) -> entity_id = f"{DOMAIN}.test_cover" hass.states.async_set( entity_id, - STATE_CLOSED, + CoverState.CLOSED, attributes={ATTR_CURRENT_POSITION: 0, "device_class": "shade"}, ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_COVER_POSITION) diff --git a/tests/components/cover/test_reproduce_state.py b/tests/components/cover/test_reproduce_state.py index f5dd01745d3c8c..4aad27011fa347 100644 --- a/tests/components/cover/test_reproduce_state.py +++ b/tests/components/cover/test_reproduce_state.py @@ -7,6 +7,7 @@ ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, + CoverState, ) from homeassistant.const import ( SERVICE_CLOSE_COVER, @@ -15,8 +16,6 @@ SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, - STATE_CLOSED, - STATE_OPEN, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state @@ -28,32 +27,32 @@ async def test_reproducing_states( hass: HomeAssistant, caplog: 
pytest.LogCaptureFixture ) -> None: """Test reproducing Cover states.""" - hass.states.async_set("cover.entity_close", STATE_CLOSED, {}) + hass.states.async_set("cover.entity_close", CoverState.CLOSED, {}) hass.states.async_set( "cover.entity_close_attr", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ) hass.states.async_set( - "cover.entity_close_tilt", STATE_CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} + "cover.entity_close_tilt", CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} ) - hass.states.async_set("cover.entity_open", STATE_OPEN, {}) + hass.states.async_set("cover.entity_open", CoverState.OPEN, {}) hass.states.async_set( - "cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50} + "cover.entity_slightly_open", CoverState.OPEN, {ATTR_CURRENT_POSITION: 50} ) hass.states.async_set( "cover.entity_open_attr", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, ) hass.states.async_set( "cover.entity_open_tilt", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ) hass.states.async_set( "cover.entity_entirely_open", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ) @@ -70,34 +69,36 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("cover.entity_close", STATE_CLOSED), + State("cover.entity_close", CoverState.CLOSED), State( "cover.entity_close_attr", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_close_tilt", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 50}, ), - State("cover.entity_open", STATE_OPEN), + State("cover.entity_open", CoverState.OPEN), State( - "cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50} + "cover.entity_slightly_open", + CoverState.OPEN, + {ATTR_CURRENT_POSITION: 50}, ), State( "cover.entity_open_attr", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_open_tilt", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ), State( "cover.entity_entirely_open", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ), ], @@ -125,26 +126,28 @@ async def test_reproducing_states( await async_reproduce_state( hass, [ - State("cover.entity_close", STATE_OPEN), + State("cover.entity_close", CoverState.OPEN), State( "cover.entity_close_attr", - STATE_OPEN, + CoverState.OPEN, {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, ), State( "cover.entity_close_tilt", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 100}, ), - State("cover.entity_open", STATE_CLOSED), - State("cover.entity_slightly_open", STATE_OPEN, {}), - State("cover.entity_open_attr", STATE_CLOSED, {}), + State("cover.entity_open", CoverState.CLOSED), + State("cover.entity_slightly_open", CoverState.OPEN, {}), + State("cover.entity_open_attr", CoverState.CLOSED, {}), State( - "cover.entity_open_tilt", STATE_OPEN, {ATTR_CURRENT_TILT_POSITION: 0} + "cover.entity_open_tilt", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 0}, ), State( "cover.entity_entirely_open", - STATE_CLOSED, + CoverState.CLOSED, {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, ), # Should not raise diff --git a/tests/components/crownstone/test_config_flow.py b/tests/components/crownstone/test_config_flow.py index 
5dd00e7baffbb4..a38a04cb2adc43 100644 --- a/tests/components/crownstone/test_config_flow.py +++ b/tests/components/crownstone/test_config_flow.py @@ -258,7 +258,7 @@ async def test_unknown_error( result = await start_config_flow(hass, cloud) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown_error"} + assert result["errors"] == {"base": "unknown"} assert crownstone_setup.call_count == 0 diff --git a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr index b5a9f7b5543fb0..a3ec7caac60864 100644 --- a/tests/components/deconz/snapshots/test_light.ambr +++ b/tests/components/deconz/snapshots/test_light.ambr @@ -1400,7 +1400,12 @@ 'area_id': None, 'capabilities': dict({ 'effect_list': list([ - 'colorloop', + , + , + , + , + , + , ]), 'max_color_temp_kelvin': 6535, 'max_mireds': 500, @@ -1448,7 +1453,12 @@ 'color_temp_kelvin': None, 'effect': None, 'effect_list': list([ - 'colorloop', + , + , + , + , + , + , ]), 'friendly_name': 'Gradient light', 'hs_color': tuple( diff --git a/tests/components/deconz/test_alarm_control_panel.py b/tests/components/deconz/test_alarm_control_panel.py index 6c47146f9b0403..dbe75584df7cdb 100644 --- a/tests/components/deconz/test_alarm_control_panel.py +++ b/tests/components/deconz/test_alarm_control_panel.py @@ -9,6 +9,7 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + AlarmControlPanelState, ) from homeassistant.const import ( ATTR_CODE, @@ -17,13 +18,6 @@ SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, Platform, ) from homeassistant.core import HomeAssistant @@ -117,21 +111,21 @@ async def test_alarm_control_panel( for action, state in ( # Event signals alarm control panel armed state - (AncillaryControlPanel.ARMED_AWAY, STATE_ALARM_ARMED_AWAY), - (AncillaryControlPanel.ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), - (AncillaryControlPanel.ARMED_STAY, STATE_ALARM_ARMED_HOME), - (AncillaryControlPanel.DISARMED, STATE_ALARM_DISARMED), + (AncillaryControlPanel.ARMED_AWAY, AlarmControlPanelState.ARMED_AWAY), + (AncillaryControlPanel.ARMED_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (AncillaryControlPanel.ARMED_STAY, AlarmControlPanelState.ARMED_HOME), + (AncillaryControlPanel.DISARMED, AlarmControlPanelState.DISARMED), # Event signals alarm control panel arming state - (AncillaryControlPanel.ARMING_AWAY, STATE_ALARM_ARMING), - (AncillaryControlPanel.ARMING_NIGHT, STATE_ALARM_ARMING), - (AncillaryControlPanel.ARMING_STAY, STATE_ALARM_ARMING), + (AncillaryControlPanel.ARMING_AWAY, AlarmControlPanelState.ARMING), + (AncillaryControlPanel.ARMING_NIGHT, AlarmControlPanelState.ARMING), + (AncillaryControlPanel.ARMING_STAY, AlarmControlPanelState.ARMING), # Event signals alarm control panel pending state - (AncillaryControlPanel.ENTRY_DELAY, STATE_ALARM_PENDING), - (AncillaryControlPanel.EXIT_DELAY, STATE_ALARM_PENDING), + (AncillaryControlPanel.ENTRY_DELAY, AlarmControlPanelState.PENDING), + (AncillaryControlPanel.EXIT_DELAY, AlarmControlPanelState.PENDING), # Event signals alarm control panel triggered state - (AncillaryControlPanel.IN_ALARM, STATE_ALARM_TRIGGERED), + (AncillaryControlPanel.IN_ALARM, AlarmControlPanelState.TRIGGERED), # Event signals alarm control panel unknown state keeps previous state - (AncillaryControlPanel.NOT_READY, 
STATE_ALARM_TRIGGERED), + (AncillaryControlPanel.NOT_READY, AlarmControlPanelState.TRIGGERED), ): await sensor_ws_data({"state": {"panel": action}}) assert hass.states.get("alarm_control_panel.keypad").state == state diff --git a/tests/components/deconz/test_config_flow.py b/tests/components/deconz/test_config_flow.py index 8555a6e333beb1..ce13bbfa5d47c4 100644 --- a/tests/components/deconz/test_config_flow.py +++ b/tests/components/deconz/test_config_flow.py @@ -20,12 +20,12 @@ DOMAIN as DECONZ_DOMAIN, HASSIO_CONFIGURATION_URL, ) -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .conftest import API_KEY, BRIDGE_ID diff --git a/tests/components/deconz/test_cover.py b/tests/components/deconz/test_cover.py index f1573394fae0e0..47f8083798e3c6 100644 --- a/tests/components/deconz/test_cover.py +++ b/tests/components/deconz/test_cover.py @@ -19,8 +19,9 @@ SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, + CoverState, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN, Platform +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -67,7 +68,7 @@ async def test_cover( await light_ws_data({"state": {"lift": 0, "open": True}}) cover = hass.states.get("cover.window_covering_device") - assert cover.state == STATE_OPEN + assert cover.state == CoverState.OPEN assert cover.attributes[ATTR_CURRENT_POSITION] == 100 # Verify service calls for cover diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index 441cb01be630f4..8ce83d87b698d7 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -413,7 +413,7 @@ async def test_light_state_change( ATTR_ENTITY_ID: "light.hue_go", ATTR_XY_COLOR: (0.411, 0.351), ATTR_FLASH: FLASH_LONG, - ATTR_EFFECT: "None", + ATTR_EFFECT: "none", }, }, { diff --git a/tests/components/deconz/test_logbook.py b/tests/components/deconz/test_logbook.py index d23680225f1cf7..57cf8748762bec 100644 --- a/tests/components/deconz/test_logbook.py +++ b/tests/components/deconz/test_logbook.py @@ -16,7 +16,6 @@ CONF_EVENT, CONF_ID, CONF_UNIQUE_ID, - STATE_ALARM_ARMED_AWAY, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -83,7 +82,7 @@ async def test_humanifying_deconz_alarm_event( { CONF_CODE: 1234, CONF_DEVICE_ID: keypad_entry.id, - CONF_EVENT: STATE_ALARM_ARMED_AWAY, + CONF_EVENT: "armed_away", CONF_ID: keypad_event_id, CONF_UNIQUE_ID: keypad_serial, }, @@ -94,7 +93,7 @@ async def test_humanifying_deconz_alarm_event( { CONF_CODE: 1234, CONF_DEVICE_ID: "ff99ff99ff99ff99ff99ff99ff99ff99", - CONF_EVENT: STATE_ALARM_ARMED_AWAY, + CONF_EVENT: "armed_away", CONF_ID: removed_device_event_id, CONF_UNIQUE_ID: removed_device_serial, }, diff --git a/tests/components/demo/test_cover.py b/tests/components/demo/test_cover.py index abbbbf0b79a3da..97cad5bbe14517 100644 --- a/tests/components/demo/test_cover.py +++ b/tests/components/demo/test_cover.py @@ -12,6 +12,7 @@ ATTR_POSITION, 
ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -26,10 +27,6 @@ SERVICE_STOP_COVER_TILT, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, Platform, ) from homeassistant.core import HomeAssistant @@ -75,41 +72,41 @@ async def test_supported_features(hass: HomeAssistant) -> None: async def test_close_cover(hass: HomeAssistant) -> None: """Test closing the cover.""" state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 async def test_open_cover(hass: HomeAssistant) -> None: """Test opening the cover.""" state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 @@ -125,7 +122,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes["current_position"] == 100 # Toggle closed await hass.services.async_call( @@ -137,7 +134,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Toggle open await hass.services.async_call( @@ -149,7 +146,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(ENTITY_COVER) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 diff --git a/tests/components/demo/test_update.py b/tests/components/demo/test_update.py index 37fa5a7a2f6511..93a9f272aebc3a 100644 --- a/tests/components/demo/test_update.py +++ b/tests/components/demo/test_update.py @@ -11,6 +11,7 @@ ATTR_RELEASE_SUMMARY, ATTR_RELEASE_URL, ATTR_TITLE, + ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, UpdateDeviceClass, @@ -125,17 +126,27 @@ def test_setup_params(hass: HomeAssistant) -> None: ) -async def test_update_with_progress(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("entity_id", "steps"), + [ + ("update.demo_update_with_progress", 10), + 
("update.demo_update_with_decimal_progress", 1000), + ], +) +async def test_update_with_progress( + hass: HomeAssistant, entity_id: str, steps: int +) -> None: """Test update with progress.""" - state = hass.states.get("update.demo_update_with_progress") + state = hass.states.get(entity_id) assert state assert state.state == STATE_ON assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None events = [] async_track_state_change_event( hass, - "update.demo_update_with_progress", + entity_id, # pylint: disable-next=unnecessary-lambda callback(lambda event: events.append(event)), ) @@ -144,36 +155,44 @@ async def test_update_with_progress(hass: HomeAssistant) -> None: await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert len(events) == 10 - assert events[0].data["new_state"].state == STATE_ON - assert events[0].data["new_state"].attributes[ATTR_IN_PROGRESS] == 10 - assert events[1].data["new_state"].attributes[ATTR_IN_PROGRESS] == 20 - assert events[2].data["new_state"].attributes[ATTR_IN_PROGRESS] == 30 - assert events[3].data["new_state"].attributes[ATTR_IN_PROGRESS] == 40 - assert events[4].data["new_state"].attributes[ATTR_IN_PROGRESS] == 50 - assert events[5].data["new_state"].attributes[ATTR_IN_PROGRESS] == 60 - assert events[6].data["new_state"].attributes[ATTR_IN_PROGRESS] == 70 - assert events[7].data["new_state"].attributes[ATTR_IN_PROGRESS] == 80 - assert events[8].data["new_state"].attributes[ATTR_IN_PROGRESS] == 90 - assert events[9].data["new_state"].attributes[ATTR_IN_PROGRESS] is False - assert events[9].data["new_state"].state == STATE_OFF - - -async def test_update_with_progress_raising(hass: HomeAssistant) -> None: + assert len(events) == steps + 1 + for i, event in enumerate(events[:steps]): + new_state = event.data["new_state"] + assert new_state.state == STATE_ON + assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] == pytest.approx( + 100 / steps * i + ) + new_state = events[steps].data["new_state"] + assert new_state.attributes[ATTR_IN_PROGRESS] is False + assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] is None + assert new_state.state == STATE_OFF + + +@pytest.mark.parametrize( + ("entity_id", "steps"), + [ + ("update.demo_update_with_progress", 10), + ("update.demo_update_with_decimal_progress", 1000), + ], +) +async def test_update_with_progress_raising( + hass: HomeAssistant, entity_id: str, steps: int +) -> None: """Test update with progress failing to install.""" - state = hass.states.get("update.demo_update_with_progress") + state = hass.states.get(entity_id) assert state assert state.state == STATE_ON assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None events = [] async_track_state_change_event( hass, - "update.demo_update_with_progress", + entity_id, # pylint: disable-next=unnecessary-lambda callback(lambda event: events.append(event)), ) @@ -188,17 +207,19 @@ async def test_update_with_progress_raising(hass: HomeAssistant) -> None: await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) await hass.async_block_till_done() assert fake_sleep.call_count == 5 - assert len(events) == 5 - assert events[0].data["new_state"].state == STATE_ON - assert events[0].data["new_state"].attributes[ATTR_IN_PROGRESS] == 10 - assert 
events[1].data["new_state"].attributes[ATTR_IN_PROGRESS] == 20 - assert events[2].data["new_state"].attributes[ATTR_IN_PROGRESS] == 30 - assert events[3].data["new_state"].attributes[ATTR_IN_PROGRESS] == 40 - assert events[4].data["new_state"].attributes[ATTR_IN_PROGRESS] is False - assert events[4].data["new_state"].state == STATE_ON + assert len(events) == 6 + for i, event in enumerate(events[:5]): + new_state = event.data["new_state"] + assert new_state.state == STATE_ON + assert new_state.attributes[ATTR_UPDATE_PERCENTAGE] == pytest.approx( + 100 / steps * i + ) + assert events[5].data["new_state"].attributes[ATTR_IN_PROGRESS] is False + assert events[5].data["new_state"].attributes[ATTR_UPDATE_PERCENTAGE] is None + assert events[5].data["new_state"].state == STATE_ON diff --git a/tests/components/derivative/test_init.py b/tests/components/derivative/test_init.py index 0081ab97580bd1..32802080e3911f 100644 --- a/tests/components/derivative/test_init.py +++ b/tests/components/derivative/test_init.py @@ -42,7 +42,7 @@ async def test_setup_and_remove_config_entry( # Check the platform is setup correctly state = hass.states.get(derivative_entity_id) - assert state.state == "0" + assert state.state == "0.0" assert "unit_of_measurement" not in state.attributes assert state.attributes["source"] == "sensor.input" diff --git a/tests/components/device_sun_light_trigger/test_init.py b/tests/components/device_sun_light_trigger/test_init.py index 1de0794b9eedd4..24996482916031 100644 --- a/tests/components/device_sun_light_trigger/test_init.py +++ b/tests/components/device_sun_light_trigger/test_init.py @@ -177,6 +177,9 @@ async def test_lights_turn_on_when_coming_home_after_sun_set_person( hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun set.""" + # Ensure all setup tasks are done (avoid flaky tests) + await hass.async_block_till_done(wait_background_tasks=True) + device_1 = f"{DEVICE_TRACKER_DOMAIN}.device_1" device_2 = f"{DEVICE_TRACKER_DOMAIN}.device_2" diff --git a/tests/components/devolo_home_control/test_cover.py b/tests/components/devolo_home_control/test_cover.py index 4560da9f7b7667..7d4b081c87e454 100644 --- a/tests/components/devolo_home_control/test_cover.py +++ b/tests/components/devolo_home_control/test_cover.py @@ -8,13 +8,13 @@ ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_CLOSED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -46,7 +46,7 @@ async def test_cover( test_gateway.publisher.dispatch("Test", ("devolo.Blinds", 0.0)) await hass.async_block_till_done() state = hass.states.get(f"{COVER_DOMAIN}.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0.0 # Test setting position diff --git a/tests/components/devolo_home_network/mock.py b/tests/components/devolo_home_network/mock.py index fc7786669b72ad..82bf3e5ad762dc 100644 --- a/tests/components/devolo_home_network/mock.py +++ b/tests/components/devolo_home_network/mock.py @@ -50,7 +50,7 @@ async def async_connect( self, session_instance: httpx.AsyncClient | None = None ) -> None: """Give a mocked device the needed properties.""" - self.mac = DISCOVERY_INFO.properties["PlcMacAddress"] + self.mac = DISCOVERY_INFO.properties["PlcMacAddress"] if self.plcnet else None self.mt_number = 
DISCOVERY_INFO.properties["MT"] self.product = DISCOVERY_INFO.properties["Product"] self.serial_number = DISCOVERY_INFO.properties["SN"] diff --git a/tests/components/devolo_home_network/snapshots/test_init.ambr b/tests/components/devolo_home_network/snapshots/test_init.ambr index 619a8ce11217a8..297c9a2518314e 100644 --- a/tests/components/devolo_home_network/snapshots/test_init.ambr +++ b/tests/components/devolo_home_network/snapshots/test_init.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_setup_entry +# name: test_setup_entry[mock_device] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -35,3 +35,35 @@ 'via_device_id': None, }) # --- +# name: test_setup_entry[mock_repeater_device] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://192.0.2.1', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'devolo_home_network', + '1234567890', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'devolo', + 'model': 'dLAN pro 1200+ WiFi ac', + 'model_id': '2730', + 'name': 'Mock Title', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '1234567890', + 'suggested_area': None, + 'sw_version': '5.6.1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/devolo_home_network/snapshots/test_update.ambr b/tests/components/devolo_home_network/snapshots/test_update.ambr index 83ca84c82e87b7..8a1065f9a60ddf 100644 --- a/tests/components/devolo_home_network/snapshots/test_update.ambr +++ b/tests/components/devolo_home_network/snapshots/test_update.ambr @@ -4,6 +4,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/devolo_home_network/icon.png', 'friendly_name': 'Mock Title Firmware', 'in_progress': False, @@ -14,6 +15,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_firmware', diff --git a/tests/components/devolo_home_network/test_init.py b/tests/components/devolo_home_network/test_init.py index 1b8903c568e878..71823eabe828f2 100644 --- a/tests/components/devolo_home_network/test_init.py +++ b/tests/components/devolo_home_network/test_init.py @@ -27,13 +27,16 @@ from tests.common import MockConfigEntry +@pytest.mark.parametrize("device", ["mock_device", "mock_repeater_device"]) async def test_setup_entry( hass: HomeAssistant, - mock_device: MockDevice, + device: str, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, + request: pytest.FixtureRequest, ) -> None: """Test setup entry.""" + mock_device: MockDevice = request.getfixturevalue(device) entry = configure_integration(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/dhcp/conftest.py b/tests/components/dhcp/conftest.py new file mode 100644 index 00000000000000..b0fa3f573c5069 --- /dev/null +++ b/tests/components/dhcp/conftest.py @@ -0,0 +1,21 @@ +"""Tests for the dhcp integration.""" + +import os +import pathlib + + +def pytest_sessionstart(session): + """Try to avoid flaky FileExistsError in CI. + + Called after the Session object has been created and + before performing collection and entering the run test loop. 
+ + This is needed due to a race condition in scapy v2.6.0 + See https://github.com/secdev/scapy/pull/4558 + + Can be removed when scapy 2.6.1 is released. + """ + for sub_dir in (".cache", ".config"): + path = pathlib.Path(os.path.join(os.path.expanduser("~"), sub_dir)) + if not path.exists(): + path.mkdir(mode=0o700, exist_ok=True) diff --git a/tests/components/dhcp/test_init.py b/tests/components/dhcp/test_init.py index 478b32940a8c82..6852f4369ccb3e 100644 --- a/tests/components/dhcp/test_init.py +++ b/tests/components/dhcp/test_init.py @@ -8,10 +8,7 @@ import aiodhcpwatcher import pytest -from scapy import ( - arch, # noqa: F401 - interfaces, -) +from scapy import interfaces from scapy.error import Scapy_Exception from scapy.layers.dhcp import DHCP from scapy.layers.l2 import Ether diff --git a/tests/components/diagnostics/test_init.py b/tests/components/diagnostics/test_init.py index 7f583395387bc6..ffed7e21f60dfb 100644 --- a/tests/components/diagnostics/test_init.py +++ b/tests/components/diagnostics/test_init.py @@ -174,6 +174,7 @@ async def test_download_diagnostics( "dependencies": [], "domain": "fake_integration", "is_built_in": True, + "overwrites_built_in": False, "name": "fake_integration", "requirements": [], }, @@ -260,6 +261,7 @@ async def test_download_diagnostics( "dependencies": [], "domain": "fake_integration", "is_built_in": True, + "overwrites_built_in": False, "name": "fake_integration", "requirements": [], }, diff --git a/tests/components/dialogflow/test_init.py b/tests/components/dialogflow/test_init.py index 4c36a6887aa71b..8144bef7c1c884 100644 --- a/tests/components/dialogflow/test_init.py +++ b/tests/components/dialogflow/test_init.py @@ -8,8 +8,8 @@ from homeassistant import config_entries from homeassistant.components import dialogflow, intent_script -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component diff --git a/tests/components/dynalite/test_cover.py b/tests/components/dynalite/test_cover.py index 930318978fc7f7..ac8dd7b676da09 100644 --- a/tests/components/dynalite/test_cover.py +++ b/tests/components/dynalite/test_cover.py @@ -13,15 +13,9 @@ ATTR_POSITION, ATTR_TILT_POSITION, CoverDeviceClass, + CoverState, ) -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_FRIENDLY_NAME, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, -) +from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError @@ -130,16 +124,16 @@ async def test_cover_positions(hass: HomeAssistant, mock_device: Mock) -> None: """Test that the state updates in the various positions.""" update_func = await create_entity_from_device(hass, mock_device) await check_cover_position( - hass, update_func, mock_device, True, False, False, STATE_CLOSING + hass, update_func, mock_device, True, False, False, CoverState.CLOSING ) await check_cover_position( - hass, update_func, mock_device, False, True, False, STATE_OPENING + hass, update_func, mock_device, False, True, False, CoverState.OPENING ) await check_cover_position( - hass, update_func, mock_device, False, False, True, STATE_CLOSED + hass, update_func, mock_device, False, False, True, CoverState.CLOSED ) await check_cover_position( - hass, update_func, 
mock_device, False, False, False, STATE_OPEN + hass, update_func, mock_device, False, False, False, CoverState.OPEN ) @@ -147,12 +141,12 @@ async def test_cover_restore_state(hass: HomeAssistant, mock_device: Mock) -> No """Test restore from cache.""" mock_restore_cache( hass, - [State("cover.name", STATE_OPEN, attributes={ATTR_CURRENT_POSITION: 77})], + [State("cover.name", CoverState.OPEN, attributes={ATTR_CURRENT_POSITION: 77})], ) await create_entity_from_device(hass, mock_device) mock_device.init_level.assert_called_once_with(77) entity_state = hass.states.get("cover.name") - assert entity_state.state == STATE_OPEN + assert entity_state.state == CoverState.OPEN async def test_cover_restore_state_bad_cache( @@ -161,9 +155,9 @@ async def test_cover_restore_state_bad_cache( """Test restore from a cache without the attribute.""" mock_restore_cache( hass, - [State("cover.name", STATE_OPEN, attributes={"bla bla": 77})], + [State("cover.name", CoverState.OPEN, attributes={"bla bla": 77})], ) await create_entity_from_device(hass, mock_device) mock_device.init_level.assert_not_called() entity_state = hass.states.get("cover.name") - assert entity_state.state == STATE_CLOSED + assert entity_state.state == CoverState.CLOSED diff --git a/tests/components/ecobee/common.py b/tests/components/ecobee/common.py index e320a08673a4ae..69d576ce2b5949 100644 --- a/tests/components/ecobee/common.py +++ b/tests/components/ecobee/common.py @@ -11,7 +11,7 @@ async def setup_platform( hass: HomeAssistant, - platform: str, + platforms: str | list[str], ) -> MockConfigEntry: """Set up the ecobee platform.""" mock_entry = MockConfigEntry( @@ -24,7 +24,9 @@ async def setup_platform( ) mock_entry.add_to_hass(hass) - with patch("homeassistant.components.ecobee.PLATFORMS", [platform]): + platforms = [platforms] if isinstance(platforms, str) else platforms + + with patch("homeassistant.components.ecobee.PLATFORMS", platforms): await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() return mock_entry diff --git a/tests/components/ecobee/fixtures/ecobee-data.json b/tests/components/ecobee/fixtures/ecobee-data.json index b2f336e064d54c..e0e82d688633db 100644 --- a/tests/components/ecobee/fixtures/ecobee-data.json +++ b/tests/components/ecobee/fixtures/ecobee-data.json @@ -1,7 +1,7 @@ { "thermostatList": [ { - "identifier": 8675309, + "identifier": "8675309", "name": "ecobee", "modelNumber": "athenaSmart", "utcTime": "2022-01-01 10:00:00", @@ -11,13 +11,32 @@ }, "program": { "climates": [ + { + "name": "Home", + "climateRef": "home", + "sensors": [ + { + "name": "ecobee" + } + ] + }, { "name": "Climate1", - "climateRef": "c1" + "climateRef": "c1", + "sensors": [ + { + "name": "ecobee" + } + ] }, { "name": "Climate2", - "climateRef": "c2" + "climateRef": "c2", + "sensors": [ + { + "name": "ecobee" + } + ] } ], "currentClimateRef": "c1" @@ -62,6 +81,24 @@ } ], "remoteSensors": [ + { + "id": "ei:0", + "name": "ecobee", + "type": "thermostat", + "inUse": true, + "capability": [ + { + "id": "1", + "type": "temperature", + "value": "782" + }, + { + "id": "2", + "type": "humidity", + "value": "54" + } + ] + }, { "id": "rs:100", "name": "Remote Sensor 1", @@ -123,6 +160,7 @@ "hasHumidifier": true, "humidifierMode": "manual", "hasHeatPump": true, + "compressorProtectionMinTemp": 100, "humidity": "30" }, "equipmentStatus": "fan", @@ -157,6 +195,25 @@ "value": "false" } ] + }, + { + "id": "rs:101", + "name": "Remote Sensor 2", + "type": "ecobee3_remote_sensor", + "code": "VTRK", + "inUse": false, 
+ "capability": [ + { + "id": "1", + "type": "temperature", + "value": "782" + }, + { + "id": "2", + "type": "occupancy", + "value": "false" + } + ] } ] }, diff --git a/tests/components/ecobee/test_climate.py b/tests/components/ecobee/test_climate.py index 559153874a5ca0..403ac4a01ad268 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -3,16 +3,27 @@ from http import HTTPStatus from unittest import mock +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant import const from homeassistant.components.climate import ClimateEntityFeature -from homeassistant.components.ecobee.climate import PRESET_AWAY_INDEFINITELY, Thermostat -from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_OFF +from homeassistant.components.ecobee.climate import ( + ATTR_PRESET_MODE, + ATTR_SENSOR_LIST, + PRESET_AWAY_INDEFINITELY, + Thermostat, +) +from homeassistant.components.ecobee.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, STATE_OFF from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr from .common import setup_platform +from tests.common import MockConfigEntry, async_fire_time_changed + ENTITY_ID = "climate.ecobee" @@ -25,9 +36,18 @@ def ecobee_fixture(): "identifier": "abc", "program": { "climates": [ - {"name": "Climate1", "climateRef": "c1"}, - {"name": "Climate2", "climateRef": "c2"}, - {"name": "Away", "climateRef": "away"}, + { + "name": "Climate1", + "climateRef": "c1", + "sensors": [{"name": "Ecobee"}], + }, + { + "name": "Climate2", + "climateRef": "c2", + "sensors": [{"name": "Ecobee"}], + }, + {"name": "Away", "climateRef": "away", "sensors": [{"name": "Ecobee"}]}, + {"name": "Home", "climateRef": "home", "sensors": [{"name": "Ecobee"}]}, ], "currentClimateRef": "c1", }, @@ -60,8 +80,19 @@ def ecobee_fixture(): "endTime": "10:00:00", } ], + "remoteSensors": [ + { + "id": "ei:0", + "name": "Ecobee", + }, + { + "id": "rs2:100", + "name": "Remote Sensor 1", + }, + ], } mock_ecobee = mock.Mock() + mock_ecobee.get = mock.Mock(side_effect=vals.get) mock_ecobee.__getitem__ = mock.Mock(side_effect=vals.__getitem__) mock_ecobee.__setitem__ = mock.Mock(side_effect=vals.__setitem__) return mock_ecobee @@ -76,10 +107,10 @@ def data_fixture(ecobee_fixture): @pytest.fixture(name="thermostat") -def thermostat_fixture(data): +def thermostat_fixture(data, hass: HomeAssistant): """Set up ecobee thermostat object.""" thermostat = data.ecobee.get_thermostat(1) - return Thermostat(data, 1, thermostat) + return Thermostat(data, 1, thermostat, hass) async def test_name(thermostat) -> None: @@ -186,6 +217,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "heatPump2", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["equipmentStatus"] = "auxHeat2" @@ -194,6 +227,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "auxHeat2", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["equipmentStatus"] = "compCool1" @@ -202,6 +237,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "compCool1", + "available_sensors": [], + "active_sensors": [], } 
ecobee_fixture["equipmentStatus"] = "" assert thermostat.extra_state_attributes == { @@ -209,6 +246,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["equipmentStatus"] = "Unknown" @@ -217,6 +256,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "Unknown", + "available_sensors": [], + "active_sensors": [], } ecobee_fixture["program"]["currentClimateRef"] = "c2" @@ -225,6 +266,8 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: "climate_mode": "Climate2", "fan_min_on_time": 10, "equipment_running": "Unknown", + "available_sensors": [], + "active_sensors": [], } @@ -375,3 +418,203 @@ async def test_set_preset_mode(ecobee_fixture, thermostat, data) -> None: data.ecobee.set_climate_hold.assert_has_calls( [mock.call(1, "away", "indefinite", thermostat.hold_hours())] ) + + +async def test_remote_sensors(hass: HomeAssistant) -> None: + """Test remote sensors.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + platform = hass.data[const.Platform.CLIMATE].entities + for entity in platform: + if entity.entity_id == "climate.ecobee": + thermostat = entity + break + + assert thermostat is not None + remote_sensors = thermostat.remote_sensors + + assert sorted(remote_sensors) == sorted(["ecobee", "Remote Sensor 1"]) + + +async def test_remote_sensor_devices( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test remote sensor devices.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + freezer.tick(100) + async_fire_time_changed(hass) + state = hass.states.get(ENTITY_ID) + device_registry = dr.async_get(hass) + for device in device_registry.devices.values(): + if device.name == "Remote Sensor 1": + remote_sensor_1_id = device.id + if device.name == "ecobee": + ecobee_id = device.id + assert sorted(state.attributes.get("available_sensors")) == sorted( + [f"Remote Sensor 1 ({remote_sensor_1_id})", f"ecobee ({ecobee_id})"] + ) + + +async def test_active_sensors_in_preset_mode(hass: HomeAssistant) -> None: + """Test active sensors in preset mode property.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + platform = hass.data[const.Platform.CLIMATE].entities + for entity in platform: + if entity.entity_id == "climate.ecobee": + thermostat = entity + break + + assert thermostat is not None + remote_sensors = thermostat.active_sensors_in_preset_mode + + assert sorted(remote_sensors) == sorted(["ecobee"]) + + +async def test_active_sensor_devices_in_preset_mode(hass: HomeAssistant) -> None: + """Test active sensor devices in preset mode.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + state = hass.states.get(ENTITY_ID) + + assert state.attributes.get("active_sensors") == ["ecobee"] + + +async def test_remote_sensor_ids_names(hass: HomeAssistant) -> None: + """Test getting ids and names_by_user for thermostat.""" + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + platform = hass.data[const.Platform.CLIMATE].entities + for entity in platform: + if entity.entity_id == "climate.ecobee": + thermostat = entity + break + + assert thermostat is not None + + remote_sensor_ids_names = thermostat.remote_sensor_ids_names + for id_name in 
remote_sensor_ids_names: + assert id_name.get("id") is not None + + name_by_user_list = [item["name_by_user"] for item in remote_sensor_ids_names] + assert sorted(name_by_user_list) == sorted(["Remote Sensor 1", "ecobee"]) + + +async def test_set_sensors_used_in_climate(hass: HomeAssistant) -> None: + """Test set sensors used in climate.""" + # Get device_id of remote sensor from the device registry. + await setup_platform(hass, [const.Platform.CLIMATE, const.Platform.SENSOR]) + device_registry = dr.async_get(hass) + for device in device_registry.devices.values(): + if device.name == "Remote Sensor 1": + remote_sensor_1_id = device.id + if device.name == "ecobee": + ecobee_id = device.id + if device.name == "Remote Sensor 2": + remote_sensor_2_id = device.id + + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + device_from_other_integration = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, identifiers={("test", "unique")} + ) + + # Test that the function call works in its entirety. + with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [remote_sensor_1_id], + }, + blocking=True, + ) + await hass.async_block_till_done() + mock_sensors.assert_called_once_with(0, "Climate1", sensor_ids=["rs:100"]) + + # Update sensors without preset mode. + with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_SENSOR_LIST: [remote_sensor_1_id], + }, + blocking=True, + ) + await hass.async_block_till_done() + # `temp` is the preset running because of a hold. + mock_sensors.assert_called_once_with(0, "temp", sensor_ids=["rs:100"]) + + # Check that sensors are not updated when the sent sensors are the currently set sensors. + with mock.patch("pyecobee.Ecobee.update_climate_sensors") as mock_sensors: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [ecobee_id], + }, + blocking=True, + ) + mock_sensors.assert_not_called() + + # Error raised because invalid climate name. + with pytest.raises(ServiceValidationError) as execinfo: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "InvalidClimate", + ATTR_SENSOR_LIST: [remote_sensor_1_id], + }, + blocking=True, + ) + assert execinfo.value.translation_domain == "ecobee" + assert execinfo.value.translation_key == "invalid_preset" + + ## Error raised because invalid sensor. + with pytest.raises(ServiceValidationError) as execinfo: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: ["abcd"], + }, + blocking=True, + ) + assert execinfo.value.translation_domain == "ecobee" + assert execinfo.value.translation_key == "invalid_sensor" + + ## Error raised because sensor not available on device. 
+ with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [remote_sensor_2_id], + }, + blocking=True, + ) + + with pytest.raises(ServiceValidationError) as execinfo: + await hass.services.async_call( + DOMAIN, + "set_sensors_used_in_climate", + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_PRESET_MODE: "Climate1", + ATTR_SENSOR_LIST: [ + remote_sensor_1_id, + device_from_other_integration.id, + ], + }, + blocking=True, + ) + assert execinfo.value.translation_domain == "ecobee" + assert execinfo.value.translation_key == "sensor_lookup_failed" diff --git a/tests/components/ecobee/test_config_flow.py b/tests/components/ecobee/test_config_flow.py index 20d3dabb1ea4a9..5c919ffab5c309 100644 --- a/tests/components/ecobee/test_config_flow.py +++ b/tests/components/ecobee/test_config_flow.py @@ -11,6 +11,7 @@ DATA_ECOBEE_CONFIG, DOMAIN, ) +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -20,12 +21,11 @@ async def test_abort_if_already_setup(hass: HomeAssistant) -> None: """Test we abort if ecobee is already setup.""" - flow = config_flow.EcobeeFlowHandler() - flow.hass = hass - MockConfigEntry(domain=DOMAIN).add_to_hass(hass) - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/ecobee/test_notify.py b/tests/components/ecobee/test_notify.py index c66f04c752ac7b..ca5e40dbdb17f6 100644 --- a/tests/components/ecobee/test_notify.py +++ b/tests/components/ecobee/test_notify.py @@ -2,13 +2,11 @@ from unittest.mock import MagicMock -from homeassistant.components.ecobee import DOMAIN from homeassistant.components.notify import ( DOMAIN as NOTIFY_DOMAIN, SERVICE_SEND_MESSAGE, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir from .common import setup_platform @@ -34,24 +32,3 @@ async def test_notify_entity_service( ) await hass.async_block_till_done() mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") - - -async def test_legacy_notify_service( - hass: HomeAssistant, - mock_ecobee: MagicMock, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the legacy notify service.""" - await setup_platform(hass, NOTIFY_DOMAIN) - - assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) - await hass.services.async_call( - NOTIFY_DOMAIN, - DOMAIN, - service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, - blocking=True, - ) - await hass.async_block_till_done() - mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") - mock_ecobee.send_message.reset_mock() - assert len(issue_registry.issues) == 1 diff --git a/tests/components/ecobee/test_number.py b/tests/components/ecobee/test_number.py index 5b01fe8c5bae5f..be65b6dbb3039f 100644 --- a/tests/components/ecobee/test_number.py +++ b/tests/components/ecobee/test_number.py @@ -12,8 +12,8 @@ from .common import setup_platform -VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_min_time_home" -VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_min_time_away" +VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_minimum_time_home" +VENTILATOR_MIN_AWAY_ID = 
"number.ecobee_ventilator_minimum_time_away" THERMOSTAT_ID = 0 @@ -26,7 +26,9 @@ async def test_ventilator_min_on_home_attributes(hass: HomeAssistant) -> None: assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert state.attributes.get("friendly_name") == "ecobee Ventilator min time home" + assert ( + state.attributes.get("friendly_name") == "ecobee Ventilator minimum time home" + ) assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -39,7 +41,9 @@ async def test_ventilator_min_on_away_attributes(hass: HomeAssistant) -> None: assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert state.attributes.get("friendly_name") == "ecobee Ventilator min time away" + assert ( + state.attributes.get("friendly_name") == "ecobee Ventilator minimum time away" + ) assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -77,3 +81,42 @@ async def test_set_min_time_away(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() mock_set_min_away_time.assert_called_once_with(THERMOSTAT_ID, target_value) + + +COMPRESSOR_MIN_TEMP_ID = "number.ecobee2_compressor_minimum_temperature" + + +async def test_compressor_protection_min_temp_attributes(hass: HomeAssistant) -> None: + """Test the compressor min temp value is correct. + + Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary. + """ + await setup_platform(hass, NUMBER_DOMAIN) + + state = hass.states.get(COMPRESSOR_MIN_TEMP_ID) + assert state.state == "-12.2" + assert ( + state.attributes.get("friendly_name") + == "ecobee2 Compressor minimum temperature" + ) + + +async def test_set_compressor_protection_min_temp(hass: HomeAssistant) -> None: + """Test the number can set minimum compressor operating temp. + + Ecobee runs in Fahrenheit; the test rig runs in Celsius. 
Conversions are necessary + """ + target_value = 0 + with patch( + "homeassistant.components.ecobee.Ecobee.set_aux_cutover_threshold" + ) as mock_set_compressor_min_temp: + await setup_platform(hass, NUMBER_DOMAIN) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: COMPRESSOR_MIN_TEMP_ID, ATTR_VALUE: target_value}, + blocking=True, + ) + await hass.async_block_till_done() + mock_set_compressor_min_temp.assert_called_once_with(1, 32) diff --git a/tests/components/ecobee/test_repairs.py b/tests/components/ecobee/test_repairs.py deleted file mode 100644 index b00c49e7d915db..00000000000000 --- a/tests/components/ecobee/test_repairs.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Test repairs for Ecobee integration.""" - -from unittest.mock import MagicMock - -from homeassistant.components.ecobee import DOMAIN -from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from .common import setup_platform - -from tests.components.repairs import ( - async_process_repairs_platforms, - process_repair_fix_flow, - start_repair_fix_flow, -) -from tests.typing import ClientSessionGenerator - -THERMOSTAT_ID = 0 - - -async def test_ecobee_notify_repair_flow( - hass: HomeAssistant, - mock_ecobee: MagicMock, - hass_client: ClientSessionGenerator, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the ecobee notify service repair flow is triggered.""" - await setup_platform(hass, NOTIFY_DOMAIN) - await async_process_repairs_platforms(hass) - - http_client = await hass_client() - - # Simulate legacy service being used - assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) - await hass.services.async_call( - NOTIFY_DOMAIN, - DOMAIN, - service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, - blocking=True, - ) - await hass.async_block_till_done() - mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") - mock_ecobee.send_message.reset_mock() - - # Assert the issue is present - assert issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_{DOMAIN}", - ) - assert len(issue_registry.issues) == 1 - - data = await start_repair_fix_flow( - http_client, "notify", f"migrate_notify_{DOMAIN}_{DOMAIN}" - ) - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - data = await process_repair_fix_flow(http_client, flow_id) - assert data["type"] == "create_entry" - # Test confirm step in repair flow - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_{DOMAIN}", - ) - assert len(issue_registry.issues) == 0 diff --git a/tests/components/ecobee/test_switch.py b/tests/components/ecobee/test_switch.py index 31c8ce8f72d44d..b3c4c4f8296a40 100644 --- a/tests/components/ecobee/test_switch.py +++ b/tests/components/ecobee/test_switch.py @@ -118,7 +118,7 @@ async def test_turn_off_20min_ventilator(hass: HomeAssistant) -> None: mock_set_20min_ventilator.assert_called_once_with(THERMOSTAT_ID, False) -DEVICE_ID = "switch.ecobee2_aux_heat_only" +DEVICE_ID = "switch.ecobee2_auxiliary_heat_only" async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py index 37866a53c5b15a..7151aab10f2470 100644 --- a/tests/components/elevenlabs/test_tts.py +++ 
b/tests/components/elevenlabs/test_tts.py @@ -32,9 +32,9 @@ DOMAIN as DOMAIN_MP, SERVICE_PLAY_MEDIA, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core_config import async_process_ha_core_config from .const import MOCK_MODELS, MOCK_VOICES diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py index 29e86f3c59d643..4bd1d68217abb0 100644 --- a/tests/components/emoncms/conftest.py +++ b/tests/components/emoncms/conftest.py @@ -91,6 +91,21 @@ def config_entry() -> MockConfigEntry: ) +FLOW_RESULT_SECOND_URL = copy.deepcopy(FLOW_RESULT) +FLOW_RESULT_SECOND_URL[CONF_URL] = "http://1.1.1.2" + + +@pytest.fixture +def config_entry_unique_id() -> MockConfigEntry: + """Mock emoncms config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT_SECOND_URL, + unique_id="123-53535292", + ) + + FLOW_RESULT_NO_FEED = copy.deepcopy(FLOW_RESULT) FLOW_RESULT_NO_FEED[CONF_ONLY_INCLUDE_FEEDID] = None @@ -143,4 +158,5 @@ async def emoncms_client() -> AsyncGenerator[AsyncMock]: ): client = mock_client.return_value client.async_request.return_value = {"success": True, "message": FEEDS} + client.async_get_uuid.return_value = "123-53535292" yield client diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr index 5e718c1d8e8407..f6a2745fb1ad58 100644 --- a/tests/components/emoncms/snapshots/test_sensor.ambr +++ b/tests/components/emoncms/snapshots/test_sensor.ambr @@ -30,7 +30,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'XXXXXXXX-1', + 'unique_id': '123-53535292-1', 'unit_of_measurement': , }) # --- diff --git a/tests/components/emoncms/test_config_flow.py b/tests/components/emoncms/test_config_flow.py index 17ec32a9008d87..1914f23fb0b41c 100644 --- a/tests/components/emoncms/test_config_flow.py +++ b/tests/components/emoncms/test_config_flow.py @@ -42,7 +42,7 @@ async def test_flow_import_failure( data=YAML, ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == EMONCMS_FAILURE["message"] + assert result["reason"] == "api_error" async def test_flow_import_already_configured( @@ -97,10 +97,6 @@ async def test_user_flow( assert len(mock_setup_entry.mock_calls) == 1 -USER_OPTIONS = { - CONF_ONLY_INCLUDE_FEEDID: ["1"], -} - CONFIG_ENTRY = { CONF_API_KEY: "my_api_key", CONF_ONLY_INCLUDE_FEEDID: ["1"], @@ -110,21 +106,24 @@ async def test_user_flow( async def test_options_flow( hass: HomeAssistant, - mock_setup_entry: AsyncMock, emoncms_client: AsyncMock, config_entry: MockConfigEntry, ) -> None: """Options flow - success test.""" await setup_integration(hass, config_entry) + assert config_entry.options == {} result = await hass.config_entries.options.async_init(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input=USER_OPTIONS, + user_input={ + CONF_ONLY_INCLUDE_FEEDID: ["1"], + }, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == CONFIG_ENTRY - assert config_entry.options == CONFIG_ENTRY + assert config_entry.options == { + CONF_ONLY_INCLUDE_FEEDID: ["1"], + } async def test_options_flow_failure( @@ -138,6 +137,25 @@ async def test_options_flow_failure( await setup_integration(hass, config_entry) result = await 
hass.config_entries.options.async_init(config_entry.entry_id) await hass.async_block_till_done() - assert result["errors"]["base"] == "failure" + assert result["errors"]["base"] == "api_error" + assert result["description_placeholders"]["details"] == "failure" assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" + + +async def test_unique_id_exists( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, + config_entry_unique_id: MockConfigEntry, +) -> None: + """Test when entry with same unique id already exists.""" + config_entry_unique_id.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], USER_INPUT + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/emoncms/test_init.py b/tests/components/emoncms/test_init.py index b89b6e65a66d89..abe1a020034423 100644 --- a/tests/components/emoncms/test_init.py +++ b/tests/components/emoncms/test_init.py @@ -4,11 +4,14 @@ from unittest.mock import AsyncMock +from homeassistant.components.emoncms.const import DOMAIN, FEED_ID, FEED_NAME from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir from . import setup_integration -from .conftest import EMONCMS_FAILURE +from .conftest import EMONCMS_FAILURE, FEEDS from tests.common import MockConfigEntry @@ -38,3 +41,49 @@ async def test_failure( emoncms_client.async_request.return_value = EMONCMS_FAILURE config_entry.add_to_hass(hass) assert not await hass.config_entries.async_setup(config_entry.entry_id) + + +async def test_migrate_uuid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test migration from home assistant uuid to emoncms uuid.""" + config_entry.add_to_hass(hass) + assert config_entry.unique_id is None + for _, feed in enumerate(FEEDS): + entity_registry.async_get_or_create( + Platform.SENSOR, + DOMAIN, + f"{config_entry.entry_id}-{feed[FEED_ID]}", + config_entry=config_entry, + suggested_object_id=f"{DOMAIN}_{feed[FEED_NAME]}", + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + emoncms_uuid = emoncms_client.async_get_uuid.return_value + assert config_entry.unique_id == emoncms_uuid + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + + for nb, feed in enumerate(FEEDS): + assert entity_entries[nb].unique_id == f"{emoncms_uuid}-{feed[FEED_ID]}" + assert ( + entity_entries[nb].previous_unique_id + == f"{config_entry.entry_id}-{feed[FEED_ID]}" + ) + + +async def test_no_uuid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when the emoncms server does not ship an uuid.""" + emoncms_client.async_get_uuid.return_value = None + await setup_integration(hass, config_entry) + + assert issue_registry.async_get_issue(domain=DOMAIN, issue_id="migrate database") diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index ee10e9462f3a9a..44e2e680d5f59b 100644 --- 
a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -703,7 +703,7 @@ async def test_reconfigure( await setup_integration(hass, config_entry) result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {} # original entry @@ -739,7 +739,7 @@ async def test_reconfigure_nochange( await setup_integration(hass, config_entry) result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {} # original entry @@ -775,7 +775,7 @@ async def test_reconfigure_otherenvoy( await setup_integration(hass, config_entry) result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {} # let mock return different serial from first time, sim it's other one on changed ip @@ -790,34 +790,14 @@ async def test_reconfigure_otherenvoy( }, ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unexpected_envoy"} + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" # entry should still be original entry assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.data[CONF_USERNAME] == "test-username" assert config_entry.data[CONF_PASSWORD] == "test-password" - # set serial back to original to finsich flow - mock_envoy.serial_number = "1234" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "new-password", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - - # updated original entry - assert config_entry.data[CONF_HOST] == "1.1.1.1" - assert config_entry.data[CONF_USERNAME] == "test-username" - assert config_entry.data[CONF_PASSWORD] == "new-password" - @pytest.mark.parametrize( ("exception", "error"), @@ -909,7 +889,7 @@ async def test_reconfigure_change_ip_to_existing( result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {} # original entry diff --git a/tests/components/esphome/test_alarm_control_panel.py b/tests/components/esphome/test_alarm_control_panel.py index af717ac1b4976c..a3bfc72f3e219b 100644 --- a/tests/components/esphome/test_alarm_control_panel.py +++ b/tests/components/esphome/test_alarm_control_panel.py @@ -4,9 +4,9 @@ from aioesphomeapi import ( AlarmControlPanelCommand, - AlarmControlPanelEntityState, + AlarmControlPanelEntityState as ESPHomeAlarmEntityState, AlarmControlPanelInfo, - AlarmControlPanelState, + AlarmControlPanelState as ESPHomeAlarmState, APIClient, ) @@ -20,9 +20,10 @@ SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, + AlarmControlPanelState, ) from homeassistant.components.esphome.alarm_control_panel import EspHomeACPFeatures -from homeassistant.const import ATTR_ENTITY_ID, STATE_ALARM_ARMED_AWAY, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from 
homeassistant.core import HomeAssistant @@ -48,9 +49,7 @@ async def test_generic_alarm_control_panel_requires_code( requires_code_to_arm=True, ) ] - states = [ - AlarmControlPanelEntityState(key=1, state=AlarmControlPanelState.ARMED_AWAY) - ] + states = [ESPHomeAlarmEntityState(key=1, state=ESPHomeAlarmState.ARMED_AWAY)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -60,7 +59,7 @@ async def test_generic_alarm_control_panel_requires_code( ) state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, @@ -183,9 +182,7 @@ async def test_generic_alarm_control_panel_no_code( requires_code_to_arm=False, ) ] - states = [ - AlarmControlPanelEntityState(key=1, state=AlarmControlPanelState.ARMED_AWAY) - ] + states = [ESPHomeAlarmEntityState(key=1, state=ESPHomeAlarmState.ARMED_AWAY)] user_service = [] await mock_generic_device_entry( mock_client=mock_client, @@ -195,7 +192,7 @@ async def test_generic_alarm_control_panel_no_code( ) state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py index b2c44af2cf9cec..e8344e50161c19 100644 --- a/tests/components/esphome/test_assist_satellite.py +++ b/tests/components/esphome/test_assist_satellite.py @@ -1448,6 +1448,7 @@ async def test_get_set_configuration( states=[], device_info={ "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE }, ) await hass.async_block_till_done() diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 2f91921e7f22ac..0a389969c78ba1 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -27,10 +27,10 @@ DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, DOMAIN, ) -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from . 
import VALID_NOISE_PSK @@ -1400,6 +1400,14 @@ async def test_discovery_mqtt_no_mac( await mqtt_discovery_test_abort(hass, "{}", "mqtt_missing_mac") +@pytest.mark.usefixtures("mock_zeroconf") +async def test_discovery_mqtt_empty_payload( + hass: HomeAssistant, mock_client, mock_setup_entry: None +) -> None: + """Test discovery aborted if MQTT payload is empty.""" + await mqtt_discovery_test_abort(hass, "", "mqtt_missing_payload") + + @pytest.mark.usefixtures("mock_zeroconf") async def test_discovery_mqtt_no_api( hass: HomeAssistant, mock_client, mock_setup_entry: None diff --git a/tests/components/esphome/test_cover.py b/tests/components/esphome/test_cover.py index b190d287198f62..4cfe91c6dea1cd 100644 --- a/tests/components/esphome/test_cover.py +++ b/tests/components/esphome/test_cover.py @@ -7,7 +7,7 @@ APIClient, CoverInfo, CoverOperation, - CoverState, + CoverState as ESPHomeCoverState, EntityInfo, EntityState, UserService, @@ -26,10 +26,7 @@ SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -58,7 +55,7 @@ async def test_cover_entity( ) ] states = [ - CoverState( + ESPHomeCoverState( key=1, position=0.5, tilt=0.5, @@ -74,7 +71,7 @@ async def test_cover_entity( ) state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -142,28 +139,30 @@ async def test_cover_entity( mock_client.cover_command.reset_mock() mock_device.set_state( - CoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) + ESPHomeCoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED mock_device.set_state( - CoverState(key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING) + ESPHomeCoverState( + key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING + ) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING mock_device.set_state( - CoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) + ESPHomeCoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async def test_cover_entity_without_position( @@ -187,7 +186,7 @@ async def test_cover_entity_without_position( ) ] states = [ - CoverState( + ESPHomeCoverState( key=1, position=0.5, tilt=0.5, @@ -203,6 +202,6 @@ async def test_cover_entity_without_position( ) state = hass.states.get("cover.test_mycover") assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert ATTR_CURRENT_TILT_POSITION not in state.attributes assert ATTR_CURRENT_POSITION not in state.attributes diff --git a/tests/components/esphome/test_ffmpeg_proxy.py b/tests/components/esphome/test_ffmpeg_proxy.py index ef657ed8c7b029..295d8d2fda97df 100644 --- 
a/tests/components/esphome/test_ffmpeg_proxy.py +++ b/tests/components/esphome/test_ffmpeg_proxy.py @@ -1,13 +1,17 @@ """Tests for ffmpeg proxy view.""" +from collections.abc import Generator from http import HTTPStatus import io +import os import tempfile from unittest.mock import patch from urllib.request import pathname2url import wave +from aiohttp import client_exceptions import mutagen +import pytest from homeassistant.components import esphome from homeassistant.components.esphome.ffmpeg_proxy import async_create_proxy_url @@ -17,6 +21,29 @@ from tests.typing import ClientSessionGenerator +@pytest.fixture(name="wav_file_length") +def wav_file_length_fixture() -> int: + """Wanted length of temporary wave file.""" + return 1 + + +@pytest.fixture(name="wav_file") +def wav_file_fixture(wav_file_length: int) -> Generator[str]: + """Create a temporary file and fill it with 1s of silence.""" + with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: + _write_silence(temp_file.name, wav_file_length) + yield temp_file.name + + +def _write_silence(filename: str, length: int) -> None: + """Write silence to a file.""" + with wave.open(filename, "wb") as wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(bytes(16000 * 2 * length)) # length s + + async def test_async_create_proxy_url(hass: HomeAssistant) -> None: """Test that async_create_proxy_url returns the correct format.""" assert await async_setup_component(hass, "esphome", {}) @@ -40,6 +67,7 @@ async def test_async_create_proxy_url(hass: HomeAssistant) -> None: async def test_proxy_view( hass: HomeAssistant, hass_client: ClientSessionGenerator, + wav_file: str, ) -> None: """Test proxy HTTP view for converting audio.""" device_id = "1234" @@ -47,44 +75,36 @@ async def test_proxy_view( await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) client = await hass_client() - with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: - with wave.open(temp_file.name, "wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(16000 * 2)) # 1s + wav_url = pathname2url(wav_file) + convert_id = "test-id" + url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" - temp_file.seek(0) - wav_url = pathname2url(temp_file.name) - convert_id = "test-id" - url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" + # Should fail because we haven't allowed the URL yet + req = await client.get(url) + assert req.status == HTTPStatus.NOT_FOUND - # Should fail because we haven't allowed the URL yet - req = await client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - # Allow the URL - with patch( - "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", - return_value=convert_id, - ): - assert ( - async_create_proxy_url( - hass, device_id, wav_url, media_format="mp3", rate=22050, channels=2 - ) - == url + # Allow the URL + with patch( + "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", + return_value=convert_id, + ): + assert ( + async_create_proxy_url( + hass, device_id, wav_url, media_format="mp3", rate=22050, channels=2 ) + == url + ) - # Requesting the wrong media format should fail - wrong_url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.flac" - req = await client.get(wrong_url) - assert req.status == HTTPStatus.BAD_REQUEST + # Requesting the wrong media format should fail + wrong_url = 
f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.flac" + req = await client.get(wrong_url) + assert req.status == HTTPStatus.BAD_REQUEST - # Correct URL - req = await client.get(url) - assert req.status == HTTPStatus.OK + # Correct URL + req = await client.get(url) + assert req.status == HTTPStatus.OK - mp3_data = await req.content.read() + mp3_data = await req.content.read() # Verify conversion with io.BytesIO(mp3_data) as mp3_io: @@ -120,6 +140,7 @@ async def test_ffmpeg_file_doesnt_exist( async def test_lingering_process( hass: HomeAssistant, hass_client: ClientSessionGenerator, + wav_file: str, ) -> None: """Test that a new request stops the old ffmpeg process.""" device_id = "1234" @@ -127,65 +148,59 @@ async def test_lingering_process( await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) client = await hass_client() - with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: - with wave.open(temp_file.name, "wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(16000 * 2)) # 1s - - temp_file.seek(0) - wav_url = pathname2url(temp_file.name) - url1 = async_create_proxy_url( - hass, - device_id, - wav_url, - media_format="wav", - rate=22050, - channels=2, - width=2, - ) - - # First request will start ffmpeg - req1 = await client.get(url1) - assert req1.status == HTTPStatus.OK - - # Only read part of the data - await req1.content.readexactly(100) - - # Allow another URL - url2 = async_create_proxy_url( - hass, - device_id, - wav_url, - media_format="wav", - rate=22050, - channels=2, - width=2, - ) - - req2 = await client.get(url2) - assert req2.status == HTTPStatus.OK - - wav_data = await req2.content.read() + wav_url = pathname2url(wav_file) + url1 = async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + + # First request will start ffmpeg + req1 = await client.get(url1) + assert req1.status == HTTPStatus.OK + + # Only read part of the data + await req1.content.readexactly(100) + + # Allow another URL + url2 = async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + + req2 = await client.get(url2) + assert req2.status == HTTPStatus.OK + + wav_data = await req2.content.read() # All of the data should be there because this is a new ffmpeg process - with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as wav_file: + with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as received_wav_file: # We can't use getnframes() here because the WAV header will be incorrect. # WAV encoders usually go back and update the WAV header after all of # the frames are written, but ffmpeg can't do that because we're # streaming the data. # So instead, we just read and count frames until we run out. 
num_frames = 0 - while chunk := wav_file.readframes(1024): + while chunk := received_wav_file.readframes(1024): num_frames += len(chunk) // (2 * 2) # 2 channels, 16-bit samples assert num_frames == 22050 # 1s +@pytest.mark.parametrize("wav_file_length", [10]) async def test_request_same_url_multiple_times( hass: HomeAssistant, hass_client: ClientSessionGenerator, + wav_file: str, ) -> None: """Test that the ffmpeg process is restarted if the same URL is requested multiple times.""" device_id = "1234" @@ -193,14 +208,108 @@ async def test_request_same_url_multiple_times( await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) client = await hass_client() + wav_url = pathname2url(wav_file) + url = async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + + # First request will start ffmpeg + req1 = await client.get(url) + assert req1.status == HTTPStatus.OK + + # Only read part of the data + await req1.content.readexactly(100) + + # Second request should restart ffmpeg + req2 = await client.get(url) + assert req2.status == HTTPStatus.OK + + wav_data = await req2.content.read() + + # All of the data should be there because this is a new ffmpeg process + with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as received_wav_file: + num_frames = 0 + while chunk := received_wav_file.readframes(1024): + num_frames += len(chunk) // (2 * 2) # 2 channels, 16-bit samples + + assert num_frames == 22050 * 10 # 10s + + +async def test_max_conversions_per_device( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test that each device has a maximum number of conversions (currently 2).""" + max_conversions = 2 + device_ids = ["1234", "5678"] + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + with tempfile.TemporaryDirectory() as temp_dir: + wav_paths = [ + os.path.join(temp_dir, f"{i}.wav") for i in range(max_conversions + 1) + ] + for wav_path in wav_paths: + _write_silence(wav_path, 10) + + wav_urls = [pathname2url(p) for p in wav_paths] + + # Each device will have max + 1 conversions + device_urls = { + device_id: [ + async_create_proxy_url( + hass, + device_id, + wav_url, + media_format="wav", + rate=22050, + channels=2, + width=2, + ) + for wav_url in wav_urls + ] + for device_id in device_ids + } + + for urls in device_urls.values(): + # First URL should fail because it was overwritten by the others + req = await client.get(urls[0]) + assert req.status == HTTPStatus.BAD_REQUEST + + # All other URLs should succeed + for url in urls[1:]: + req = await client.get(url) + assert req.status == HTTPStatus.OK + + +async def test_abort_on_shutdown( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test we abort on Home Assistant shutdown.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: with wave.open(temp_file.name, "wb") as wav_file: wav_file.setframerate(16000) wav_file.setsampwidth(2) wav_file.setnchannels(1) - wav_file.writeframes(bytes(16000 * 2 * 10)) # 10s + wav_file.writeframes(bytes(16000 * 2)) # 1s + + wav_url = pathname2url(temp_file.name) + convert_id = "test-id" + url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" - temp_file.seek(0) wav_url = pathname2url(temp_file.name) url = async_create_proxy_url( hass, @@ -212,23 +321,14 
@@ async def test_request_same_url_multiple_times( width=2, ) - # First request will start ffmpeg - req1 = await client.get(url) - assert req1.status == HTTPStatus.OK - - # Only read part of the data - await req1.content.readexactly(100) - - # Second request should restart ffmpeg - req2 = await client.get(url) - assert req2.status == HTTPStatus.OK - - wav_data = await req2.content.read() + # Get URL and start reading + req = await client.get(url) + assert req.status == HTTPStatus.OK + initial_mp3_data = await req.content.read(4) + assert initial_mp3_data == b"RIFF" - # All of the data should be there because this is a new ffmpeg process - with io.BytesIO(wav_data) as wav_io, wave.open(wav_io, "rb") as wav_file: - num_frames = 0 - while chunk := wav_file.readframes(1024): - num_frames += len(chunk) // (2 * 2) # 2 channels, 16-bit samples + # Shut down Home Assistant + await hass.async_stop() - assert num_frames == 22050 * 10 # 10s + with pytest.raises(client_exceptions.ClientPayloadError): + await req.content.read() diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 83e89b1de00c2c..7593ab21838d28 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -531,7 +531,8 @@ async def test_generic_device_update_entity_has_update( state = hass.states.get("update.test_myupdate") assert state is not None assert state.state == STATE_ON - assert state.attributes["in_progress"] == 50 + assert state.attributes["in_progress"] is True + assert state.attributes["update_percentage"] == 50 await hass.services.async_call( HOMEASSISTANT_DOMAIN, diff --git a/tests/components/evohome/conftest.py b/tests/components/evohome/conftest.py index b46c62f8651685..6daab3f32bb524 100644 --- a/tests/components/evohome/conftest.py +++ b/tests/components/evohome/conftest.py @@ -11,12 +11,15 @@ from aiohttp import ClientSession from evohomeasync2 import EvohomeClient from evohomeasync2.broker import Broker +from evohomeasync2.controlsystem import ControlSystem +from evohomeasync2.zone import Zone import pytest from homeassistant.components.evohome import CONF_PASSWORD, CONF_USERNAME, DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util +from homeassistant.util import dt as dt_util, slugify from homeassistant.util.json import JsonArrayType, JsonObjectType from .const import ACCESS_TOKEN, REFRESH_TOKEN, USERNAME @@ -138,7 +141,14 @@ async def setup_evohome( patch("homeassistant.components.evohome.ev1.EvohomeClient", return_value=None), patch("evohomeasync2.broker.Broker.get", mock_get_factory(install)), ): - mock_client.side_effect = EvohomeClient + evo: EvohomeClient | None = None + + def evohome_client(*args, **kwargs) -> EvohomeClient: + nonlocal evo + evo = EvohomeClient(*args, **kwargs) + return evo + + mock_client.side_effect = evohome_client assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) await hass.async_block_till_done() @@ -150,6 +160,49 @@ async def setup_evohome( assert isinstance(mock_client.call_args.kwargs["session"], ClientSession) - assert mock_client.account_info is not None + assert evo and evo.account_info is not None + + mock_client.return_value = evo + yield mock_client + + +@pytest.fixture +async def evohome( + hass: HomeAssistant, + config: dict[str, str], + install: str, +) -> AsyncGenerator[MagicMock]: + """Return the mocked evohome client for this 
install fixture.""" + async for mock_client in setup_evohome(hass, config, install=install): yield mock_client + + +@pytest.fixture +async def ctl_id( + hass: HomeAssistant, + config: dict[str, str], + install: MagicMock, +) -> AsyncGenerator[str]: + """Return the entity_id of the evohome integration's controller.""" + + async for mock_client in setup_evohome(hass, config, install=install): + evo: EvohomeClient = mock_client.return_value + ctl: ControlSystem = evo._get_single_tcs() + + yield f"{Platform.CLIMATE}.{slugify(ctl.location.name)}" + + +@pytest.fixture +async def zone_id( + hass: HomeAssistant, + config: dict[str, str], + install: MagicMock, +) -> AsyncGenerator[str]: + """Return the entity_id of the evohome integration's first zone.""" + + async for mock_client in setup_evohome(hass, config, install=install): + evo: EvohomeClient = mock_client.return_value + zone: Zone = list(evo._get_single_tcs().zones.values())[0] + + yield f"{Platform.CLIMATE}.{slugify(zone.name)}" diff --git a/tests/components/evohome/snapshots/test_climate.ambr b/tests/components/evohome/snapshots/test_climate.ambr index 1a77cf0e80de6a..ce7fcf2744e4a1 100644 --- a/tests/components/evohome/snapshots/test_climate.ambr +++ b/tests/components/evohome/snapshots/test_climate.ambr @@ -1,190 +1,1459 @@ # serializer version: 1 -# name: test_zone_set_hvac_mode[default] +# name: test_ctl_set_hvac_mode[default] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[h032585] + list([ + tuple( + 'Off', + ), + tuple( + 'Heat', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[h099625] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[minimal] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_set_hvac_mode[sys_004] + list([ + tuple( + 'HeatingOff', + ), + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_off[default] + list([ + tuple( + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_off[h032585] + list([ + tuple( + 'Off', + ), + ]) +# --- +# name: test_ctl_turn_off[h099625] + list([ + tuple( + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_off[minimal] list([ tuple( - dict({ - 'HeatSetpointValue': 5.0, - 'setpointMode': , + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_off[sys_004] + list([ + tuple( + 'HeatingOff', + ), + ]) +# --- +# name: test_ctl_turn_on[default] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_on[h032585] + list([ + tuple( + 'Heat', + ), + ]) +# --- +# name: test_ctl_turn_on[h099625] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_on[minimal] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_ctl_turn_on[sys_004] + list([ + tuple( + 'Auto', + ), + ]) +# --- +# name: test_setup_platform[botched][climate.bathroom_dn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Bathroom Dn', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 
'temperature': 20.0, + }), + 'zone_id': '3432579', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.bathroom_dn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.dead_zone-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Dead Zone', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': False, + }), + 'zone_id': '3432521', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.dead_zone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.front_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Front Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'temporary', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + dict({ + 'faultType': 'TempZoneActuatorLowBattery', + 'since': '2022-03-02T04:50:20', + }), + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'TemporaryOverride', + 'target_heat_temperature': 21.0, + 'until': '2022-03-07T20:00:00+01:00', + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432577', + }), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.front_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.kids_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Kids Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '3449703', }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kids_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 
'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 20.0, + }), + 'zone_id': '3432578', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.main_bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'Main Bedroom', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.0, + }), + 'zone_id': '3432580', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.main_bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.main_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + dict({ + 'faultType': 'TempZoneActuatorCommunicationLost', + 'since': '2022-03-02T15:56:01', + }), + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[botched][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.7, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.bathroom_dn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Bathroom Dn', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 20.0, + }), + 'zone_id': '3432579', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.bathroom_dn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.dead_zone-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Dead Zone', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': False, + }), + 'zone_id': '3432521', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.dead_zone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.front_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Front Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'temporary', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'TemporaryOverride', + 'target_heat_temperature': 21.0, + 'until': '2022-03-07T20:00:00+01:00', + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432577', + }), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.front_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.kids_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Kids Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 
'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '3449703', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kids_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 20.0, + }), + 'zone_id': '3432578', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.main_bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'Main Bedroom', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.0, + }), + 'zone_id': '3432580', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.main_bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.main_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'current_temperature': 19.7, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[default][climate.spare_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Spare Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 14.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '3450733', + }), + 'supported_features': , + 'temperature': 14.0, + }), + 'context': , + 'entity_id': 'climate.spare_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h032585][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '416856', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Heat', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h032585][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 4.5, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '416856', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h099625][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': None, + 'preset_modes': list([ + 'eco', + 'away', + 
]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '8557535', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Auto', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h099625][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+03:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+03:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '8557539', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[h099625][climate.thermostat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+03:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+03:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '8557541', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[minimal][climate.main_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+01:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+01:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[minimal][climate.my_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 
'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[sys_004][climate.living_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Living room', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': None, + 'preset_modes': list([ + 'eco', + 'away', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '4187769', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Auto', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.living_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_setup_platform[sys_004][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Thermostat', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 15.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T22:10:00+02:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T08:00:00+02:00', + 'this_sp_temp': 16.0, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '4187768', + }), + 'supported_features': , + 'temperature': 15.0, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_zone_set_hvac_mode[default] + list([ + tuple( + 5.0, ), ]) # --- # name: test_zone_set_hvac_mode[h032585] list([ tuple( - dict({ - 'HeatSetpointValue': 4.5, - 'setpointMode': , - }), + 4.5, ), ]) # --- # name: test_zone_set_hvac_mode[h099625] list([ tuple( - dict({ - 'HeatSetpointValue': 5.0, - 'setpointMode': , - }), + 5.0, ), ]) # --- # name: test_zone_set_hvac_mode[minimal] list([ tuple( - dict({ - 'HeatSetpointValue': 5.0, - 'setpointMode': , - }), + 5.0, ), ]) # --- # name: test_zone_set_hvac_mode[sys_004] list([ tuple( - dict({ - 'HeatSetpointValue': 5.0, - 'setpointMode': , - }), + 5.0, ), ]) # --- # name: test_zone_set_preset_mode[default] list([ tuple( - dict({ - 'HeatSetpointValue': 17.0, - 'setpointMode': , - }), + 17.0, ), tuple( - dict({ - 'HeatSetpointValue': 17.0, - 'setpointMode': , - 'timeUntil': '2024-07-10T21:10:00Z', - }), + 17.0, ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), ]) # --- # name: test_zone_set_preset_mode[h032585] list([ tuple( - dict({ - 'HeatSetpointValue': 21.5, - 'setpointMode': , - }), + 21.5, ), tuple( - dict({ - 'HeatSetpointValue': 21.5, - 'setpointMode': , - 'timeUntil': '2024-07-10T21:10:00Z', - }), + 21.5, ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), ]) # --- # name: 
test_zone_set_preset_mode[h099625] list([ tuple( - dict({ - 'HeatSetpointValue': 21.5, - 'setpointMode': , - }), + 21.5, ), tuple( - dict({ - 'HeatSetpointValue': 21.5, - 'setpointMode': , - 'timeUntil': '2024-07-10T19:10:00Z', - }), + 21.5, ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 19, 10, tzinfo=datetime.timezone.utc), + }), ]) # --- # name: test_zone_set_preset_mode[minimal] list([ tuple( - dict({ - 'HeatSetpointValue': 17.0, - 'setpointMode': , - }), + 17.0, ), tuple( - dict({ - 'HeatSetpointValue': 17.0, - 'setpointMode': , - 'timeUntil': '2024-07-10T21:10:00Z', - }), + 17.0, ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), ]) # --- # name: test_zone_set_preset_mode[sys_004] list([ tuple( - dict({ - 'HeatSetpointValue': 15.0, - 'setpointMode': , - }), + 15.0, ), tuple( - dict({ - 'HeatSetpointValue': 15.0, - 'setpointMode': , - 'timeUntil': '2024-07-10T20:10:00Z', - }), + 15.0, ), + dict({ + 'until': datetime.datetime(2024, 7, 10, 20, 10, tzinfo=datetime.timezone.utc), + }), ]) # --- # name: test_zone_set_temperature[default] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[h032585] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[h099625] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 19, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[minimal] + list([ + dict({ + 'until': datetime.datetime(2024, 7, 10, 21, 10, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_zone_set_temperature[sys_004] + list([ + dict({ + 'until': None, + }), + ]) +# --- +# name: test_zone_turn_off[default] list([ tuple( - dict({ - 'HeatSetpointValue': 19.1, - 'setpointMode': , - 'timeUntil': '2024-07-10T21:10:00Z', - }), + 5.0, ), ]) # --- -# name: test_zone_set_temperature[h032585] +# name: test_zone_turn_off[h032585] list([ tuple( - dict({ - 'HeatSetpointValue': 19.1, - 'setpointMode': , - 'timeUntil': '2024-07-10T21:10:00Z', - }), + 4.5, ), ]) # --- -# name: test_zone_set_temperature[h099625] +# name: test_zone_turn_off[h099625] list([ tuple( - dict({ - 'HeatSetpointValue': 19.1, - 'setpointMode': , - 'timeUntil': '2024-07-10T19:10:00Z', - }), + 5.0, ), ]) # --- -# name: test_zone_set_temperature[minimal] +# name: test_zone_turn_off[minimal] list([ tuple( - dict({ - 'HeatSetpointValue': 19.1, - 'setpointMode': , - 'timeUntil': '2024-07-10T21:10:00Z', - }), + 5.0, ), ]) # --- -# name: test_zone_set_temperature[sys_004] +# name: test_zone_turn_off[sys_004] list([ tuple( - dict({ - 'HeatSetpointValue': 19.1, - 'setpointMode': , - }), + 5.0, ), ]) # --- diff --git a/tests/components/evohome/snapshots/test_init.ambr b/tests/components/evohome/snapshots/test_init.ambr index 11237e6b35abe2..d2e91e3c43d195 100644 --- a/tests/components/evohome/snapshots/test_init.ambr +++ b/tests/components/evohome/snapshots/test_init.ambr @@ -1,1231 +1,19 @@ # serializer version: 1 -# name: test_entities[botched] - list([ - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.7, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': 'eco', - 'preset_modes': list([ - 'Reset', - 'eco', - 'away', - 'home', - 'Custom', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': 
'3432522', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'AutoWithEco', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Dead Zone', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': False, - }), - 'zone_id': '3432521', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.dead_zone', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Main Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - dict({ - 'faultType': 'TempZoneActuatorCommunicationLost', - 'since': '2022-03-02T15:56:01', - }), - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432576', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.main_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Front Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'temporary', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - dict({ - 'faultType': 'TempZoneActuatorLowBattery', - 'since': '2022-03-02T04:50:20', - }), - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'TemporaryOverride', - 'target_heat_temperature': 21.0, - 'until': '2022-03-07T20:00:00+01:00', - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432577', - }), - 'supported_features': , - 'temperature': 21.0, - }), - 'context': , - 'entity_id': 'climate.front_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Kitchen', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 
'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432578', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kitchen', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Bathroom Dn', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432579', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 'climate.bathroom_dn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.0, - 'friendly_name': 'Main Bedroom', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.0, - }), - 'zone_id': '3432580', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 'climate.main_bedroom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Kids Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '3449703', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kids_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'away_mode': 'on', - 'current_temperature': 23, - 'friendly_name': 'Domestic Hot Water', - 'icon': 'mdi:thermometer-lines', - 
'max_temp': 60, - 'min_temp': 43, - 'operation_list': list([ - 'auto', - 'on', - 'off', - ]), - 'operation_mode': 'off', - 'status': dict({ - 'active_faults': list([ - ]), - 'dhw_id': '3933910', - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T13:00:00+01:00', - 'next_sp_state': 'Off', - 'this_sp_from': '2024-07-10T12:00:00+01:00', - 'this_sp_state': 'On', - }), - 'state_status': dict({ - 'mode': 'PermanentOverride', - 'state': 'Off', - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 23.0, - }), - }), - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': None, - }), - 'context': , - 'entity_id': 'water_heater.domestic_hot_water', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }), - ]) +# name: test_setup[botched] + dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) # --- -# name: test_entities[default] - list([ - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.7, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': 'eco', - 'preset_modes': list([ - 'Reset', - 'eco', - 'away', - 'home', - 'Custom', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '3432522', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'AutoWithEco', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': None, - 'friendly_name': 'Dead Zone', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': False, - }), - 'zone_id': '3432521', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.dead_zone', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Main Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432576', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.main_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 
'friendly_name': 'Front Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'temporary', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'TemporaryOverride', - 'target_heat_temperature': 21.0, - 'until': '2022-03-07T20:00:00+01:00', - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432577', - }), - 'supported_features': , - 'temperature': 21.0, - }), - 'context': , - 'entity_id': 'climate.front_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Kitchen', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432578', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kitchen', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 20.0, - 'friendly_name': 'Bathroom Dn', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 20.0, - }), - 'zone_id': '3432579', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 'climate.bathroom_dn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.0, - 'friendly_name': 'Main Bedroom', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 16.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.0, - }), - 'zone_id': '3432580', - }), - 'supported_features': , - 'temperature': 16.0, - }), - 'context': , - 'entity_id': 
'climate.main_bedroom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Kids Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '3449703', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.kids_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Spare Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 'target_heat_temperature': 14.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '3450733', - }), - 'supported_features': , - 'temperature': 14.0, - }), - 'context': , - 'entity_id': 'climate.spare_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'away_mode': 'on', - 'current_temperature': 23, - 'friendly_name': 'Domestic Hot Water', - 'icon': 'mdi:thermometer-lines', - 'max_temp': 60, - 'min_temp': 43, - 'operation_list': list([ - 'auto', - 'on', - 'off', - ]), - 'operation_mode': 'off', - 'status': dict({ - 'active_faults': list([ - ]), - 'dhw_id': '3933910', - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T13:00:00+01:00', - 'next_sp_state': 'Off', - 'this_sp_from': '2024-07-10T12:00:00+01:00', - 'this_sp_state': 'On', - }), - 'state_status': dict({ - 'mode': 'PermanentOverride', - 'state': 'Off', - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 23.0, - }), - }), - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': None, - }), - 'context': , - 'entity_id': 'water_heater.domestic_hot_water', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }), - ]) +# name: test_setup[default] + dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) # --- -# name: test_entities[h032585] - list([ - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '416856', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'Heat', - }), - }), - 'supported_features': , - }), - 'context': , - 
'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'THERMOSTAT', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 32.0, - 'min_temp': 4.5, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 21.5, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.5, - }), - 'zone_id': '416856', - }), - 'supported_features': , - 'temperature': 21.5, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - ]) +# name: test_setup[h032585] + dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) # --- -# name: test_entities[h099625] - list([ - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': None, - 'preset_modes': list([ - 'eco', - 'away', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '8557535', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'Auto', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'THERMOSTAT', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 21.5, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+03:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+03:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.5, - }), - 'zone_id': '8557539', - }), - 'supported_features': , - 'temperature': 21.5, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 21.5, - 'friendly_name': 'THERMOSTAT', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 21.5, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+03:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+03:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 21.5, - }), - 'zone_id': '8557541', - }), - 'supported_features': , - 'temperature': 21.5, - }), - 'context': , - 
'entity_id': 'climate.thermostat_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - ]) +# name: test_setup[h099625] + dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) # --- -# name: test_entities[minimal] - list([ - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'My Home', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': 'eco', - 'preset_modes': list([ - 'Reset', - 'eco', - 'away', - 'home', - 'Custom', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '3432522', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'AutoWithEco', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.my_home', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.0, - 'friendly_name': 'Main Room', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'none', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'FollowSchedule', - 'target_heat_temperature': 17.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+01:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+01:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.0, - }), - 'zone_id': '3432576', - }), - 'supported_features': , - 'temperature': 17.0, - }), - 'context': , - 'entity_id': 'climate.main_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - ]) +# name: test_setup[minimal] + dict_keys(['refresh_system', 'reset_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) # --- -# name: test_entities[sys_004] - list([ - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Living room', - 'hvac_modes': list([ - , - , - ]), - 'icon': 'mdi:thermostat', - 'max_temp': 35, - 'min_temp': 7, - 'preset_mode': None, - 'preset_modes': list([ - 'eco', - 'away', - ]), - 'status': dict({ - 'active_system_faults': list([ - ]), - 'system_id': '4187769', - 'system_mode_status': dict({ - 'is_permanent': True, - 'mode': 'Auto', - }), - }), - 'supported_features': , - }), - 'context': , - 'entity_id': 'climate.living_room', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': 19.5, - 'friendly_name': 'Thermostat', - 'hvac_modes': list([ - , - , - ]), - 'max_temp': 35.0, - 'min_temp': 5.0, - 'preset_mode': 'permanent', - 'preset_modes': list([ - 'none', - 'temporary', - 'permanent', - ]), - 'status': dict({ - 'active_faults': list([ - ]), - 'setpoint_status': dict({ - 'setpoint_mode': 'PermanentOverride', - 'target_heat_temperature': 15.0, - }), - 'setpoints': dict({ - 'next_sp_from': '2024-07-10T22:10:00+02:00', - 'next_sp_temp': 18.6, - 'this_sp_from': '2024-07-10T08:00:00+02:00', - 'this_sp_temp': 16.0, - }), - 'temperature_status': dict({ - 'is_available': True, - 'temperature': 19.5, - }), - 'zone_id': '4187768', - }), - 'supported_features': , - 'temperature': 15.0, - }), - 'context': , - 'entity_id': 'climate.thermostat', - 'last_changed': 
, - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }), - ]) +# name: test_setup[sys_004] + dict_keys(['refresh_system', 'set_system_mode', 'clear_zone_override', 'set_zone_override']) # --- diff --git a/tests/components/evohome/snapshots/test_water_heater.ambr b/tests/components/evohome/snapshots/test_water_heater.ambr index b521772e6c7174..4cdeb28f445fd0 100644 --- a/tests/components/evohome/snapshots/test_water_heater.ambr +++ b/tests/components/evohome/snapshots/test_water_heater.ambr @@ -1,19 +1,105 @@ # serializer version: 1 # name: test_set_operation_mode[default] list([ - tuple( - dict({ - 'mode': , - 'state': , - 'untilTime': '2024-07-10T12:00:00Z', + dict({ + 'until': datetime.datetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), + }), + dict({ + 'until': datetime.datetime(2024, 7, 10, 12, 0, tzinfo=datetime.timezone.utc), + }), + ]) +# --- +# name: test_setup_platform[botched][water_heater.domestic_hot_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'away_mode': 'on', + 'current_temperature': 23, + 'friendly_name': 'Domestic Hot Water', + 'icon': 'mdi:thermometer-lines', + 'max_temp': 60, + 'min_temp': 43, + 'operation_list': list([ + 'auto', + 'on', + 'off', + ]), + 'operation_mode': 'off', + 'status': dict({ + 'active_faults': list([ + ]), + 'dhw_id': '3933910', + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T13:00:00+01:00', + 'next_sp_state': 'Off', + 'this_sp_from': '2024-07-10T12:00:00+01:00', + 'this_sp_state': 'On', + }), + 'state_status': dict({ + 'mode': 'PermanentOverride', + 'state': 'Off', + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 23.0, + }), }), - ), - tuple( - dict({ - 'mode': , - 'state': , - 'untilTime': '2024-07-10T12:00:00Z', + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.domestic_hot_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_platform[default][water_heater.domestic_hot_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'away_mode': 'on', + 'current_temperature': 23, + 'friendly_name': 'Domestic Hot Water', + 'icon': 'mdi:thermometer-lines', + 'max_temp': 60, + 'min_temp': 43, + 'operation_list': list([ + 'auto', + 'on', + 'off', + ]), + 'operation_mode': 'off', + 'status': dict({ + 'active_faults': list([ + ]), + 'dhw_id': '3933910', + 'setpoints': dict({ + 'next_sp_from': '2024-07-10T13:00:00+01:00', + 'next_sp_state': 'Off', + 'this_sp_from': '2024-07-10T12:00:00+01:00', + 'this_sp_state': 'On', + }), + 'state_status': dict({ + 'mode': 'PermanentOverride', + 'state': 'Off', + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 23.0, + }), }), - ), - ]) + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.domestic_hot_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) # --- diff --git a/tests/components/evohome/test_climate.py b/tests/components/evohome/test_climate.py index 602a2ac561a982..325dd914bc0e4b 100644 --- a/tests/components/evohome/test_climate.py +++ b/tests/components/evohome/test_climate.py @@ -1,4 +1,4 @@ -"""The tests for climate entities of evohome. +"""The tests for the climate platform of evohome. All evohome systems have controllers and at least one zone. 
""" @@ -11,78 +11,221 @@ import pytest from syrupy import SnapshotAssertion -from homeassistant.components.climate import HVACMode -from homeassistant.components.evohome import DOMAIN -from homeassistant.components.evohome.climate import EvoZone -from homeassistant.components.evohome.coordinator import EvoBroker -from homeassistant.const import Platform +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ATTR_PRESET_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.util import dt as dt_util +from homeassistant.exceptions import HomeAssistantError from .conftest import setup_evohome from .const import TEST_INSTALLS -def get_zone_entity(hass: HomeAssistant) -> EvoZone: - """Return the entity of the first zone of the evohome system.""" +@pytest.mark.parametrize("install", [*TEST_INSTALLS, "botched"]) +async def test_setup_platform( + hass: HomeAssistant, + config: dict[str, str], + install: str, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test entities and their states after setup of evohome.""" - broker: EvoBroker = hass.data[DOMAIN]["broker"] + # Cannot use the evohome fixture, as need to set dtm first + # - some extended state attrs are relative the current time + freezer.move_to("2024-07-10T12:00:00Z") - unique_id = broker.tcs._zones[0]._id - if unique_id == broker.tcs._id: - unique_id += "z" # special case of merged controller/zone + async for _ in setup_evohome(hass, config, install=install): + pass - entity_registry = er.async_get(hass) - entity_id = entity_registry.async_get_entity_id(Platform.CLIMATE, DOMAIN, unique_id) + for x in hass.states.async_all(Platform.CLIMATE): + assert x == snapshot(name=f"{x.entity_id}-state") - component: EntityComponent = hass.data.get(Platform.CLIMATE) # type: ignore[assignment] - return next(e for e in component.entities if e.entity_id == entity_id) # type: ignore[return-value] + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_ctl_set_hvac_mode( + hass: HomeAssistant, + ctl_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_SET_HVAC_MODE of an evohome controller.""" + + results = [] + + # SERVICE_SET_HVAC_MODE: HVACMode.OFF + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: ctl_id, + ATTR_HVAC_MODE: HVACMode.OFF, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # 'HeatingOff' or 'Off' + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + # SERVICE_SET_HVAC_MODE: HVACMode.HEAT + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: ctl_id, + ATTR_HVAC_MODE: HVACMode.HEAT, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # 'Auto' or 'Heat' + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot 
@pytest.mark.parametrize("install", TEST_INSTALLS) -async def test_zone_set_hvac_mode( +async def test_ctl_set_temperature( hass: HomeAssistant, - config: dict[str, str], - install: str, + ctl_id: str, +) -> None: + """Test SERVICE_SET_TEMPERATURE of an evohome controller.""" + + # Entity climate.xxx does not support this service + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: ctl_id, + ATTR_TEMPERATURE: 19.1, + }, + blocking=True, + ) + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_ctl_turn_off( + hass: HomeAssistant, + ctl_id: str, snapshot: SnapshotAssertion, ) -> None: - """Test climate methods of a evohome-compatible zone.""" + """Test SERVICE_TURN_OFF of an evohome controller.""" results = [] - async for _ in setup_evohome(hass, config, install=install): - zone = get_zone_entity(hass) + # SERVICE_TURN_OFF + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: ctl_id, + }, + blocking=True, + ) - assert zone.hvac_modes == [HVACMode.OFF, HVACMode.HEAT] + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # 'HeatingOff' or 'Off' + assert mock_fcn.await_args.kwargs == {"until": None} - # set_hvac_mode(HVACMode.HEAT): FollowSchedule - with patch("evohomeasync2.zone.Zone._set_mode") as mock_fcn: - await zone.async_set_hvac_mode(HVACMode.HEAT) + results.append(mock_fcn.await_args.args) - assert mock_fcn.await_count == 1 - assert install != "default" or mock_fcn.await_args.args == ( - { - "setpointMode": "FollowSchedule", - }, - ) - assert mock_fcn.await_args.kwargs == {} + assert results == snapshot - # set_hvac_mode(HVACMode.OFF): PermanentOverride, minHeatSetpoint - with patch("evohomeasync2.zone.Zone._set_mode") as mock_fcn: - await zone.async_set_hvac_mode(HVACMode.OFF) - assert mock_fcn.await_count == 1 - assert install != "default" or mock_fcn.await_args.args == ( - { - "setpointMode": "PermanentOverride", - "HeatSetpointValue": 5.0, # varies by install - }, - ) - assert mock_fcn.await_args.kwargs == {} +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_ctl_turn_on( + hass: HomeAssistant, + ctl_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_TURN_ON of an evohome controller.""" - results.append(mock_fcn.await_args.args) + results = [] + + # SERVICE_TURN_ON + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: ctl_id, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # 'Auto' or 'Heat' + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_set_hvac_mode( + hass: HomeAssistant, + zone_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_SET_HVAC_MODE of an evohome heating zone.""" + + results = [] + + # SERVICE_SET_HVAC_MODE: HVACMode.HEAT + with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_HVAC_MODE: HVACMode.HEAT, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert 
mock_fcn.await_args.kwargs == {} + + # SERVICE_SET_HVAC_MODE: HVACMode.OFF + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_HVAC_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_HVAC_MODE: HVACMode.OFF, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # minimum target temp + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) assert results == snapshot @@ -90,63 +233,67 @@ async def test_zone_set_hvac_mode( @pytest.mark.parametrize("install", TEST_INSTALLS) async def test_zone_set_preset_mode( hass: HomeAssistant, - config: dict[str, str], - install: str, + zone_id: str, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, ) -> None: - """Test climate methods of a evohome-compatible zone.""" + """Test SERVICE_SET_PRESET_MODE of an evohome heating zone.""" freezer.move_to("2024-07-10T12:00:00Z") results = [] - async for _ in setup_evohome(hass, config, install=install): - zone = get_zone_entity(hass) - - assert zone.preset_modes == ["none", "temporary", "permanent"] - - # set_preset_mode(none): FollowSchedule - with patch("evohomeasync2.zone.Zone._set_mode") as mock_fcn: - await zone.async_set_preset_mode("none") - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ( - { - "setpointMode": "FollowSchedule", - }, - ) - assert mock_fcn.await_args.kwargs == {} - - # set_preset_mode(permanent): PermanentOverride - with patch("evohomeasync2.zone.Zone._set_mode") as mock_fcn: - await zone.async_set_preset_mode("permanent") - - assert mock_fcn.await_count == 1 - assert install != "default" or mock_fcn.await_args.args == ( - { - "setpointMode": "PermanentOverride", - "HeatSetpointValue": 17.0, # varies by install - }, - ) - assert mock_fcn.await_args.kwargs == {} - - results.append(mock_fcn.await_args.args) - - # set_preset_mode(permanent): TemporaryOverride - with patch("evohomeasync2.zone.Zone._set_mode") as mock_fcn: - await zone.async_set_preset_mode("temporary") - - assert mock_fcn.await_count == 1 - assert install != "default" or mock_fcn.await_args.args == ( - { - "setpointMode": "TemporaryOverride", - "HeatSetpointValue": 17.0, # varies by install - "timeUntil": "2024-07-10T21:10:00Z", # varies by install - }, - ) - assert mock_fcn.await_args.kwargs == {} - - results.append(mock_fcn.await_args.args) + # SERVICE_SET_PRESET_MODE: none + with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_PRESET_MODE: "none", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + # SERVICE_SET_PRESET_MODE: permanent + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_PRESET_MODE: "permanent", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # current target temp + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + # SERVICE_SET_PRESET_MODE: temporary + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_PRESET_MODE: 
"temporary", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # current target temp + assert mock_fcn.await_args.kwargs != {} # next setpoint dtm + + results.append(mock_fcn.await_args.args) + results.append(mock_fcn.await_args.kwargs) assert results == snapshot @@ -154,50 +301,84 @@ async def test_zone_set_preset_mode( @pytest.mark.parametrize("install", TEST_INSTALLS) async def test_zone_set_temperature( hass: HomeAssistant, - config: dict[str, str], - install: str, + zone_id: str, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, ) -> None: - """Test climate methods of a evohome-compatible zone.""" + """Test SERVICE_SET_TEMPERATURE of an evohome heating zone.""" freezer.move_to("2024-07-10T12:00:00Z") results = [] - async for _ in setup_evohome(hass, config, install=install): - zone = get_zone_entity(hass) - - # set_temperature(temp): TemporaryOverride, advanced - with patch("evohomeasync2.zone.Zone._set_mode") as mock_fcn: - await zone.async_set_temperature(temperature=19.1) - - assert mock_fcn.await_count == 1 - assert install != "default" or mock_fcn.await_args.args == ( - { - "setpointMode": "TemporaryOverride", - "HeatSetpointValue": 19.1, - "timeUntil": "2024-07-10T21:10:00Z", # varies by install - }, - ) - assert mock_fcn.await_args.kwargs == {} - - results.append(mock_fcn.await_args.args) - - # set_temperature(temp, until): TemporaryOverride, until - with patch("evohomeasync2.zone.Zone._set_mode") as mock_fcn: - await zone.async_set_temperature( - temperature=19.2, - until=dt_util.parse_datetime("2024-07-10T13:30:00Z"), - ) - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ( - { - "setpointMode": "TemporaryOverride", - "HeatSetpointValue": 19.2, - "timeUntil": "2024-07-10T13:30:00Z", - }, - ) - assert mock_fcn.await_args.kwargs == {} + # SERVICE_SET_TEMPERATURE: temperature + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: zone_id, + ATTR_TEMPERATURE: 19.1, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == (19.1,) + assert mock_fcn.await_args.kwargs != {} # next setpoint dtm + + results.append(mock_fcn.await_args.kwargs) assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_turn_off( + hass: HomeAssistant, + zone_id: str, + snapshot: SnapshotAssertion, +) -> None: + """Test SERVICE_TURN_OFF of an evohome heating zone.""" + + results = [] + + # SERVICE_TURN_OFF + with patch("evohomeasync2.zone.Zone.set_temperature") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: zone_id, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args != () # minimum target temp + assert mock_fcn.await_args.kwargs == {"until": None} + + results.append(mock_fcn.await_args.args) + + assert results == snapshot + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_zone_turn_on( + hass: HomeAssistant, + zone_id: str, +) -> None: + """Test SERVICE_TURN_ON of an evohome heating zone.""" + + # SERVICE_TURN_ON + with patch("evohomeasync2.zone.Zone.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.CLIMATE, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: zone_id, + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert 
mock_fcn.await_args.kwargs == {} diff --git a/tests/components/evohome/test_init.py b/tests/components/evohome/test_init.py index b61efe9b0666f7..49a854016eafbe 100644 --- a/tests/components/evohome/test_init.py +++ b/tests/components/evohome/test_init.py @@ -2,30 +2,181 @@ from __future__ import annotations -from freezegun.api import FrozenDateTimeFactory +from http import HTTPStatus +import logging +from unittest.mock import patch + +from evohomeasync2 import EvohomeClient, exceptions as exc +from evohomeasync2.broker import _ERR_MSG_LOOKUP_AUTH, _ERR_MSG_LOOKUP_BASE import pytest from syrupy import SnapshotAssertion +from homeassistant.components.evohome import DOMAIN, EvoService from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component -from .conftest import setup_evohome from .const import TEST_INSTALLS +SETUP_FAILED_ANTICIPATED = ( + "homeassistant.setup", + logging.ERROR, + "Setup failed for 'evohome': Integration failed to initialize.", +) +SETUP_FAILED_UNEXPECTED = ( + "homeassistant.setup", + logging.ERROR, + "Error during setup of component evohome", +) +AUTHENTICATION_FAILED = ( + "homeassistant.components.evohome.helpers", + logging.ERROR, + "Failed to authenticate with the vendor's server. Check your username" + " and password. NB: Some special password characters that work" + " correctly via the website will not work via the web API. Message" + " is: ", +) +REQUEST_FAILED_NONE = ( + "homeassistant.components.evohome.helpers", + logging.WARNING, + "Unable to connect with the vendor's server. " + "Check your network and the vendor's service status page. " + "Message is: ", +) +REQUEST_FAILED_503 = ( + "homeassistant.components.evohome.helpers", + logging.WARNING, + "The vendor says their server is currently unavailable. " + "Check the vendor's service status page", +) +REQUEST_FAILED_429 = ( + "homeassistant.components.evohome.helpers", + logging.WARNING, + "The vendor's API rate limit has been exceeded. " + "If this message persists, consider increasing the scan_interval", +) -@pytest.mark.parametrize("install", [*TEST_INSTALLS, "botched"]) -async def test_entities( +REQUEST_FAILED_LOOKUP = { + None: [ + REQUEST_FAILED_NONE, + SETUP_FAILED_ANTICIPATED, + ], + HTTPStatus.SERVICE_UNAVAILABLE: [ + REQUEST_FAILED_503, + SETUP_FAILED_ANTICIPATED, + ], + HTTPStatus.TOO_MANY_REQUESTS: [ + REQUEST_FAILED_429, + SETUP_FAILED_ANTICIPATED, + ], +} + + +@pytest.mark.parametrize( + "status", [*sorted([*_ERR_MSG_LOOKUP_AUTH, HTTPStatus.BAD_GATEWAY]), None] +) +async def test_authentication_failure_v2( + hass: HomeAssistant, + config: dict[str, str], + status: HTTPStatus, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test failure to setup an evohome-compatible system. + + In this instance, the failure occurs in the v2 API. + """ + + with patch("evohomeasync2.broker.Broker.get") as mock_fcn: + mock_fcn.side_effect = exc.AuthenticationFailed("", status=status) + + with caplog.at_level(logging.WARNING): + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + + assert result is False + + assert caplog.record_tuples == [ + AUTHENTICATION_FAILED, + SETUP_FAILED_ANTICIPATED, + ] + + +@pytest.mark.parametrize( + "status", [*sorted([*_ERR_MSG_LOOKUP_BASE, HTTPStatus.BAD_GATEWAY]), None] +) +async def test_client_request_failure_v2( hass: HomeAssistant, config: dict[str, str], - install: str, + status: HTTPStatus, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test failure to setup an evohome-compatible system. 
+ + In this instance, the failure occurs in the v2 API. + """ + + with patch("evohomeasync2.broker.Broker.get") as mock_fcn: + mock_fcn.side_effect = exc.RequestFailed("", status=status) + + with caplog.at_level(logging.WARNING): + result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + + assert result is False + + assert caplog.record_tuples == REQUEST_FAILED_LOOKUP.get( + status, [SETUP_FAILED_UNEXPECTED] + ) + + +@pytest.mark.parametrize("install", [*TEST_INSTALLS, "botched"]) +async def test_setup( + hass: HomeAssistant, + evohome: EvohomeClient, snapshot: SnapshotAssertion, - freezer: FrozenDateTimeFactory, ) -> None: - """Test entities and state after setup of a Honeywell TCC-compatible system.""" + """Test services after setup of evohome. + + Registered services vary by the type of system. + """ + + assert hass.services.async_services_for_domain(DOMAIN).keys() == snapshot + - # some extended state attrs are relative the current time - freezer.move_to("2024-07-10T12:00:00Z") +@pytest.mark.parametrize("install", ["default"]) +async def test_service_refresh_system( + hass: HomeAssistant, + evohome: EvohomeClient, +) -> None: + """Test EvoService.REFRESH_SYSTEM of an evohome system.""" + + # EvoService.REFRESH_SYSTEM + with patch("evohomeasync2.location.Location.refresh_status") as mock_fcn: + await hass.services.async_call( + DOMAIN, + EvoService.REFRESH_SYSTEM, + {}, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + +@pytest.mark.parametrize("install", ["default"]) +async def test_service_reset_system( + hass: HomeAssistant, + evohome: EvohomeClient, +) -> None: + """Test EvoService.RESET_SYSTEM of an evohome system.""" - async for _ in setup_evohome(hass, config, install=install): - pass + # EvoService.RESET_SYSTEM (if SZ_AUTO_WITH_RESET in modes) + with patch("evohomeasync2.controlsystem.ControlSystem.set_mode") as mock_fcn: + await hass.services.async_call( + DOMAIN, + EvoService.RESET_SYSTEM, + {}, + blocking=True, + ) - assert hass.states.async_all() == snapshot + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == ("AutoWithReset",) + assert mock_fcn.await_args.kwargs == {"until": None} diff --git a/tests/components/evohome/test_storage.py b/tests/components/evohome/test_storage.py index 33f6c6b3e6ce19..4cc21078333157 100644 --- a/tests/components/evohome/test_storage.py +++ b/tests/components/evohome/test_storage.py @@ -55,20 +55,17 @@ def dt_pair(dt_dtm: datetime) -> tuple[datetime, str]: USERNAME_DIFF: Final = f"not_{USERNAME}" USERNAME_SAME: Final = USERNAME +_TEST_STORAGE_BASE: Final[_TokenStoreT] = { + SZ_USERNAME: USERNAME_SAME, + SZ_REFRESH_TOKEN: REFRESH_TOKEN, + SZ_ACCESS_TOKEN: ACCESS_TOKEN, + SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, +} + TEST_STORAGE_DATA: Final[dict[str, _TokenStoreT]] = { - "sans_session_id": { - SZ_USERNAME: USERNAME_SAME, - SZ_REFRESH_TOKEN: REFRESH_TOKEN, - SZ_ACCESS_TOKEN: ACCESS_TOKEN, - SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, - }, - "with_session_id": { - SZ_USERNAME: USERNAME_SAME, - SZ_REFRESH_TOKEN: REFRESH_TOKEN, - SZ_ACCESS_TOKEN: ACCESS_TOKEN, - SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, - SZ_USER_DATA: {"sessionId": SESSION_ID}, - }, + "sans_session_id": _TEST_STORAGE_BASE, + "null_session_id": _TEST_STORAGE_BASE | {SZ_USER_DATA: None}, # type: ignore[dict-item] + "with_session_id": _TEST_STORAGE_BASE | {SZ_USER_DATA: {"sessionId": SESSION_ID}}, } TEST_STORAGE_NULL: Final[dict[str, _EmptyStoreT 
| None]] = { diff --git a/tests/components/evohome/test_water_heater.py b/tests/components/evohome/test_water_heater.py index 3dc1d961d292c4..8acfd469b59263 100644 --- a/tests/components/evohome/test_water_heater.py +++ b/tests/components/evohome/test_water_heater.py @@ -1,4 +1,4 @@ -"""The tests for water_heater entities of evohome. +"""The tests for the water_heater platform of evohome. Not all evohome systems will have a DHW zone. """ @@ -7,203 +7,184 @@ from unittest.mock import patch +from evohomeasync2 import EvohomeClient from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.evohome import DOMAIN -from homeassistant.components.evohome.coordinator import EvoBroker -from homeassistant.components.evohome.water_heater import EvoDHW -from homeassistant.const import Platform +from homeassistant.components.water_heater import ( + ATTR_AWAY_MODE, + ATTR_OPERATION_MODE, + SERVICE_SET_AWAY_MODE, + SERVICE_SET_OPERATION_MODE, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.exceptions import HomeAssistantError from .conftest import setup_evohome from .const import TEST_INSTALLS_WITH_DHW +DHW_ENTITY_ID = "water_heater.domestic_hot_water" -def get_dhw_entity(hass: HomeAssistant) -> EvoDHW | None: - """Return the DHW entity of the evohome system.""" - broker: EvoBroker = hass.data[DOMAIN]["broker"] - - if (dhw := broker.tcs.hotwater) is None: - return None - - entity_registry = er.async_get(hass) - entity_id = entity_registry.async_get_entity_id( - Platform.WATER_HEATER, DOMAIN, dhw._id - ) - - component: EntityComponent = hass.data.get(Platform.WATER_HEATER) # type: ignore[assignment] - return next(e for e in component.entities if e.entity_id == entity_id) # type: ignore[return-value] - - -@pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_set_operation_mode( +@pytest.mark.parametrize("install", [*TEST_INSTALLS_WITH_DHW, "botched"]) +async def test_setup_platform( hass: HomeAssistant, config: dict[str, str], install: str, - freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: - """Test water_heater services of a evohome-compatible DHW zone.""" + """Test entities and their states after setup of evohome.""" - freezer.move_to("2024-07-10T11:55:00Z") - results = [] + # Cannot use the evohome fixture, as need to set dtm first + # - some extended state attrs are relative the current time + freezer.move_to("2024-07-10T12:00:00Z") async for _ in setup_evohome(hass, config, install=install): - dhw = get_dhw_entity(hass) - - # set_operation_mode(auto): FollowSchedule - with patch("evohomeasync2.hotwater.HotWater._set_mode") as mock_fcn: - await dhw.async_set_operation_mode("auto") - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ( - { - "mode": "FollowSchedule", - "state": None, - "untilTime": None, - }, - ) - assert mock_fcn.await_args.kwargs == {} - - # set_operation_mode(off): TemporaryOverride, advanced - with patch("evohomeasync2.hotwater.HotWater._set_mode") as mock_fcn: - await dhw.async_set_operation_mode("off") - - assert mock_fcn.await_count == 1 - assert install != "default" or mock_fcn.await_args.args == ( - { - "mode": "TemporaryOverride", - "state": "Off", - 
"untilTime": "2024-07-10T12:00:00Z", # varies by install - }, - ) - assert mock_fcn.await_args.kwargs == {} - - results.append(mock_fcn.await_args.args) - - # set_operation_mode(on): TemporaryOverride, advanced - with patch("evohomeasync2.hotwater.HotWater._set_mode") as mock_fcn: - await dhw.async_set_operation_mode("on") - - assert mock_fcn.await_count == 1 - assert install != "default" or mock_fcn.await_args.args == ( - { - "mode": "TemporaryOverride", - "state": "On", - "untilTime": "2024-07-10T12:00:00Z", # varies by install - }, - ) - assert mock_fcn.await_args.kwargs == {} - - results.append(mock_fcn.await_args.args) + pass - assert results == snapshot + for x in hass.states.async_all(Platform.WATER_HEATER): + assert x == snapshot(name=f"{x.entity_id}-state") @pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_turn_away_mode_off( +async def test_set_operation_mode( hass: HomeAssistant, - config: dict[str, str], - install: str, + evohome: EvohomeClient, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, ) -> None: - """Test water_heater services of a evohome-compatible DHW zone.""" + """Test SERVICE_SET_OPERATION_MODE of an evohome DHW zone.""" - async for _ in setup_evohome(hass, config, install=install): - dhw = get_dhw_entity(hass) + freezer.move_to("2024-07-10T11:55:00Z") + results = [] - # turn_away_mode_off(): FollowSchedule - with patch("evohomeasync2.hotwater.HotWater._set_mode") as mock_fcn: - await dhw.async_turn_away_mode_off() + # SERVICE_SET_OPERATION_MODE: auto + with patch("evohomeasync2.hotwater.HotWater.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_OPERATION_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_OPERATION_MODE: "auto", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + # SERVICE_SET_OPERATION_MODE: off (until next scheduled setpoint) + with patch("evohomeasync2.hotwater.HotWater.set_off") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_OPERATION_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_OPERATION_MODE: "off", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs != {} + + results.append(mock_fcn.await_args.kwargs) + + # SERVICE_SET_OPERATION_MODE: on (until next scheduled setpoint) + with patch("evohomeasync2.hotwater.HotWater.set_on") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_OPERATION_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_OPERATION_MODE: "on", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs != {} + + results.append(mock_fcn.await_args.kwargs) - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ( - { - "mode": "FollowSchedule", - "state": None, - "untilTime": None, - }, - ) - assert mock_fcn.await_args.kwargs == {} + assert results == snapshot @pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_turn_away_mode_on( - hass: HomeAssistant, - config: dict[str, str], - install: str, -) -> None: - """Test water_heater services of a evohome-compatible DHW zone.""" - - async for _ in setup_evohome(hass, config, install=install): - dhw = get_dhw_entity(hass) - - # turn_away_mode_on(): PermanentOverride, Off - with 
patch("evohomeasync2.hotwater.HotWater._set_mode") as mock_fcn: - await dhw.async_turn_away_mode_on() - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ( - { - "mode": "PermanentOverride", - "state": "Off", - "untilTime": None, - }, - ) - assert mock_fcn.await_args.kwargs == {} +async def test_set_away_mode(hass: HomeAssistant, evohome: EvohomeClient) -> None: + """Test SERVICE_SET_AWAY_MODE of an evohome DHW zone.""" + + # set_away_mode: off + with patch("evohomeasync2.hotwater.HotWater.reset_mode") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_AWAY_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_AWAY_MODE: "off", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} + + # set_away_mode: on + with patch("evohomeasync2.hotwater.HotWater.set_off") as mock_fcn: + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_SET_AWAY_MODE, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + ATTR_AWAY_MODE: "on", + }, + blocking=True, + ) + + assert mock_fcn.await_count == 1 + assert mock_fcn.await_args.args == () + assert mock_fcn.await_args.kwargs == {} @pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_turn_off( - hass: HomeAssistant, - config: dict[str, str], - install: str, -) -> None: - """Test water_heater services of a evohome-compatible DHW zone.""" - - async for _ in setup_evohome(hass, config, install=install): - dhw = get_dhw_entity(hass) +async def test_turn_off(hass: HomeAssistant, evohome: EvohomeClient) -> None: + """Test SERVICE_TURN_OFF of an evohome DHW zone.""" - # turn_off(): PermanentOverride, Off - with patch("evohomeasync2.hotwater.HotWater._set_mode") as mock_fcn: - await dhw.async_turn_off() - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ( - { - "mode": "PermanentOverride", - "state": "Off", - "untilTime": None, - }, - ) - assert mock_fcn.await_args.kwargs == {} + # Entity water_heater.xxx does not support this service + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + }, + blocking=True, + ) @pytest.mark.parametrize("install", TEST_INSTALLS_WITH_DHW) -async def test_turn_on( - hass: HomeAssistant, - config: dict[str, str], - install: str, -) -> None: - """Test water_heater services of a evohome-compatible DHW zone.""" - - async for _ in setup_evohome(hass, config, install=install): - dhw = get_dhw_entity(hass) - - # turn_on(): PermanentOverride, On - with patch("evohomeasync2.hotwater.HotWater._set_mode") as mock_fcn: - await dhw.async_turn_on() - - assert mock_fcn.await_count == 1 - assert mock_fcn.await_args.args == ( - { - "mode": "PermanentOverride", - "state": "On", - "untilTime": None, - }, - ) - assert mock_fcn.await_args.kwargs == {} +async def test_turn_on(hass: HomeAssistant, evohome: EvohomeClient) -> None: + """Test SERVICE_TURN_ON of an evohome DHW zone.""" + + # Entity water_heater.xxx does not support this service + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + Platform.WATER_HEATER, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: DHW_ENTITY_ID, + }, + blocking=True, + ) diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index 29e52c5b01e064..2a434306c0f625 100644 --- a/tests/components/feedreader/test_config_flow.py +++ 
b/tests/components/feedreader/test_config_flow.py @@ -164,7 +164,7 @@ async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: # init user flow result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # success with patch( @@ -196,7 +196,7 @@ async def test_reconfigure_errors( # init user flow result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # raise URLError feedparser.side_effect = urllib.error.URLError("Test") @@ -208,7 +208,7 @@ async def test_reconfigure_errors( }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "url_error"} # success diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index df8b12e2167bdf..ac10d4fc79d2b5 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -76,6 +76,36 @@ def mock_power_sensor() -> Mock: return sensor +@pytest.fixture +def mock_cover() -> Mock: + """Fixture for a cover.""" + cover = Mock() + cover.fibaro_id = 3 + cover.parent_fibaro_id = 0 + cover.name = "Test cover" + cover.room_id = 1 + cover.dead = False + cover.visible = True + cover.enabled = True + cover.type = "com.fibaro.FGR" + cover.base_type = "com.fibaro.device" + cover.properties = {"manufacturer": ""} + cover.actions = {"open": 0, "close": 0} + cover.supported_features = {} + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + cover.value = value_mock + value2_mock = Mock() + value2_mock.has_value = False + cover.value_2 = value2_mock + state_mock = Mock() + state_mock.has_value = True + state_mock.str_value.return_value = "opening" + cover.state = state_mock + return cover + + @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" diff --git a/tests/components/fibaro/test_cover.py b/tests/components/fibaro/test_cover.py new file mode 100644 index 00000000000000..d5b08f7d1f8c62 --- /dev/null +++ b/tests/components/fibaro/test_cover.py @@ -0,0 +1,98 @@ +"""Test the Fibaro cover platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.cover import CoverState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_cover_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("cover.room_1_test_cover_3") + assert entry + assert entry.unique_id == "hc2_111111.3" + assert entry.original_name == "Room 1 Test cover" + + +async def test_cover_opening( + hass: HomeAssistant, + mock_fibaro_client: 
Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover opening state is reported.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.OPENING + + +async def test_cover_opening_closing_none( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover opening closing states return None if not available.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_cover.state.has_value = False + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.OPEN + + +async def test_cover_closing( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_cover: Mock, + mock_room: Mock, +) -> None: + """Test that the cover closing state is reported.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_cover.state.str_value.return_value = "closing" + mock_fibaro_client.read_devices.return_value = [mock_cover] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.COVER]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + assert hass.states.get("cover.room_1_test_cover_3").state == CoverState.CLOSING diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index 33e4739a488a72..e7cb85a9cfcf24 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -12,222 +12,46 @@ from homeassistant.components.notify import ATTR_TITLE_DEFAULT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, assert_setup_component - - -async def test_bad_config(hass: HomeAssistant) -> None: - """Test set up the platform with bad/missing config.""" - config = {notify.DOMAIN: {"name": "test", "platform": "file"}} - with assert_setup_component(0, domain="notify") as handle_config: - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert not handle_config[notify.DOMAIN] +from tests.common import MockConfigEntry @pytest.mark.parametrize( ("domain", "service", "params"), [ - (notify.DOMAIN, "test", {"message": "one, two, testing, testing"}), ( notify.DOMAIN, "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, ), ], - ids=["legacy", "entity"], -) -@pytest.mark.parametrize( - ("timestamp", "config"), - [ - ( - False, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - "timestamp": False, - } - ] - }, - ), - ( - True, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - "timestamp": True, - } - ] - }, - ), - ], - ids=["no_timestamp", "timestamp"], ) 
+@pytest.mark.parametrize("timestamp", [False, True], ids=["no_timestamp", "timestamp"]) async def test_notify_file( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - timestamp: bool, mock_is_allowed_path: MagicMock, - config: ConfigType, + timestamp: bool, domain: str, service: str, params: dict[str, str], ) -> None: """Test the notify file output.""" filename = "mock_file" - message = params["message"] - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - - freezer.move_to(dt_util.utcnow()) - - m_open = mock_open() - with ( - patch("homeassistant.components.file.notify.open", m_open, create=True), - patch("homeassistant.components.file.notify.os.stat") as mock_st, - ): - mock_st.return_value.st_size = 0 - title = ( - f"{ATTR_TITLE_DEFAULT} notifications " - f"(Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" - ) - - await hass.services.async_call(domain, service, params, blocking=True) + full_filename = os.path.join(hass.config.path(), filename) - full_filename = os.path.join(hass.config.path(), filename) - assert m_open.call_count == 1 - assert m_open.call_args == call(full_filename, "a", encoding="utf8") - - assert m_open.return_value.write.call_count == 2 - if not timestamp: - assert m_open.return_value.write.call_args_list == [ - call(title), - call(f"{message}\n"), - ] - else: - assert m_open.return_value.write.call_args_list == [ - call(title), - call(f"{dt_util.utcnow().isoformat()} {message}\n"), - ] - - -@pytest.mark.parametrize( - ("domain", "service", "params"), - [(notify.DOMAIN, "test", {"message": "one, two, testing, testing"})], - ids=["legacy"], -) -@pytest.mark.parametrize( - ("is_allowed", "config"), - [ - ( - True, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - } - ] - }, - ), - ], - ids=["allowed_but_access_failed"], -) -async def test_legacy_notify_file_exception( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_is_allowed_path: MagicMock, - config: ConfigType, - domain: str, - service: str, - params: dict[str, str], -) -> None: - """Test legacy notify file output has exception.""" - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - - freezer.move_to(dt_util.utcnow()) - - m_open = mock_open() - with ( - patch("homeassistant.components.file.notify.open", m_open, create=True), - patch("homeassistant.components.file.notify.os.stat") as mock_st, - ): - mock_st.side_effect = OSError("Access Failed") - with pytest.raises(ServiceValidationError) as exc: - await hass.services.async_call(domain, service, params, blocking=True) - assert f"{exc.value!r}" == "ServiceValidationError('write_access_failed')" - - -@pytest.mark.parametrize( - ("timestamp", "data", "options"), - [ - ( - False, - { - "name": "test", - "platform": "notify", - "file_path": "mock_file", - }, - { - "timestamp": False, - }, - ), - ( - True, - { - "name": "test", - "platform": "notify", - "file_path": "mock_file", - }, - { - "timestamp": True, - }, - ), - ], - ids=["no_timestamp", "timestamp"], -) -async def test_legacy_notify_file_entry_only_setup( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - timestamp: bool, - mock_is_allowed_path: MagicMock, - data: dict[str, Any], - options: 
dict[str, Any], -) -> None: - """Test the legacy notify file output in entry only setup.""" - filename = "mock_file" - - domain = notify.DOMAIN - service = "test" - params = {"message": "one, two, testing, testing"} message = params["message"] entry = MockConfigEntry( domain=DOMAIN, - data=data, + data={"name": "test", "platform": "notify", "file_path": full_filename}, + options={"timestamp": timestamp}, version=2, - options=options, - title=f"test [{data['file_path']}]", + title=f"test [{filename}]", ) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) + assert await hass.config_entries.async_setup(entry.entry_id) freezer.move_to(dt_util.utcnow()) @@ -245,7 +69,7 @@ async def test_legacy_notify_file_entry_only_setup( await hass.services.async_call(domain, service, params, blocking=True) assert m_open.call_count == 1 - assert m_open.call_args == call(filename, "a", encoding="utf8") + assert m_open.call_args == call(full_filename, "a", encoding="utf8") assert m_open.return_value.write.call_count == 2 if not timestamp: @@ -277,14 +101,14 @@ async def test_legacy_notify_file_entry_only_setup( ], ids=["not_allowed"], ) -async def test_legacy_notify_file_not_allowed( +async def test_notify_file_not_allowed( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], options: dict[str, Any], ) -> None: - """Test legacy notify file output not allowed.""" + """Test notify file output not allowed.""" entry = MockConfigEntry( domain=DOMAIN, data=config, @@ -301,11 +125,10 @@ async def test_legacy_notify_file_not_allowed( @pytest.mark.parametrize( ("service", "params"), [ - ("test", {"message": "one, two, testing, testing"}), ( "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, - ), + ) ], ) @pytest.mark.parametrize( diff --git a/tests/components/file/test_sensor.py b/tests/components/file/test_sensor.py index 634ae9d626c9f9..9e6a16e3e27237 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -7,33 +7,10 @@ from homeassistant.components.file import DOMAIN from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, get_fixture_path -@patch("os.path.isfile", Mock(return_value=True)) -@patch("os.access", Mock(return_value=True)) -async def test_file_value_yaml_setup( - hass: HomeAssistant, mock_is_allowed_path: MagicMock -) -> None: - """Test the File sensor from YAML setup.""" - config = { - "sensor": { - "platform": "file", - "scan_interval": 30, - "name": "file1", - "file_path": get_fixture_path("file_value.txt", "file"), - } - } - - assert await async_setup_component(hass, "sensor", config) - await hass.async_block_till_done() - - state = hass.states.get("sensor.file1") - assert state.state == "21" - - @patch("os.path.isfile", Mock(return_value=True)) @patch("os.access", Mock(return_value=True)) async def test_file_value_entry_setup( diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index 87fe3a2bbf0659..c323defc791e37 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -61,6 +61,10 @@ async def test_form(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.parametrize( # Remove 
when translations fixed + "ignore_translations", + ["component.flume.config.error.invalid_auth"], +) @pytest.mark.usefixtures("access_token") async def test_form_invalid_auth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we handle invalid auth.""" @@ -89,6 +93,10 @@ async def test_form_invalid_auth(hass: HomeAssistant, requests_mock: Mocker) -> assert result2["errors"] == {"password": "invalid_auth"} +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.flume.config.error.cannot_connect"], +) @pytest.mark.usefixtures("access_token", "device_list_timeout") async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" @@ -110,6 +118,16 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: assert result2["errors"] == {"base": "cannot_connect"} +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + [ + [ + "component.flume.config.abort.reauth_successful", + "component.flume.config.error.cannot_connect", + "component.flume.config.error.invalid_auth", + ] + ], +) @pytest.mark.usefixtures("access_token") async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we can reauth.""" @@ -190,6 +208,10 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: assert result4["reason"] == "reauth_successful" +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.flume.config.error.cannot_connect"], +) @pytest.mark.usefixtures("access_token") async def test_form_no_devices(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test a device list response that contains no values will raise an error.""" diff --git a/tests/components/folder_watcher/test_config_flow.py b/tests/components/folder_watcher/test_config_flow.py index 745059717fbcb0..3b41b5724fcc86 100644 --- a/tests/components/folder_watcher/test_config_flow.py +++ b/tests/components/folder_watcher/test_config_flow.py @@ -148,39 +148,3 @@ async def test_form_already_configured(hass: HomeAssistant, tmp_path: Path) -> N assert result["type"] == FlowResultType.ABORT assert result["reason"] == "already_configured" - - -async def test_import(hass: HomeAssistant, tmp_path: Path) -> None: - """Test import flow.""" - path = tmp_path.as_posix() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_FOLDER: path, CONF_PATTERNS: ["*"]}, - ) - await hass.async_block_till_done() - - assert result["type"] == FlowResultType.CREATE_ENTRY - assert result["title"] == f"Folder Watcher {path}" - assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} - - -async def test_import_already_configured(hass: HomeAssistant, tmp_path: Path) -> None: - """Test we abort import when entry is already configured.""" - path = tmp_path.as_posix() - - entry = MockConfigEntry( - domain=DOMAIN, - title=f"Folder Watcher {path}", - data={CONF_FOLDER: path}, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_FOLDER: path}, - ) - - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/folder_watcher/test_init.py b/tests/components/folder_watcher/test_init.py index 965ae33c4f8b4e..f4a3b7e3630347 100644 --- a/tests/components/folder_watcher/test_init.py +++ 
b/tests/components/folder_watcher/test_init.py @@ -1,33 +1,68 @@ """The tests for the folder_watcher component.""" -import os +from pathlib import Path from types import SimpleNamespace from unittest.mock import Mock, patch +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components import folder_watcher +from homeassistant.components.folder_watcher.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.helpers import issue_registry as ir + +from tests.common import MockConfigEntry -async def test_invalid_path_setup(hass: HomeAssistant) -> None: +async def test_invalid_path_setup( + hass: HomeAssistant, + tmp_path: Path, + freezer: FrozenDateTimeFactory, + issue_registry: ir.IssueRegistry, +) -> None: """Test that an invalid path is not set up.""" - assert not await async_setup_component( - hass, - folder_watcher.DOMAIN, - {folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: "invalid_path"}}, + freezer.move_to("2022-04-19 10:31:02+00:00") + path = tmp_path.as_posix() + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + title=f"Folder Watcher {path!s}", + data={}, + options={"folder": str(path), "patterns": ["*"]}, + entry_id="1", ) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.SETUP_ERROR + assert len(issue_registry.issues) == 1 -async def test_valid_path_setup(hass: HomeAssistant) -> None: + +async def test_valid_path_setup( + hass: HomeAssistant, tmp_path: Path, freezer: FrozenDateTimeFactory +) -> None: """Test that a valid path is setup.""" - cwd = os.path.join(os.path.dirname(__file__)) - hass.config.allowlist_external_dirs = {cwd} - with patch.object(folder_watcher, "Watcher"): - assert await async_setup_component( - hass, - folder_watcher.DOMAIN, - {folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: cwd}}, - ) + freezer.move_to("2022-04-19 10:31:02+00:00") + path = tmp_path.as_posix() + hass.config.allowlist_external_dirs = {path} + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + title=f"Folder Watcher {path!s}", + data={}, + options={"folder": str(path), "patterns": ["*"]}, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED def test_event() -> None: diff --git a/tests/components/freebox/test_alarm_control_panel.py b/tests/components/freebox/test_alarm_control_panel.py index e4ee8f63b2c3e9..b02e4c974ffb27 100644 --- a/tests/components/freebox/test_alarm_control_panel.py +++ b/tests/components/freebox/test_alarm_control_panel.py @@ -8,6 +8,7 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.freebox import SCAN_INTERVAL from homeassistant.const import ( @@ -16,11 +17,6 @@ SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -59,7 +55,7 @@ async def test_alarm_changed_from_external( # Initial state assert ( 
hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMING + == AlarmControlPanelState.ARMING ) # Now simulate a changed status @@ -73,7 +69,7 @@ async def test_alarm_changed_from_external( assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMED_AWAY + == AlarmControlPanelState.ARMED_AWAY ) @@ -98,7 +94,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non # Initial state: arm_away assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMED_AWAY + == AlarmControlPanelState.ARMED_AWAY ) # Now call for a change -> disarmed @@ -113,7 +109,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_DISARMED + == AlarmControlPanelState.DISARMED ) # Now call for a change -> arm_away @@ -128,7 +124,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMING + == AlarmControlPanelState.ARMING ) # Now call for a change -> arm_home @@ -144,7 +140,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_ARMED_HOME + == AlarmControlPanelState.ARMED_HOME ) # Now call for a change -> trigger @@ -159,7 +155,7 @@ async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> Non assert ( hass.states.get("alarm_control_panel.systeme_d_alarme").state - == STATE_ALARM_TRIGGERED + == AlarmControlPanelState.TRIGGERED ) diff --git a/tests/components/freedompro/test_cover.py b/tests/components/freedompro/test_cover.py index ba48da1d1d45f9..bcba1e0b9171f4 100644 --- a/tests/components/freedompro/test_cover.py +++ b/tests/components/freedompro/test_cover.py @@ -5,14 +5,16 @@ import pytest -from homeassistant.components.cover import ATTR_POSITION, DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import ( + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + CoverState, +) from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_CLOSED, - STATE_OPEN, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -56,7 +58,7 @@ async def test_cover_get_state( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -80,7 +82,7 @@ async def test_cover_get_state( assert entry assert entry.unique_id == uid - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -107,7 +109,7 @@ async def test_cover_set_position( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -133,7 +135,7 @@ async def test_cover_set_position( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes["current_position"] == 33 @@ -171,7 +173,7 @@ async def test_cover_close( state = hass.states.get(entity_id) assert state - assert 
state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -196,7 +198,7 @@ async def test_cover_close( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -223,7 +225,7 @@ async def test_cover_open( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get("friendly_name") == name entry = entity_registry.async_get(entity_id) @@ -249,4 +251,4 @@ async def test_cover_open( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN diff --git a/tests/components/fritz/const.py b/tests/components/fritz/const.py index 0817cc5d804f87..acd96879b1e3b3 100644 --- a/tests/components/fritz/const.py +++ b/tests/components/fritz/const.py @@ -655,7 +655,23 @@ "cur_data_rate_tx": 0, "cur_availability_rx": 99, "cur_availability_tx": 99, - } + }, + { + "uid": "nl-79", + "type": "LAN", + "state": "DISCONNECTED", + "last_connected": 1642872667, + "node_1_uid": "n-167", + "node_2_uid": "n-76", + "node_interface_1_uid": "ni-140", + "node_interface_2_uid": "ni-77", + "max_data_rate_rx": 1000000, + "max_data_rate_tx": 1000000, + "cur_data_rate_rx": 0, + "cur_data_rate_tx": 0, + "cur_availability_rx": 99, + "cur_availability_tx": 99, + }, ], } ], diff --git a/tests/components/fritz/snapshots/test_image.ambr b/tests/components/fritz/snapshots/test_image.ambr index a51ab015a898e9..6ef7413998bb0d 100644 --- a/tests/components/fritz/snapshots/test_image.ambr +++ b/tests/components/fritz/snapshots/test_image.ambr @@ -1,10 +1,10 @@ # serializer version: 1 # name: test_image_entity[fc_data0] - b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x94\x00\x00\x00\x94\x01\x00\x00\x00\x00]G=y\x00\x00\x00\xf5IDATx\xda\xedVQ\x0eC!\x0c"\xbb@\xef\x7fKn\xe0\x00\xfd\xdb\xcf6\xf9|\xc6\xc4\xc6\x0f\xd2\x02\xadb},\xe2\xb9\xfb\xe5\x0e\xc0(\x18\xf2\x84/|\xaeo\xef\x847\xda\x14\x1af\x1c\xde\xe3\x19(X\tKxN\xb2\x87\x17j9\x1d\xd7\xb7o\x8c44\x1a3\xbe\x16x\x03\xc1`\xe5k\x87Oh'\xf1\x07\xde\xd1\xcd\xa1\xc2\x877\x13]U\xfey\xe2Y\x95\xfe\xd2\x1a\xe0\xd0\x9bD\x91\x7f\xfcO\xfa\xca\xedg\xbc\xb1\xb4\xfb\x8a\x87\x16\xa2\x88\x1f\xf0\x11a\xc1_6/\xd1#\xc2\xb0\xf0/\xac}\xba\xfe\xd9\xe4\xaf\xd8n\xf1B\xbf\xcb_)<\xf3\xcfn\xf2\xc7\xba\x9f\xfam\xf4{\x1eQ\x82\xb3\xd1O;=\xae\x80\xc9\xaa\x7f2>\xf2\xd04\xf5k\xf0\xc4\xfe\xcc\x80f\xfeD\xfc}\x01\xe8\xfc\xdf\xc1u{*\xfd\xd3\xbe7@\xa7\xd4/5\x94\x06\xae\xfa\xff\xa6\xe7\xe6_\xe2\x97\xba\x99\x80\xe5\xfcO\xeby\x03l\xff?\xb8\xf8l\xe7\xaf\xa1j\xf4{\x03\x17\xfa\xb4\x19\xc7\xc5\xe1\xd3\x00\x00\x00\x00IEND\xaeB`\x82" # --- diff --git a/tests/components/fritz/snapshots/test_update.ambr b/tests/components/fritz/snapshots/test_update.ambr index 5544c97249988d..3c7880d01e768d 100644 --- a/tests/components/fritz/snapshots/test_update.ambr +++ b/tests/components/fritz/snapshots/test_update.ambr @@ -36,6 +36,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', 'friendly_name': 'Mock Title FRITZ!OS', 'in_progress': False, @@ -46,6 +47,7 @@ 'skipped_version': None, 'supported_features': , 'title': 'FRITZ!OS', + 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_fritz_os', @@ -92,6 +94,7 @@ 
StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', 'friendly_name': 'Mock Title FRITZ!OS', 'in_progress': False, @@ -102,6 +105,7 @@ 'skipped_version': None, 'supported_features': , 'title': 'FRITZ!OS', + 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_fritz_os', @@ -148,6 +152,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', 'friendly_name': 'Mock Title FRITZ!OS', 'in_progress': False, @@ -158,6 +163,7 @@ 'skipped_version': None, 'supported_features': , 'title': 'FRITZ!OS', + 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_fritz_os', diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index 96ceffa31844f8..e3fae8c083e91d 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -458,7 +458,7 @@ async def test_reconfigure_successful( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -512,7 +512,7 @@ async def test_reconfigure_not_successful( result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -523,7 +523,7 @@ async def test_reconfigure_not_successful( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"]["base"] == ERROR_CANNOT_CONNECT result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/fritz/test_image.py b/tests/components/fritz/test_image.py index 9097aab1762abf..d8652bd6508e22 100644 --- a/tests/components/fritz/test_image.py +++ b/tests/components/fritz/test_image.py @@ -24,6 +24,7 @@ GUEST_WIFI_ENABLED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { + "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -43,6 +44,7 @@ GUEST_WIFI_CHANGED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { + "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -62,6 +64,7 @@ GUEST_WIFI_DISABLED: dict[str, dict] = { "WLANConfiguration0": {}, "WLANConfiguration1": { + "GetBeaconAdvertisement": {"NewBeaconAdvertisementEnabled": 1}, "GetInfo": { "NewEnable": False, "NewStatus": "Up", diff --git a/tests/components/fritzbox/test_config_flow.py b/tests/components/fritzbox/test_config_flow.py index fc63684e5d1241..0df6d0b2ea9406 100644 --- a/tests/components/fritzbox/test_config_flow.py +++ b/tests/components/fritzbox/test_config_flow.py @@ -198,7 +198,7 @@ async def test_reconfigure_success(hass: HomeAssistant, fritz: Mock) -> None: result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -227,7 +227,7 @@ async 
def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: result = await mock_config.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -236,7 +236,7 @@ async def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"]["base"] == "no_devices_found" result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index 383a051256575a..f26e65fc28a33d 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -7,7 +7,7 @@ ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, - STATE_OPEN, + CoverState, ) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( @@ -44,7 +44,7 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: state = hass.states.get(ENTITY_ID) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index b85aafb2e1e206..1b9c41d5aa6a25 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -344,7 +344,7 @@ async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfiguring an entry.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id="1234567", data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -490,7 +490,7 @@ async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "unique_id_mismatch" assert len(mock_setup_entry.mock_calls) == 0 @@ -531,4 +531,4 @@ async def test_reconfigure_already_existing(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" + assert result2["reason"] == "unique_id_mismatch" diff --git a/tests/components/fyta/conftest.py b/tests/components/fyta/conftest.py index 2bcad9b3c80666..299b96be9591d5 100644 --- a/tests/components/fyta/conftest.py +++ b/tests/components/fyta/conftest.py @@ -2,7 +2,7 @@ from collections.abc import Generator from datetime import UTC, datetime -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from fyta_cli.fyta_models import Credentials, Plant import pytest @@ -46,6 +46,7 @@ def mock_fyta_connector(): tzinfo=UTC ) mock_fyta_connector.client = AsyncMock(autospec=True) + mock_fyta_connector.data = MagicMock() mock_fyta_connector.update_all_plants.return_value = plants mock_fyta_connector.plant_list = { 0: "Gummibaum", diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json index f2e8dc9c97033e..72d129492bba49 100644 --- a/tests/components/fyta/fixtures/plant_status1.json +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -9,7 +9,7 @@ "moisture_status": 3, 
"sensor_available": true, "sw_version": "1.0", - "status": 3, + "status": 1, "online": true, "ph": null, "plant_id": 0, diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json index a5c2735ca7c300..8ed095325671e1 100644 --- a/tests/components/fyta/fixtures/plant_status2.json +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -9,7 +9,7 @@ "moisture_status": 3, "sensor_available": true, "sw_version": "1.0", - "status": 3, + "status": 1, "online": true, "ph": 7, "plant_id": 0, diff --git a/tests/components/fyta/fixtures/plant_status3.json b/tests/components/fyta/fixtures/plant_status3.json new file mode 100644 index 00000000000000..6e32ba601ed089 --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status3.json @@ -0,0 +1,23 @@ +{ + "battery_level": 80, + "battery_status": true, + "last_updated": "2023-01-02 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Tomatenpflanze", + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sw_version": "1.0", + "status": 1, + "online": true, + "ph": 7, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Solanum lycopersicum", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 5c68040f5411bf..2af616c64123f9 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -42,7 +42,7 @@ 'salinity_status': 4, 'scientific_name': 'Ficus elastica', 'sensor_available': True, - 'status': 3, + 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, 'temperature_status': 3, @@ -65,7 +65,7 @@ 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', 'sensor_available': True, - 'status': 3, + 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, 'temperature_status': 3, diff --git a/tests/components/fyta/snapshots/test_sensor.ambr b/tests/components/fyta/snapshots/test_sensor.ambr index 2e96de0a283f27..ef583dd28a6e7f 100644 --- a/tests/components/fyta/snapshots/test_sensor.ambr +++ b/tests/components/fyta/snapshots/test_sensor.ambr @@ -386,7 +386,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'no_sensor', + 'state': 'doing_great', }) # --- # name: test_all_entities[sensor.gummibaum_salinity-entry] @@ -421,7 +421,7 @@ 'supported_features': 0, 'translation_key': 'salinity', 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.gummibaum_salinity-state] @@ -430,7 +430,7 @@ 'device_class': 'conductivity', 'friendly_name': 'Gummibaum Salinity', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.gummibaum_salinity', @@ -1052,7 +1052,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'no_sensor', + 'state': 'doing_great', }) # --- # name: test_all_entities[sensor.kakaobaum_salinity-entry] @@ -1087,7 +1087,7 @@ 'supported_features': 0, 'translation_key': 'salinity', 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.kakaobaum_salinity-state] @@ -1096,7 +1096,7 @@ 'device_class': 'conductivity', 'friendly_name': 'Kakaobaum Salinity', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': 
, }), 'context': , 'entity_id': 'sensor.kakaobaum_salinity', diff --git a/tests/components/fyta/test_sensor.py b/tests/components/fyta/test_sensor.py index e33c54695e5d42..07e3965e66f21b 100644 --- a/tests/components/fyta/test_sensor.py +++ b/tests/components/fyta/test_sensor.py @@ -5,16 +5,23 @@ from freezegun.api import FrozenDateTimeFactory from fyta_cli.fyta_exceptions import FytaConnectionError, FytaPlantError +from fyta_cli.fyta_models import Plant import pytest from syrupy import SnapshotAssertion +from homeassistant.components.fyta.const import DOMAIN as FYTA_DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . import setup_platform -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) async def test_all_entities( @@ -54,3 +61,32 @@ async def test_connection_error( await hass.async_block_till_done() assert hass.states.get("sensor.gummibaum_plant_state").state == STATE_UNAVAILABLE + + +async def test_add_remove_entities( + hass: HomeAssistant, + mock_fyta_connector: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test if entities are added and old are removed.""" + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + + assert hass.states.get("sensor.gummibaum_plant_state").state == "doing_great" + + plants: dict[int, Plant] = { + 0: Plant.from_dict(load_json_object_fixture("plant_status1.json", FYTA_DOMAIN)), + 2: Plant.from_dict(load_json_object_fixture("plant_status3.json", FYTA_DOMAIN)), + } + mock_fyta_connector.update_all_plants.return_value = plants + mock_fyta_connector.plant_list = { + 0: "Kautschukbaum", + 2: "Tomatenpflanze", + } + + freezer.tick(delta=timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.kakaobaum_plant_state") is None + assert hass.states.get("sensor.tomatenpflanze_plant_state").state == "doing_great" diff --git a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr index 42ae66addf0c7f..6d521b1f2c8d62 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr @@ -84,60 +84,6 @@ 'type': , }) # --- -# name: test_bluetooth_lost - FlowResultSnapshot({ - 'data_schema': None, - 'description_placeholders': dict({ - 'name': 'Timer', - }), - 'errors': None, - 'flow_id': , - 'handler': 'gardena_bluetooth', - 'last_step': None, - 'step_id': 'confirm', - 'type': , - }) -# --- -# name: test_bluetooth_lost.1 - FlowResultSnapshot({ - 'context': dict({ - 'confirm_only': True, - 'source': 'bluetooth', - 'title_placeholders': dict({ - 'name': 'Timer', - }), - 'unique_id': '00000000-0000-0000-0000-000000000001', - }), - 'data': dict({ - 'address': '00000000-0000-0000-0000-000000000001', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'gardena_bluetooth', - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'address': '00000000-0000-0000-0000-000000000001', - }), - 'disabled_by': None, - 'domain': 'gardena_bluetooth', - 'entry_id': , - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 
'pref_disable_polling': False, - 'source': 'bluetooth', - 'title': 'Timer', - 'unique_id': '00000000-0000-0000-0000-000000000001', - 'version': 1, - }), - 'title': 'Timer', - 'type': , - 'version': 1, - }) -# --- # name: test_failed_connect FlowResultSnapshot({ 'data_schema': list([ diff --git a/tests/components/gardena_bluetooth/test_config_flow.py b/tests/components/gardena_bluetooth/test_config_flow.py index 3b4e9c242b3850..b20395ec40f557 100644 --- a/tests/components/gardena_bluetooth/test_config_flow.py +++ b/tests/components/gardena_bluetooth/test_config_flow.py @@ -31,6 +31,7 @@ async def test_user_selection( inject_bluetooth_service_info(hass, WATER_TIMER_SERVICE_INFO) inject_bluetooth_service_info(hass, WATER_TIMER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index 59ff513ccc9f65..d3ef0a39241cb1 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -275,7 +275,9 @@ async def test_limit_refetch( with ( pytest.raises(aiohttp.ServerTimeoutError), - patch("asyncio.timeout", side_effect=TimeoutError()), + patch.object( + client.session._connector, "connect", side_effect=asyncio.TimeoutError + ), ): resp = await client.get("/api/camera_proxy/camera.config_test") diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index cf4ab0bde57bd1..7575a078675874 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -637,6 +637,10 @@ async def test_form_stream_other_error(hass: HomeAssistant, user_flow) -> None: await hass.async_block_till_done() +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.generic.config.error.Some message"], +) @respx.mock @pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_worker_error( diff --git a/tests/components/generic_thermostat/snapshots/test_config_flow.ambr b/tests/components/generic_thermostat/snapshots/test_config_flow.ambr index d515d52a81b829..ed757d1c2aec2b 100644 --- a/tests/components/generic_thermostat/snapshots/test_config_flow.ambr +++ b/tests/components/generic_thermostat/snapshots/test_config_flow.ambr @@ -18,6 +18,25 @@ 'type': , }) # --- +# name: test_config_flow_preset_accepts_float[create_entry] + FlowResultSnapshot({ + 'result': ConfigEntrySnapshot({ + 'title': 'My thermostat', + }), + 'title': 'My thermostat', + 'type': , + }) +# --- +# name: test_config_flow_preset_accepts_float[init] + FlowResultSnapshot({ + 'type': , + }) +# --- +# name: test_config_flow_preset_accepts_float[presets] + FlowResultSnapshot({ + 'type': , + }) +# --- # name: test_options[create_entry] FlowResultSnapshot({ 'result': True, diff --git a/tests/components/generic_thermostat/test_config_flow.py b/tests/components/generic_thermostat/test_config_flow.py index 7a7fdabc6e680c..561870ad3d49c9 100644 --- a/tests/components/generic_thermostat/test_config_flow.py +++ b/tests/components/generic_thermostat/test_config_flow.py @@ -132,3 +132,51 @@ async def test_options(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None # Check config entry is reloaded with new options await hass.async_block_till_done() assert hass.states.get("climate.my_thermostat") == snapshot(name="without_away") + + +async def 
test_config_flow_preset_accepts_float( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test the config flow with preset is a float.""" + with patch( + "homeassistant.components.generic_thermostat.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result == snapshot(name="init", include=SNAPSHOT_FLOW_PROPS) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: "My thermostat", + CONF_HEATER: "switch.run", + CONF_SENSOR: "sensor.temperature", + CONF_AC_MODE: False, + CONF_COLD_TOLERANCE: 0.3, + CONF_HOT_TOLERANCE: 0.3, + }, + ) + assert result == snapshot(name="presets", include=SNAPSHOT_FLOW_PROPS) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PRESETS[PRESET_AWAY]: 10.4, + }, + ) + assert result == snapshot(name="create_entry", include=SNAPSHOT_FLOW_PROPS) + + await hass.async_block_till_done() + + assert len(mock_setup_entry.mock_calls) == 1 + assert result["options"] == { + "ac_mode": False, + "away_temp": 10.4, + "cold_tolerance": 0.3, + "heater": "switch.run", + "hot_tolerance": 0.3, + "name": "My thermostat", + "target_sensor": "sensor.temperature", + } diff --git a/tests/components/geniushub/conftest.py b/tests/components/geniushub/conftest.py index 1d2e706a6a6615..304d7555a8caa5 100644 --- a/tests/components/geniushub/conftest.py +++ b/tests/components/geniushub/conftest.py @@ -2,7 +2,7 @@ from collections.abc import Generator from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from geniushubclient import GeniusDevice, GeniusZone import pytest @@ -11,7 +11,6 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from tests.common import MockConfigEntry, load_json_array_fixture -from tests.components.smhi.common import AsyncMock @pytest.fixture diff --git a/tests/components/geniushub/test_config_flow.py b/tests/components/geniushub/test_config_flow.py index 9234e03e35a258..7d1d33a22451a4 100644 --- a/tests/components/geniushub/test_config_flow.py +++ b/tests/components/geniushub/test_config_flow.py @@ -2,21 +2,14 @@ from http import HTTPStatus import socket -from typing import Any from unittest.mock import AsyncMock from aiohttp import ClientConnectionError, ClientResponseError import pytest from homeassistant.components.geniushub import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import ( - CONF_HOST, - CONF_MAC, - CONF_PASSWORD, - CONF_TOKEN, - CONF_USERNAME, -) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -309,174 +302,3 @@ async def test_cloud_duplicate( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -async def test_import_local_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], 
-) -> None: - """Test full local import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "10.0.0.130" - assert result["data"] == data - assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_TOKEN: "abcdef", - }, - { - CONF_TOKEN: "abcdef", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -async def test_import_cloud_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], -) -> None: - """Test full cloud import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Genius hub" - assert result["data"] == data - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - { - CONF_TOKEN: "abcdef", - }, - { - CONF_TOKEN: "abcdef", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -@pytest.mark.parametrize( - ("exception", "reason"), - [ - (socket.gaierror, "invalid_host"), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), - "invalid_auth", - ), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), - "invalid_host", - ), - (TimeoutError, "cannot_connect"), - (ClientConnectionError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_import_flow_exceptions( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], - exception: Exception, - reason: str, -) -> None: - """Test import flow exceptions.""" - mock_geniushub_client.request.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.131", - CONF_USERNAME: "test-username1", - CONF_PASSWORD: "test-password", - }, - ], -) -async def test_import_flow_local_duplicate( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_local_config_entry: MockConfigEntry, - data: dict[str, Any], -) -> None: - """Test import flow aborts on local duplicate data.""" - mock_local_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_import_flow_cloud_duplicate( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_cloud_config_entry: MockConfigEntry, -) -> None: - """Test import flow aborts on cloud duplicate data.""" - mock_cloud_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_TOKEN: "abcdef", - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/geofency/test_init.py 
b/tests/components/geofency/test_init.py index 3a98c6480bd6a1..3374039786895c 100644 --- a/tests/components/geofency/test_init.py +++ b/tests/components/geofency/test_init.py @@ -10,7 +10,6 @@ from homeassistant.components import zone from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.geofency import CONF_MOBILE_BEACONS, DOMAIN -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_LATITUDE, ATTR_LONGITUDE, @@ -18,6 +17,7 @@ STATE_NOT_HOME, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component diff --git a/tests/components/glances/test_config_flow.py b/tests/components/glances/test_config_flow.py index 0fabc387a4f633..ae8c2e1d51ea36 100644 --- a/tests/components/glances/test_config_flow.py +++ b/tests/components/glances/test_config_flow.py @@ -11,6 +11,7 @@ from homeassistant import config_entries from homeassistant.components import glances +from homeassistant.const import CONF_NAME, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -92,7 +93,10 @@ async def test_reauth_success(hass: HomeAssistant) -> None: result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "username"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "username", + } result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -123,7 +127,10 @@ async def test_reauth_fails( result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "username"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "username", + } result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/go2rtc/__init__.py b/tests/components/go2rtc/__init__.py index 20cbd67d571e4b..0971541efa5296 100644 --- a/tests/components/go2rtc/__init__.py +++ b/tests/components/go2rtc/__init__.py @@ -1,13 +1 @@ """Go2rtc tests.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/go2rtc/conftest.py b/tests/components/go2rtc/conftest.py index 02c1b3b908c255..42b363b232440c 100644 --- a/tests/components/go2rtc/conftest.py +++ b/tests/components/go2rtc/conftest.py @@ -3,55 +3,90 @@ from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch -from go2rtc_client.client import _StreamClient, _WebRTCClient +from go2rtc_client.rest import _StreamClient, _WebRTCClient import pytest -from homeassistant.components.go2rtc.const import CONF_BINARY, DOMAIN -from homeassistant.const import CONF_HOST +from homeassistant.components.go2rtc.server import Server -from tests.common import 
MockConfigEntry +GO2RTC_PATH = "homeassistant.components.go2rtc" @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.go2rtc.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_client() -> Generator[AsyncMock]: - """Mock a go2rtc client.""" +def rest_client() -> Generator[AsyncMock]: + """Mock a go2rtc rest client.""" with ( patch( - "homeassistant.components.go2rtc.Go2RtcClient", + "homeassistant.components.go2rtc.Go2RtcRestClient", ) as mock_client, - patch( - "homeassistant.components.go2rtc.config_flow.Go2RtcClient", - new=mock_client, - ), + patch("homeassistant.components.go2rtc.server.Go2RtcRestClient", mock_client), ): client = mock_client.return_value - client.streams = Mock(spec_set=_StreamClient) + client.streams = streams = Mock(spec_set=_StreamClient) + streams.list.return_value = {} + client.validate_server_version = AsyncMock() client.webrtc = Mock(spec_set=_WebRTCClient) yield client @pytest.fixture -def mock_server() -> Generator[Mock]: - """Mock a go2rtc server.""" - with patch("homeassistant.components.go2rtc.Server", autoSpec=True) as mock_server: - yield mock_server +def ws_client() -> Generator[Mock]: + """Mock a go2rtc websocket client.""" + with patch( + "homeassistant.components.go2rtc.Go2RtcWsClient", autospec=True + ) as ws_client_mock: + yield ws_client_mock.return_value + + +@pytest.fixture +def server_stdout() -> list[str]: + """Server stdout lines.""" + return [ + "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5", + "09:00:03.466 INF config path=/tmp/go2rtc.yaml", + "09:00:03.467 INF [rtsp] listen addr=:8554", + "09:00:03.467 INF [api] listen addr=127.0.0.1:1984", + "09:00:03.467 INF [webrtc] listen addr=:8555/tcp", + ] + + +@pytest.fixture +def mock_create_subprocess(server_stdout: list[str]) -> Generator[AsyncMock]: + """Mock create_subprocess_exec.""" + with patch(f"{GO2RTC_PATH}.server.asyncio.create_subprocess_exec") as mock_subproc: + subproc = AsyncMock() + subproc.terminate = Mock() + subproc.kill = Mock() + subproc.returncode = None + # Simulate process output + subproc.stdout.__aiter__.return_value = iter( + [f"{entry}\n".encode() for entry in server_stdout] + ) + mock_subproc.return_value = subproc + yield mock_subproc @pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=DOMAIN, - data={CONF_HOST: "http://localhost:1984/", CONF_BINARY: "/usr/bin/go2rtc"}, - ) +def server_start(mock_create_subprocess: AsyncMock) -> Generator[AsyncMock]: + """Mock start of a go2rtc server.""" + with patch( + f"{GO2RTC_PATH}.server.Server.start", wraps=Server.start, autospec=True + ) as mock_server_start: + yield mock_server_start + + +@pytest.fixture +def server_stop() -> Generator[AsyncMock]: + """Mock stop of a go2rtc server.""" + with ( + patch( + f"{GO2RTC_PATH}.server.Server.stop", wraps=Server.stop, autospec=True + ) as mock_server_stop, + ): + yield mock_server_stop + + +@pytest.fixture +def server(server_start: AsyncMock, server_stop: AsyncMock) -> Generator[AsyncMock]: + """Mock a go2rtc server.""" + with patch(f"{GO2RTC_PATH}.Server", wraps=Server) as mock_server: + yield mock_server diff --git a/tests/components/go2rtc/test_config_flow.py b/tests/components/go2rtc/test_config_flow.py index 25c993e7d3149a..c414af35b386cd 100644 --- a/tests/components/go2rtc/test_config_flow.py +++ 
b/tests/components/go2rtc/test_config_flow.py @@ -1,156 +1,45 @@ -"""Tests for the Go2rtc config flow.""" +"""Test the Home Assistant Cloud config flow.""" -from unittest.mock import Mock, patch +from unittest.mock import patch -import pytest - -from homeassistant.components.go2rtc.const import CONF_BINARY, DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST +from homeassistant.components.go2rtc.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -@pytest.mark.usefixtures("mock_client", "mock_setup_entry") -async def test_single_instance_allowed( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Test that flow will abort if already configured.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - +async def test_config_flow(hass: HomeAssistant) -> None: + """Test create cloud entry.""" -@pytest.mark.usefixtures("mock_setup_entry") -async def test_docker_with_binary( - hass: HomeAssistant, -) -> None: - """Test config flow, where HA is running in docker with a go2rtc binary available.""" - binary = "/usr/bin/go2rtc" with ( patch( - "homeassistant.components.go2rtc.config_flow.is_docker_env", - return_value=True, - ), + "homeassistant.components.go2rtc.async_setup", return_value=True + ) as mock_setup, patch( - "homeassistant.components.go2rtc.config_flow.shutil.which", - return_value=binary, - ), + "homeassistant.components.go2rtc.async_setup_entry", + return_value=True, + ) as mock_setup_entry, ): result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, + DOMAIN, context={"source": "system"} ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "go2rtc" - assert result["data"] == { - CONF_BINARY: binary, - CONF_HOST: "http://localhost:1984/", - } - + assert result["data"] == {} + await hass.async_block_till_done() -@pytest.mark.usefixtures("mock_setup_entry", "mock_client") -@pytest.mark.parametrize( - ("is_docker_env", "shutil_which"), - [ - (True, None), - (False, None), - (False, "/usr/bin/go2rtc"), - ], -) -async def test_config_flow_host( - hass: HomeAssistant, - is_docker_env: bool, - shutil_which: str | None, -) -> None: - """Test config flow with host input.""" - with ( - patch( - "homeassistant.components.go2rtc.config_flow.is_docker_env", - return_value=is_docker_env, - ), - patch( - "homeassistant.components.go2rtc.config_flow.shutil.which", - return_value=shutil_which, - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "host" - host = "http://go2rtc.local:1984/" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: host}, - ) + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "go2rtc" - assert result["data"] == { - CONF_HOST: host, - } +async def test_multiple_entries(hass: HomeAssistant) -> None: + """Test creating multiple cloud entries.""" + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) 
-@pytest.mark.usefixtures("mock_setup_entry") -async def test_flow_errors( - hass: HomeAssistant, - mock_client: Mock, -) -> None: - """Test flow errors.""" - with ( - patch( - "homeassistant.components.go2rtc.config_flow.is_docker_env", - return_value=False, - ), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "host" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "go2rtc.local:1984/"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"host": "invalid_url_schema"} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "http://"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"host": "invalid_url"} - - host = "http://go2rtc.local:1984/" - mock_client.streams.list.side_effect = Exception - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: host}, - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"host": "cannot_connect"} - - mock_client.streams.list.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: host}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "go2rtc" - assert result["data"] == { - CONF_HOST: host, - } + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "system"} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/go2rtc/test_init.py b/tests/components/go2rtc/test_init.py index afd336dc2b8302..ec5867761428a8 100644 --- a/tests/components/go2rtc/test_init.py +++ b/tests/components/go2rtc/test_init.py @@ -1,27 +1,47 @@ """The tests for the go2rtc component.""" -from collections.abc import Callable -from unittest.mock import AsyncMock, Mock - -from go2rtc_client import Stream, WebRTCSdpAnswer, WebRTCSdpOffer +from collections.abc import Callable, Generator +import logging +from typing import NamedTuple +from unittest.mock import AsyncMock, Mock, call, patch + +from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError +from go2rtc_client import Stream +from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError from go2rtc_client.models import Producer +from go2rtc_client.ws import ( + ReceiveMessages, + WebRTCAnswer, + WebRTCCandidate, + WebRTCOffer, + WsError, +) import pytest +from webrtc_models import RTCIceCandidate from homeassistant.components.camera import ( DOMAIN as CAMERA_DOMAIN, Camera, CameraEntityFeature, + StreamType, + WebRTCAnswer as HAWebRTCAnswer, + WebRTCCandidate as HAWebRTCCandidate, + WebRTCError, + WebRTCMessage, + WebRTCSendMessage, ) -from homeassistant.components.camera.const import StreamType -from homeassistant.components.camera.helper import get_camera_from_entity_id +from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN from homeassistant.components.go2rtc import WebRTCProvider -from homeassistant.components.go2rtc.const import DOMAIN +from homeassistant.components.go2rtc.const import ( + CONF_DEBUG_UI, + DEBUG_UI_URL_MESSAGE, + DOMAIN, +) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_URL from 
homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from . import setup_integration +from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component from tests.common import ( MockConfigEntry, @@ -64,12 +84,6 @@ async def stream_source(self) -> str | None: return self._stream_source -@pytest.fixture -def integration_entity() -> MockCamera: - """Mock Camera Entity.""" - return MockCamera() - - @pytest.fixture def integration_config_entry(hass: HomeAssistant) -> ConfigEntry: """Test mock config entry.""" @@ -78,12 +92,70 @@ def integration_config_entry(hass: HomeAssistant) -> ConfigEntry: return entry +@pytest.fixture(name="go2rtc_binary") +def go2rtc_binary_fixture() -> str: + """Fixture to provide go2rtc binary name.""" + return "/usr/bin/go2rtc" + + +@pytest.fixture +def mock_get_binary(go2rtc_binary) -> Generator[Mock]: + """Mock _get_binary.""" + with patch( + "homeassistant.components.go2rtc.shutil.which", + return_value=go2rtc_binary, + ) as mock_which: + yield mock_which + + +@pytest.fixture(name="has_go2rtc_entry") +def has_go2rtc_entry_fixture() -> bool: + """Fixture to control if a go2rtc config entry should be created.""" + return True + + +@pytest.fixture +def mock_go2rtc_entry(hass: HomeAssistant, has_go2rtc_entry: bool) -> None: + """Mock a go2rtc onfig entry.""" + if not has_go2rtc_entry: + return + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) + + +@pytest.fixture(name="is_docker_env") +def is_docker_env_fixture() -> bool: + """Fixture to provide is_docker_env return value.""" + return True + + +@pytest.fixture +def mock_is_docker_env(is_docker_env) -> Generator[Mock]: + """Mock is_docker_env.""" + with patch( + "homeassistant.components.go2rtc.is_docker_env", + return_value=is_docker_env, + ) as mock_is_docker_env: + yield mock_is_docker_env + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + rest_client: AsyncMock, + mock_is_docker_env, + mock_get_binary, + server: Mock, +) -> None: + """Initialize the go2rtc integration.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + @pytest.fixture async def init_test_integration( hass: HomeAssistant, integration_config_entry: ConfigEntry, - integration_entity: MockCamera, -) -> None: +) -> MockCamera: """Initialize components.""" async def async_setup_entry_init( @@ -112,8 +184,9 @@ async def async_unload_entry_init( async_unload_entry=async_unload_entry_init, ), ) + test_camera = MockCamera() setup_test_component_platform( - hass, CAMERA_DOMAIN, [integration_entity], from_config_entry=True + hass, CAMERA_DOMAIN, [test_camera], from_config_entry=True ) mock_platform(hass, f"{TEST_DOMAIN}.config_flow", Mock()) @@ -121,99 +194,695 @@ async def async_unload_entry_init( assert await hass.config_entries.async_setup(integration_config_entry.entry_id) await hass.async_block_till_done() - return integration_config_entry + return test_camera -@pytest.mark.usefixtures("init_test_integration") -async def _test_setup( +async def _test_setup_and_signaling( hass: HomeAssistant, - mock_client: AsyncMock, - mock_config_entry: MockConfigEntry, + rest_client: AsyncMock, + ws_client: Mock, + config: ConfigType, after_setup_fn: Callable[[], None], + camera: MockCamera, ) -> None: """Test the go2rtc config entry.""" - entity_id = "camera.test" - camera = get_camera_from_entity_id(hass, entity_id) + entity_id = camera.entity_id assert camera.frontend_stream_type == StreamType.HLS - await 
setup_integration(hass, mock_config_entry) + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == ConfigEntryState.LOADED after_setup_fn() - mock_client.webrtc.forward_whep_sdp_offer.return_value = WebRTCSdpAnswer(ANSWER_SDP) + receive_message_callback = Mock(spec_set=WebRTCSendMessage) + + async def test() -> None: + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + ws_client.subscribe.assert_called_once() + + # Simulate the answer from the go2rtc server + callback = ws_client.subscribe.call_args[0][0] + callback(WebRTCAnswer(ANSWER_SDP)) + receive_message_callback.assert_called_once_with(HAWebRTCAnswer(ANSWER_SDP)) + + await test() + + rest_client.streams.add.assert_called_once_with( + entity_id, ["rtsp://stream", f"ffmpeg:{camera.entity_id}#audio=opus"] + ) + + # Stream exists but the source is different + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + entity_id: Stream([Producer("rtsp://different")]) + } - answer = await camera.async_handle_web_rtc_offer(OFFER_SDP) - assert answer == ANSWER_SDP + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() - mock_client.webrtc.forward_whep_sdp_offer.assert_called_once_with( - entity_id, WebRTCSdpOffer(OFFER_SDP) + rest_client.streams.add.assert_called_once_with( + entity_id, ["rtsp://stream", f"ffmpeg:{camera.entity_id}#audio=opus"] ) - mock_client.streams.add.assert_called_once_with(entity_id, "rtsp://stream") # If the stream is already added, the stream should not be added again. 
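
# A minimal, dependency-free sketch of the callback-capture idiom that the
# signaling assertions above rely on: the code under test registers a listener
# on the mocked websocket client, and the test pulls that listener back out of
# the mock's call record to emulate a message pushed by the go2rtc server.
# The names "sketch_callback_capture" and "fake answer" are illustrative only
# and do not appear in the patch.
from unittest.mock import Mock


def sketch_callback_capture() -> None:
    ws_client = Mock()
    received: list[str] = []

    # Production code would subscribe its own handler when an offer is sent.
    ws_client.subscribe(received.append)

    # call_args[0] is the positional-argument tuple of the last call, so
    # call_args[0][0] is the handler that was registered on the mock.
    listener = ws_client.subscribe.call_args[0][0]

    # Invoking the recovered handler simulates the server sending an answer.
    listener("fake answer")
    assert received == ["fake answer"]


if __name__ == "__main__":
    sketch_callback_capture()
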
- mock_client.streams.add.reset_mock() - mock_client.streams.list.return_value = { + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { entity_id: Stream([Producer("rtsp://stream")]) } - answer = await camera.async_handle_web_rtc_offer(OFFER_SDP) - assert answer == ANSWER_SDP - mock_client.streams.add.assert_not_called() - assert mock_client.webrtc.forward_whep_sdp_offer.call_count == 2 - assert isinstance(camera._webrtc_providers[0], WebRTCProvider) + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + rest_client.streams.add.assert_not_called() + assert isinstance(camera._webrtc_provider, WebRTCProvider) # Set stream source to None and provider should be skipped - mock_client.streams.list.return_value = {} + rest_client.streams.list.return_value = {} + receive_message_callback.reset_mock() camera.set_stream_source(None) - with pytest.raises( - HomeAssistantError, - match="WebRTC offer was not accepted by the supported providers", - ): - await camera.async_handle_web_rtc_offer(OFFER_SDP) - - # Remove go2rtc config entry - assert mock_config_entry.state is ConfigEntryState.LOADED - await hass.config_entries.async_remove(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - assert camera._webrtc_providers == [] - assert camera.frontend_stream_type == StreamType.HLS + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + receive_message_callback.assert_called_once_with( + WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source") + ) -@pytest.mark.usefixtures("init_test_integration") -async def test_setup_go_binary( +@pytest.mark.usefixtures( + "init_test_integration", + "mock_get_binary", + "mock_is_docker_env", + "mock_go2rtc_entry", +) +@pytest.mark.parametrize( + ("config", "ui_enabled"), + [ + ({DOMAIN: {}}, False), + ({DOMAIN: {CONF_DEBUG_UI: True}}, True), + ({DEFAULT_CONFIG_DOMAIN: {}}, False), + ({DEFAULT_CONFIG_DOMAIN: {}, DOMAIN: {CONF_DEBUG_UI: True}}, True), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +async def test_setup_managed( hass: HomeAssistant, - mock_client: AsyncMock, - mock_server: Mock, - mock_config_entry: MockConfigEntry, + rest_client: AsyncMock, + ws_client: Mock, + server: AsyncMock, + server_start: Mock, + server_stop: Mock, + init_test_integration: MockCamera, + has_go2rtc_entry: bool, + config: ConfigType, + ui_enabled: bool, ) -> None: - """Test the go2rtc config entry with binary.""" + """Test the go2rtc setup with managed go2rtc instance.""" + assert (len(hass.config_entries.async_entries(DOMAIN)) == 1) == has_go2rtc_entry + camera = init_test_integration - def after_setup() -> None: - mock_server.assert_called_once_with("/usr/bin/go2rtc") - mock_server.return_value.start.assert_called_once() + entity_id = camera.entity_id + stream_name_original = f"{camera.entity_id}_original" + assert camera.frontend_stream_type == StreamType.HLS - await _test_setup(hass, mock_client, mock_config_entry, after_setup) + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == ConfigEntryState.LOADED + server.assert_called_once_with(hass, "/usr/bin/go2rtc", enable_ui=ui_enabled) + server_start.assert_called_once() - 
mock_server.return_value.stop.assert_called_once() + receive_message_callback = Mock(spec_set=WebRTCSendMessage) + async def test() -> None: + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + ws_client.subscribe.assert_called_once() + + # Simulate the answer from the go2rtc server + callback = ws_client.subscribe.call_args[0][0] + callback(WebRTCAnswer(ANSWER_SDP)) + receive_message_callback.assert_called_once_with(HAWebRTCAnswer(ANSWER_SDP)) + + await test() + + stream_added_calls = [ + call(stream_name_original, "rtsp://stream"), + call( + entity_id, + [ + f"rtsp://127.0.0.1:18554/{stream_name_original}", + f"ffmpeg:{stream_name_original}#audio=opus", + ], + ), + ] + assert rest_client.streams.add.call_args_list == stream_added_calls + + # Stream original missing + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + entity_id: Stream( + [ + Producer(f"rtsp://127.0.0.1:18554/{stream_name_original}"), + Producer(f"ffmpeg:{stream_name_original}#audio=opus"), + ] + ) + } -@pytest.mark.usefixtures("init_test_integration") -async def test_setup_go( + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + assert rest_client.streams.add.call_args_list == stream_added_calls + + # Stream original source different + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + stream_name_original: Stream([Producer("rtsp://different")]), + entity_id: Stream( + [ + Producer(f"rtsp://127.0.0.1:18554/{stream_name_original}"), + Producer(f"ffmpeg:{stream_name_original}#audio=opus"), + ] + ), + } + + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + assert rest_client.streams.add.call_args_list == stream_added_calls + + # Stream source different + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + stream_name_original: Stream([Producer("rtsp://stream")]), + entity_id: Stream([Producer("rtsp://different")]), + } + + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + assert rest_client.streams.add.call_args_list == stream_added_calls + + # If the stream is already added, the stream should not be added again. 
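
# The managed-server assertions above (server start/stop, logged stdout) rest
# on the conftest fixture earlier in this diff that replaces
# asyncio.create_subprocess_exec with an AsyncMock. A self-contained sketch of
# the underlying trick, using only asyncio and unittest.mock: the mocked
# process gets an async-iterable stdout so startup output can be consumed with
# "async for". The log line below is a placeholder, not real go2rtc output.
import asyncio
from unittest.mock import AsyncMock


async def sketch_read_mocked_stdout() -> list[bytes]:
    fake_proc = AsyncMock()  # stands in for the spawned go2rtc process
    fake_proc.returncode = None
    # Any iterable assigned to __aiter__.return_value is wrapped by
    # unittest.mock into an async iterator, which is what "async for" needs.
    fake_proc.stdout.__aiter__.return_value = iter(
        [b"INF [api] listen addr=127.0.0.1:1984\n"]
    )

    # Code under test would read the (mocked) process output line by line.
    return [line async for line in fake_proc.stdout]


if __name__ == "__main__":
    assert asyncio.run(sketch_read_mocked_stdout())[0].startswith(b"INF")
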
+ rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + stream_name_original: Stream([Producer("rtsp://stream")]), + entity_id: Stream( + [ + Producer(f"rtsp://127.0.0.1:18554/{stream_name_original}"), + Producer(f"ffmpeg:{stream_name_original}#audio=opus"), + ] + ), + } + + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + rest_client.streams.add.assert_not_called() + assert isinstance(camera._webrtc_provider, WebRTCProvider) + + # Set stream source to None and provider should be skipped + rest_client.streams.list.return_value = {} + receive_message_callback.reset_mock() + camera.set_stream_source(None) + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + receive_message_callback.assert_called_once_with( + WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source") + ) + + await hass.async_stop() + server_stop.assert_called_once() + + +@pytest.mark.usefixtures("mock_go2rtc_entry") +@pytest.mark.parametrize( + ("go2rtc_binary", "is_docker_env"), + [ + ("/usr/bin/go2rtc", True), + (None, False), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +async def test_setup_self_hosted( hass: HomeAssistant, - mock_client: AsyncMock, - mock_server: Mock, + rest_client: AsyncMock, + ws_client: Mock, + server: Mock, + init_test_integration: MockCamera, + mock_get_binary: Mock, + mock_is_docker_env: Mock, + has_go2rtc_entry: bool, +) -> None: + """Test the go2rtc with selfhosted go2rtc instance.""" + assert (len(hass.config_entries.async_entries(DOMAIN)) == 1) == has_go2rtc_entry + + config = {DOMAIN: {CONF_URL: "http://localhost:1984/"}} + camera = init_test_integration + + entity_id = camera.entity_id + assert camera.frontend_stream_type == StreamType.HLS + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == ConfigEntryState.LOADED + server.assert_not_called() + + receive_message_callback = Mock(spec_set=WebRTCSendMessage) + + async def test() -> None: + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + ws_client.subscribe.assert_called_once() + + # Simulate the answer from the go2rtc server + callback = ws_client.subscribe.call_args[0][0] + callback(WebRTCAnswer(ANSWER_SDP)) + receive_message_callback.assert_called_once_with(HAWebRTCAnswer(ANSWER_SDP)) + + await test() + + rest_client.streams.add.assert_called_once_with( + entity_id, ["rtsp://stream", f"ffmpeg:{camera.entity_id}#audio=opus"] + ) + + # Stream exists but the source is different + rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + entity_id: Stream([Producer("rtsp://different")]) + } + + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + rest_client.streams.add.assert_called_once_with( + entity_id, ["rtsp://stream", f"ffmpeg:{camera.entity_id}#audio=opus"] + ) + + # If the stream is already added, the stream should not be added again. 
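
# The managed and self-hosted variants above are selected through plain
# fixtures ("go2rtc_binary", "is_docker_env", "has_go2rtc_entry") that
# individual tests override with @pytest.mark.parametrize. A tiny standalone
# sketch of that pytest idiom with made-up fixture and test names; the real
# tests apply the same mechanism to their go2rtc fixtures.
import pytest


@pytest.fixture
def binary_path() -> str | None:
    """Default value used when a test does not parametrize the name."""
    return "/usr/bin/tool"


@pytest.fixture
def which_result(binary_path: str | None) -> str | None:
    """Derived fixture, comparable to mock_get_binary consuming go2rtc_binary."""
    return binary_path


def test_uses_fixture_default(which_result: str | None) -> None:
    assert which_result == "/usr/bin/tool"


@pytest.mark.parametrize("binary_path", [None])
def test_overrides_fixture(which_result: str | None) -> None:
    # Direct parametrization of a fixture name takes precedence over the
    # fixture body, including for fixtures that depend on it.
    assert which_result is None
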
+ rest_client.streams.add.reset_mock() + rest_client.streams.list.return_value = { + entity_id: Stream([Producer("rtsp://stream")]) + } + + receive_message_callback.reset_mock() + ws_client.reset_mock() + await test() + + rest_client.streams.add.assert_not_called() + assert isinstance(camera._webrtc_provider, WebRTCProvider) + + # Set stream source to None and provider should be skipped + rest_client.streams.list.return_value = {} + receive_message_callback.reset_mock() + camera.set_stream_source(None) + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_message_callback + ) + receive_message_callback.assert_called_once_with( + WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source") + ) + + mock_get_binary.assert_not_called() + server.assert_not_called() + + +class Callbacks(NamedTuple): + """Callbacks for the test.""" + + on_message: Mock + send_message: Mock + + +@pytest.fixture +async def message_callbacks( + ws_client: Mock, + init_test_integration: MockCamera, +) -> Callbacks: + """Prepare and return receive message callback.""" + receive_callback = Mock(spec_set=WebRTCSendMessage) + camera = init_test_integration + + await camera.async_handle_async_webrtc_offer( + OFFER_SDP, "session_id", receive_callback + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + ws_client.subscribe.assert_called_once() + + # Simulate messages from the go2rtc server + send_callback = ws_client.subscribe.call_args[0][0] + + return Callbacks(receive_callback, send_callback) + + +@pytest.mark.parametrize( + ("message", "expected_message"), + [ + ( + WebRTCCandidate("candidate"), + HAWebRTCCandidate(RTCIceCandidate("candidate")), + ), + ( + WebRTCAnswer(ANSWER_SDP), + HAWebRTCAnswer(ANSWER_SDP), + ), + ( + WsError("error"), + WebRTCError("go2rtc_webrtc_offer_failed", "error"), + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_receiving_messages_from_go2rtc_server( + message_callbacks: Callbacks, + message: ReceiveMessages, + expected_message: WebRTCMessage, +) -> None: + """Test receiving message from go2rtc server.""" + on_message, send_message = message_callbacks + + send_message(message) + on_message.assert_called_once_with(expected_message) + + +@pytest.mark.usefixtures("init_integration") +async def test_on_candidate( + ws_client: Mock, + init_test_integration: MockCamera, + caplog: pytest.LogCaptureFixture, ) -> None: - """Test the go2rtc config entry without binary.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - title=DOMAIN, - data={CONF_HOST: "http://localhost:1984/"}, + """Test frontend sending candidate to go2rtc server.""" + camera = init_test_integration + session_id = "session_id" + + # Session doesn't exist + await camera.async_on_webrtc_candidate(session_id, RTCIceCandidate("candidate")) + assert ( + "homeassistant.components.go2rtc", + logging.DEBUG, + f"Unknown session {session_id}. 
Ignoring candidate", + ) in caplog.record_tuples + caplog.clear() + + # Store session + await init_test_integration.async_handle_async_webrtc_offer( + OFFER_SDP, session_id, Mock() + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) ) + ws_client.reset_mock() + + await camera.async_on_webrtc_candidate(session_id, RTCIceCandidate("candidate")) + ws_client.send.assert_called_once_with(WebRTCCandidate("candidate")) + assert caplog.record_tuples == [] + + +@pytest.mark.usefixtures("init_integration") +async def test_close_session( + ws_client: Mock, + init_test_integration: MockCamera, +) -> None: + """Test closing session.""" + camera = init_test_integration + session_id = "session_id" + + # Session doesn't exist + with pytest.raises(KeyError): + camera.close_webrtc_session(session_id) + ws_client.close.assert_not_called() + + # Store session + await init_test_integration.async_handle_async_webrtc_offer( + OFFER_SDP, session_id, Mock() + ) + ws_client.send.assert_called_once_with( + WebRTCOffer( + OFFER_SDP, + camera.async_get_webrtc_client_configuration().configuration.ice_servers, + ) + ) + + # Close session + camera.close_webrtc_session(session_id) + ws_client.close.assert_called_once() - def after_setup() -> None: - mock_server.assert_not_called() + # Close again should raise an error + ws_client.reset_mock() + with pytest.raises(KeyError): + camera.close_webrtc_session(session_id) + ws_client.close.assert_not_called() - await _test_setup(hass, mock_client, config_entry, after_setup) - mock_server.assert_not_called() +ERR_BINARY_NOT_FOUND = "Could not find go2rtc docker binary" +ERR_CONNECT = "Could not connect to go2rtc instance" +ERR_CONNECT_RETRY = ( + "Could not connect to go2rtc instance on http://localhost:1984/; Retrying" +) +ERR_START_SERVER = "Could not start go2rtc server" +ERR_UNSUPPORTED_VERSION = "The go2rtc server version is not supported" +_INVALID_CONFIG = "Invalid config for 'go2rtc': " +ERR_INVALID_URL = _INVALID_CONFIG + "invalid url" +ERR_EXCLUSIVE = _INVALID_CONFIG + DEBUG_UI_URL_MESSAGE +ERR_URL_REQUIRED = "Go2rtc URL required in non-docker installs" + + +@pytest.mark.parametrize( + ("config", "go2rtc_binary", "is_docker_env"), + [ + ({}, None, False), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_non_user_setup_with_error( + hass: HomeAssistant, + config: ConfigType, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test setup integration does not fail if not setup by user.""" + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + assert not hass.config_entries.async_entries(DOMAIN) + + +@pytest.mark.parametrize( + ("config", "go2rtc_binary", "is_docker_env", "expected_log_message"), + [ + ({DEFAULT_CONFIG_DOMAIN: {}}, None, True, ERR_BINARY_NOT_FOUND), + ({DEFAULT_CONFIG_DOMAIN: {}}, "/usr/bin/go2rtc", True, ERR_START_SERVER), + ({DOMAIN: {}}, None, False, ERR_URL_REQUIRED), + ({DOMAIN: {}}, None, True, ERR_BINARY_NOT_FOUND), + ({DOMAIN: {}}, "/usr/bin/go2rtc", True, ERR_START_SERVER), + ({DOMAIN: {CONF_URL: "invalid"}}, None, True, ERR_INVALID_URL), + ( + {DOMAIN: {CONF_URL: "http://localhost:1984", CONF_DEBUG_UI: True}}, + None, + True, + ERR_EXCLUSIVE, + ), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) 
+@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_setup_error( + hass: HomeAssistant, + config: ConfigType, + caplog: pytest.LogCaptureFixture, + has_go2rtc_entry: bool, + expected_log_message: str, +) -> None: + """Test setup integration fails.""" + + assert not await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + assert bool(hass.config_entries.async_entries(DOMAIN)) == has_go2rtc_entry + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize( + ("config", "go2rtc_binary", "is_docker_env", "expected_log_message"), + [ + ({DOMAIN: {CONF_URL: "http://localhost:1984/"}}, None, True, ERR_CONNECT), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_setup_entry_error( + hass: HomeAssistant, + config: ConfigType, + caplog: pytest.LogCaptureFixture, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == ConfigEntryState.SETUP_ERROR + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize("config", [{DOMAIN: {CONF_URL: "http://localhost:1984/"}}]) +@pytest.mark.parametrize( + ("cause", "expected_config_entry_state", "expected_log_message"), + [ + (ClientConnectionError(), ConfigEntryState.SETUP_RETRY, ERR_CONNECT_RETRY), + (ServerConnectionError(), ConfigEntryState.SETUP_RETRY, ERR_CONNECT_RETRY), + (None, ConfigEntryState.SETUP_ERROR, ERR_CONNECT), + (Exception(), ConfigEntryState.SETUP_ERROR, ERR_CONNECT), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_retryable_setup_entry_error_custom_server( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + rest_client: AsyncMock, + config: ConfigType, + cause: Exception, + expected_config_entry_state: ConfigEntryState, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + go2rtc_error = Go2RtcClientError() + go2rtc_error.__cause__ = cause + rest_client.validate_server_version.side_effect = go2rtc_error + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == expected_config_entry_state + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize("config", [{DOMAIN: {}}, {DEFAULT_CONFIG_DOMAIN: {}}]) +@pytest.mark.parametrize( + ("cause", "expected_config_entry_state", "expected_log_message"), + [ + (ClientConnectionError(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + (ServerConnectionError(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + (None, ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + (Exception(), ConfigEntryState.NOT_LOADED, ERR_START_SERVER), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def 
test_setup_with_retryable_setup_entry_error_default_server( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + rest_client: AsyncMock, + has_go2rtc_entry: bool, + config: ConfigType, + cause: Exception, + expected_config_entry_state: ConfigEntryState, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + go2rtc_error = Go2RtcClientError() + go2rtc_error.__cause__ = cause + rest_client.validate_server_version.side_effect = go2rtc_error + assert not await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == has_go2rtc_entry + for config_entry in config_entries: + assert config_entry.state == expected_config_entry_state + assert expected_log_message in caplog.text + + +@pytest.mark.parametrize("config", [{DOMAIN: {}}, {DEFAULT_CONFIG_DOMAIN: {}}]) +@pytest.mark.parametrize( + ("go2rtc_error", "expected_config_entry_state", "expected_log_message"), + [ + ( + Go2RtcVersionError("1.9.4", "1.9.5", "2.0.0"), + ConfigEntryState.SETUP_RETRY, + ERR_UNSUPPORTED_VERSION, + ), + ], +) +@pytest.mark.parametrize("has_go2rtc_entry", [True, False]) +@pytest.mark.usefixtures( + "mock_get_binary", "mock_go2rtc_entry", "mock_is_docker_env", "server" +) +async def test_setup_with_version_error( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + rest_client: AsyncMock, + config: ConfigType, + go2rtc_error: Exception, + expected_config_entry_state: ConfigEntryState, + expected_log_message: str, +) -> None: + """Test setup integration entry fails.""" + rest_client.validate_server_version.side_effect = [None, go2rtc_error] + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + config_entries = hass.config_entries.async_entries(DOMAIN) + assert len(config_entries) == 1 + assert config_entries[0].state == expected_config_entry_state + assert expected_log_message in caplog.text + + +async def test_config_entry_remove(hass: HomeAssistant) -> None: + """Test config entry removed when neither default_config nor go2rtc is in config.""" + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert not await hass.config_entries.async_setup(config_entry.entry_id) + assert len(hass.config_entries.async_entries(DOMAIN)) == 0 diff --git a/tests/components/go2rtc/test_server.py b/tests/components/go2rtc/test_server.py index 1617ea55015392..e4fe3993f3cddf 100644 --- a/tests/components/go2rtc/test_server.py +++ b/tests/components/go2rtc/test_server.py @@ -2,90 +2,392 @@ import asyncio from collections.abc import Generator +import logging import subprocess -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from homeassistant.components.go2rtc.server import Server +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError TEST_BINARY = "/bin/go2rtc" @pytest.fixture -def server() -> Server: +def enable_ui() -> bool: + """Fixture to enable the UI.""" + return False + + +@pytest.fixture +def server(hass: HomeAssistant, enable_ui: bool) -> Server: """Fixture to initialize the Server.""" - return Server(binary=TEST_BINARY) + return Server(hass, binary=TEST_BINARY, enable_ui=enable_ui) @pytest.fixture -def mock_tempfile() -> Generator[MagicMock]: +def mock_tempfile() -> 
Generator[Mock]:
     """Fixture to mock NamedTemporaryFile."""
     with patch(
-        "homeassistant.components.go2rtc.server.NamedTemporaryFile"
+        "homeassistant.components.go2rtc.server.NamedTemporaryFile", autospec=True
     ) as mock_tempfile:
-        mock_tempfile.return_value.__enter__.return_value.name = "test.yaml"
-        yield mock_tempfile
+        file = mock_tempfile.return_value.__enter__.return_value
+        file.name = "test.yaml"
+        yield file
 
 
-@pytest.fixture
-def mock_popen() -> Generator[MagicMock]:
-    """Fixture to mock subprocess.Popen."""
-    with patch("homeassistant.components.go2rtc.server.subprocess.Popen") as mock_popen:
-        yield mock_popen
+def _assert_server_output_logged(
+    server_stdout: list[str],
+    caplog: pytest.LogCaptureFixture,
+    loglevel: int,
+    expect_logged: bool,
+) -> None:
+    """Check server stdout was logged."""
+    for entry in server_stdout:
+        assert (
+            (
+                "homeassistant.components.go2rtc.server",
+                loglevel,
+                entry,
+            )
+            in caplog.record_tuples
+        ) is expect_logged
 
 
-@pytest.mark.usefixtures("mock_tempfile")
-async def test_server_run_success(mock_popen: MagicMock, server: Server) -> None:
+def assert_server_output_logged(
+    server_stdout: list[str],
+    caplog: pytest.LogCaptureFixture,
+    loglevel: int,
+) -> None:
+    """Check server stdout was logged."""
+    _assert_server_output_logged(server_stdout, caplog, loglevel, True)
+
+
+def assert_server_output_not_logged(
+    server_stdout: list[str],
+    caplog: pytest.LogCaptureFixture,
+    loglevel: int,
+) -> None:
+    """Check server stdout was not logged."""
+    _assert_server_output_logged(server_stdout, caplog, loglevel, False)
+
+
+@pytest.mark.parametrize(
+    ("enable_ui", "api_ip"),
+    [
+        (True, ""),
+        (False, "127.0.0.1"),
+    ],
+)
+async def test_server_run_success(
+    mock_create_subprocess: AsyncMock,
+    rest_client: AsyncMock,
+    server_stdout: list[str],
+    server: Server,
+    caplog: pytest.LogCaptureFixture,
+    mock_tempfile: Mock,
+    api_ip: str,
+) -> None:
     """Test that the server runs successfully."""
-    mock_process = MagicMock()
-    mock_process.poll.return_value = None  # Simulate process running
-    # Simulate process output
-    mock_process.stdout.readline.side_effect = [
-        b"log line 1\n",
-        b"log line 2\n",
-        b"",
-    ]
-    mock_popen.return_value.__enter__.return_value = mock_process
-
-    server.start()
-    await asyncio.sleep(0)
+    await server.start()
 
     # Check that Popen was called with the right arguments
-    mock_popen.assert_called_once_with(
-        [TEST_BINARY, "-c", "webrtc.ice_servers=[]", "-c", "test.yaml"],
+    mock_create_subprocess.assert_called_once_with(
+        TEST_BINARY,
+        "-c",
+        "test.yaml",
         stdout=subprocess.PIPE,
         stderr=subprocess.STDOUT,
+        close_fds=False,
     )
 
-    # Check that server read the log lines
-    assert mock_process.stdout.readline.call_count == 3
+    # Verify that the config file was written
+    mock_tempfile.write.assert_called_once_with(
+        f"""# This file is managed by Home Assistant
+# Do not edit it manually
 
-    server.stop()
-    mock_process.terminate.assert_called_once()
-    assert not server.is_alive()
+api:
+  listen: "{api_ip}:11984"
+
+rtsp:
+  listen: "127.0.0.1:18554"
+
+webrtc:
+  listen: ":18555/tcp"
+  ice_servers: []
+""".encode()
+    )
+
+    # Verify go2rtc binary stdout was logged with debug level
+    assert_server_output_logged(server_stdout, caplog, logging.DEBUG)
+
+    await server.stop()
+    mock_create_subprocess.return_value.terminate.assert_called_once()
+
+    # Verify go2rtc binary stdout was not logged with warning level
+    assert_server_output_not_logged(server_stdout, caplog, logging.WARNING)
 
 
 @pytest.mark.usefixtures("mock_tempfile")
-def test_server_run_process_timeout(mock_popen: MagicMock, server: Server) -> None:
+async def test_server_timeout_on_stop(
+    mock_create_subprocess: MagicMock, rest_client: AsyncMock, server: Server
+) -> None:
     """Test server run where the process takes too long to terminate."""
+    # Start server thread
+    await server.start()
+
+    async def sleep() -> None:
+        await asyncio.sleep(1)
 
-    mock_process = MagicMock()
-    mock_process.poll.return_value = None  # Simulate process running
-    # Simulate process output
-    mock_process.stdout.readline.side_effect = [
-        b"log line 1\n",
-        b"",
-    ]  # Simulate timeout
-    mock_process.wait.side_effect = subprocess.TimeoutExpired(cmd="go2rtc", timeout=5)
-    mock_popen.return_value.__enter__.return_value = mock_process
+    mock_create_subprocess.return_value.wait.side_effect = sleep
 
-    # Start server thread
-    server.start()
-    server.stop()
+    with patch("homeassistant.components.go2rtc.server._TERMINATE_TIMEOUT", new=0.1):
+        await server.stop()
 
     # Ensure terminate and kill were called due to timeout
-    mock_process.terminate.assert_called_once()
-    mock_process.kill.assert_called_once()
-    assert not server.is_alive()
+    mock_create_subprocess.return_value.terminate.assert_called_once()
+    mock_create_subprocess.return_value.kill.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    "server_stdout",
+    [
+        [
+            "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5",
+            "09:00:03.466 INF config path=/tmp/go2rtc.yaml",
+        ]
+    ],
+)
+@pytest.mark.usefixtures("mock_tempfile")
+async def test_server_failed_to_start(
+    mock_create_subprocess: MagicMock,
+    server_stdout: list[str],
+    server: Server,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test server, where an exception is raised if the expected log entry was not received until the timeout."""
+    with (
+        patch("homeassistant.components.go2rtc.server._SETUP_TIMEOUT", new=0.1),
+        pytest.raises(HomeAssistantError, match="Go2rtc server didn't start correctly"),
+    ):
+        await server.start()
+
+    # Verify go2rtc binary stdout was logged with debug and warning level
+    assert_server_output_logged(server_stdout, caplog, logging.DEBUG)
+    assert_server_output_logged(server_stdout, caplog, logging.WARNING)
+
+    assert (
+        "homeassistant.components.go2rtc.server",
+        logging.ERROR,
+        "Go2rtc server didn't start correctly",
+    ) in caplog.record_tuples
+
+    # Check that Popen was called with the right arguments
+    mock_create_subprocess.assert_called_once_with(
+        TEST_BINARY,
+        "-c",
+        "test.yaml",
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        close_fds=False,
+    )
+
+
+@pytest.mark.parametrize(
+    ("server_stdout", "expected_loglevel"),
+    [
+        (
+            [
+                "09:00:03.466 TRC [api] register path path=/",
+                "09:00:03.466 DBG build vcs.time=2024-10-28T19:47:55Z version=go1.23.2",
+                "09:00:03.466 INF go2rtc platform=linux/amd64 revision=780f378 version=1.9.5",
+                "09:00:03.467 INF [api] listen addr=127.0.0.1:1984",
+                "09:00:03.466 WRN warning message",
+                '09:00:03.466 ERR [api] listen error="listen tcp 127.0.0.1:11984: bind: address already in use"',
+                "09:00:03.466 FTL fatal message",
+                "09:00:03.466 PNC panic message",
+                "exit with signal: interrupt",  # Example of stderr write
+            ],
+            [
+                logging.DEBUG,
+                logging.DEBUG,
+                logging.DEBUG,
+                logging.DEBUG,
+                logging.WARNING,
+                logging.WARNING,
+                logging.ERROR,
+                logging.ERROR,
+                logging.WARNING,
+            ],
+        )
+    ],
+)
+@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0)
+async def test_log_level_mapping(
+    hass: HomeAssistant,
+    mock_create_subprocess: MagicMock,
+
server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, + expected_loglevel: list[int], +) -> None: + """Log level mapping.""" + evt = asyncio.Event() + + async def wait_event() -> None: + await evt.wait() + + mock_create_subprocess.return_value.wait.side_effect = wait_event + + await server.start() + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + + # Verify go2rtc binary stdout was logged with default level + for i, entry in enumerate(server_stdout): + assert ( + "homeassistant.components.go2rtc.server", + expected_loglevel[i], + entry, + ) in caplog.record_tuples + + evt.set() + await asyncio.sleep(0.1) + await hass.async_block_till_done() + + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_process_exit( + hass: HomeAssistant, + mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the server is restarted when it exits.""" + evt = asyncio.Event() + + async def wait_event() -> None: + await evt.wait() + + mock_create_subprocess.return_value.wait.side_effect = wait_event + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_not_awaited() + + # Verify go2rtc binary stdout was not yet logged with warning level + assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + evt.set() + await asyncio.sleep(0.1) + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_process_error( + hass: HomeAssistant, + mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the server is restarted on error.""" + mock_create_subprocess.return_value.wait.side_effect = [Exception, None, None, None] + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + # Verify go2rtc binary stdout was not yet logged with warning level + assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_api_error( + hass: HomeAssistant, + mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the server is restarted on error.""" + rest_client.streams.list.side_effect = Exception + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + # Verify go2rtc binary stdout was not yet logged with warning level + assert_server_output_not_logged(server_stdout, caplog, 
logging.WARNING) + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + await server.stop() + + +@patch("homeassistant.components.go2rtc.server._RESPAWN_COOLDOWN", 0) +async def test_server_restart_error( + hass: HomeAssistant, + mock_create_subprocess: AsyncMock, + server_stdout: list[str], + rest_client: AsyncMock, + server: Server, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test error handling when exception is raised during restart.""" + rest_client.streams.list.side_effect = Exception + mock_create_subprocess.return_value.terminate.side_effect = [Exception, None] + + await server.start() + mock_create_subprocess.assert_awaited_once() + mock_create_subprocess.reset_mock() + + # Verify go2rtc binary stdout was not yet logged with warning level + assert_server_output_not_logged(server_stdout, caplog, logging.WARNING) + + await asyncio.sleep(0.1) + await hass.async_block_till_done() + mock_create_subprocess.assert_awaited_once() + + # Verify go2rtc binary stdout was logged with warning level + assert_server_output_logged(server_stdout, caplog, logging.WARNING) + + assert "Unexpected error when restarting go2rtc server" in caplog.text + + await server.stop() diff --git a/tests/components/gogogate2/test_cover.py b/tests/components/gogogate2/test_cover.py index 001212fa17b9e7..42ee1f6f7312f2 100644 --- a/tests/components/gogogate2/test_cover.py +++ b/tests/components/gogogate2/test_cover.py @@ -20,6 +20,7 @@ DOMAIN as COVER_DOMAIN, CoverDeviceClass, CoverEntityFeature, + CoverState, ) from homeassistant.components.gogogate2.const import ( DEVICE_TYPE_GOGOGATE2, @@ -34,10 +35,6 @@ CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -144,7 +141,7 @@ def info_response(door_status: DoorStatus) -> GogoGate2InfoResponse: assert hass.states.get("cover.door1") is None assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPEN + assert hass.states.get("cover.door1").state == CoverState.OPEN assert dict(hass.states.get("cover.door1").attributes) == expected_attributes api.async_info.return_value = info_response(DoorStatus.CLOSED) @@ -163,12 +160,12 @@ def info_response(door_status: DoorStatus) -> GogoGate2InfoResponse: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_CLOSING + assert hass.states.get("cover.door1").state == CoverState.CLOSING api.async_close_door.assert_called_with(1) async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_CLOSING + assert hass.states.get("cover.door1").state == CoverState.CLOSING api.async_info.return_value = info_response(DoorStatus.CLOSED) api.async_get_door_statuses_from_info.return_value = { @@ -177,7 +174,7 @@ def info_response(door_status: DoorStatus) -> GogoGate2InfoResponse: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_CLOSED + assert hass.states.get("cover.door1").state == CoverState.CLOSED api.async_info.return_value = info_response(DoorStatus.OPENED) 
api.async_get_door_statuses_from_info.return_value = { @@ -195,12 +192,12 @@ def info_response(door_status: DoorStatus) -> GogoGate2InfoResponse: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPENING + assert hass.states.get("cover.door1").state == CoverState.OPENING api.async_open_door.assert_called_with(1) async_fire_time_changed(hass, utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPENING + assert hass.states.get("cover.door1").state == CoverState.OPENING api.async_info.return_value = info_response(DoorStatus.OPENED) api.async_get_door_statuses_from_info.return_value = { @@ -209,7 +206,7 @@ def info_response(door_status: DoorStatus) -> GogoGate2InfoResponse: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPEN + assert hass.states.get("cover.door1").state == CoverState.OPEN api.async_info.return_value = info_response(DoorStatus.UNDEFINED) api.async_get_door_statuses_from_info.return_value = { @@ -241,7 +238,7 @@ def info_response(door_status: DoorStatus) -> GogoGate2InfoResponse: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_OPENING + assert hass.states.get("cover.door1").state == CoverState.OPENING api.async_open_door.assert_called_with(1) assert await hass.config_entries.async_unload(config_entry.entry_id) @@ -303,7 +300,7 @@ async def test_availability(ismartgateapi_mock, hass: HomeAssistant) -> None: } async_fire_time_changed(hass, utcnow() + timedelta(hours=2)) await hass.async_block_till_done() - assert hass.states.get("cover.door1").state == STATE_CLOSED + assert hass.states.get("cover.door1").state == CoverState.CLOSED assert dict(hass.states.get("cover.door1").attributes) == expected_attributes diff --git a/tests/components/gogogate2/test_init.py b/tests/components/gogogate2/test_init.py index f7e58296a43265..90765c425b4129 100644 --- a/tests/components/gogogate2/test_init.py +++ b/tests/components/gogogate2/test_init.py @@ -3,11 +3,10 @@ from unittest.mock import MagicMock, patch from ismartgate import GogoGate2Api -import pytest -from homeassistant.components.gogogate2 import DEVICE_TYPE_GOGOGATE2, async_setup_entry +from homeassistant.components.gogogate2 import DEVICE_TYPE_GOGOGATE2 from homeassistant.components.gogogate2.const import DEVICE_TYPE_ISMARTGATE, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import ( CONF_DEVICE, CONF_IP_ADDRESS, @@ -15,7 +14,6 @@ CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady from tests.common import MockConfigEntry @@ -97,6 +95,8 @@ async def test_api_failure_on_startup(hass: HomeAssistant) -> None: "homeassistant.components.gogogate2.common.ISmartGateApi.async_info", side_effect=TimeoutError, ), - pytest.raises(ConfigEntryNotReady), ): - await async_setup_entry(hass, config_entry) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 03b171c5e19d6a..6ce95a2bc170d4 
100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -572,6 +572,62 @@ async def test_opaque_event( assert state.state == (STATE_ON if expect_visible_event else STATE_OFF) +async def test_declined_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_calendars_yaml, + mock_events_list_items, + component_setup, +) -> None: + """Test querying the API and fetching events from the server.""" + event = { + **TEST_EVENT, + **upcoming(), + "attendees": [ + { + "self": "True", + "responseStatus": "declined", + } + ], + } + mock_events_list_items([event]) + assert await component_setup() + + client = await hass_client() + response = await client.get(upcoming_event_url(TEST_YAML_ENTITY)) + assert response.status == HTTPStatus.OK + events = await response.json() + assert len(events) == 0 + + +async def test_attending_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_calendars_yaml, + mock_events_list_items, + component_setup, +) -> None: + """Test querying the API and fetching events from the server.""" + event = { + **TEST_EVENT, + **upcoming(), + "attendees": [ + { + "self": "True", + "responseStatus": "accepted", + } + ], + } + mock_events_list_items([event]) + assert await component_setup() + + client = await hass_client() + response = await client.get(upcoming_event_url(TEST_YAML_ENTITY)) + assert response.status == HTTPStatus.OK + events = await response.json() + assert len(events) == 1 + + @pytest.mark.parametrize("mock_test_setup", [None]) async def test_scan_calendar_error( hass: HomeAssistant, diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index b7962921ffd5e0..de882a6f791044 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -26,9 +26,11 @@ async_import_client_credential, ) from homeassistant.components.google.const import ( + CONF_CALENDAR_ACCESS, CONF_CREDENTIAL_TYPE, DOMAIN, CredentialType, + FeatureAccess, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -474,10 +476,27 @@ async def test_wrong_configuration( assert result.get("reason") == "oauth_error" +@pytest.mark.parametrize( + ("options"), + [ + ({}), + ( + { + CONF_CALENDAR_ACCESS: FeatureAccess.read_write.name, + } + ), + ( + { + CONF_CALENDAR_ACCESS: FeatureAccess.read_only.name, + } + ), + ], +) async def test_reauth_flow( hass: HomeAssistant, mock_code_flow: Mock, mock_exchange: Mock, + options: dict[str, Any] | None, ) -> None: """Test reauth of an existing config entry.""" config_entry = MockConfigEntry( @@ -486,6 +505,7 @@ async def test_reauth_flow( "auth_implementation": DOMAIN, "token": {"access_token": "OLD_ACCESS_TOKEN"}, }, + options=options, ) config_entry.add_to_hass(hass) await async_import_client_credential( @@ -540,6 +560,8 @@ async def test_reauth_flow( }, "credential_type": "device_auth", } + # Options are preserved during reauth + assert entries[0].options == options assert len(mock_setup.mock_calls) == 1 diff --git a/tests/components/google_assistant/test_helpers.py b/tests/components/google_assistant/test_helpers.py index 8b46545d9c5992..0e6876cc901992 100644 --- a/tests/components/google_assistant/test_helpers.py +++ b/tests/components/google_assistant/test_helpers.py @@ -15,8 +15,8 @@ STORE_GOOGLE_LOCAL_WEBHOOK_ID, ) from homeassistant.components.matter import MatterDeviceInfo -from homeassistant.config import async_process_ha_core_config 
from homeassistant.core import HomeAssistant, State +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index 214fc4a38dead2..f1b7108c3482a8 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -32,7 +32,6 @@ smart_home as sh, trait, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, EVENT_CALL_SERVICE, @@ -41,6 +40,7 @@ __version__, ) from homeassistant.core import HomeAssistant, State +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import ( area_registry as ar, device_registry as dr, @@ -209,7 +209,7 @@ async def test_sync_message(hass: HomeAssistant, registries) -> None: }, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ONOFF, + trait.TRAIT_ON_OFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], @@ -329,7 +329,7 @@ async def test_sync_in_area(area_on_device, hass: HomeAssistant, registries) -> "name": {"name": "Demo Light"}, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ONOFF, + trait.TRAIT_ON_OFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], @@ -926,7 +926,7 @@ async def test_unavailable_state_does_sync(hass: HomeAssistant) -> None: "name": {"name": "Demo Light"}, "traits": [ trait.TRAIT_BRIGHTNESS, - trait.TRAIT_ONOFF, + trait.TRAIT_ON_OFF, trait.TRAIT_COLOR_SETTING, trait.TRAIT_MODES, ], diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 77a9027e76d886..1e42edf8e7b2ed 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -33,7 +33,10 @@ valve, water_heater, ) -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.camera import CameraEntityFeature from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.cover import CoverEntityFeature @@ -51,7 +54,6 @@ from homeassistant.components.vacuum import VacuumEntityFeature from homeassistant.components.valve import ValveEntityFeature from homeassistant.components.water_heater import WaterHeaterEntityFeature -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_BATTERY_LEVEL, @@ -63,9 +65,6 @@ EVENT_CALL_SERVICE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, STATE_IDLE, STATE_OFF, STATE_ON, @@ -77,6 +76,7 @@ UnitOfTemperature, ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State +from homeassistant.core_config import async_process_ha_core_config from homeassistant.util import color, dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter @@ -187,12 +187,12 @@ async def test_onoff_group(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, 
{"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} off_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} @@ -215,12 +215,12 @@ async def test_onoff_input_boolean(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} off_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "input_boolean.bla"} @@ -282,12 +282,12 @@ async def test_onoff_switch(hass: HomeAssistant) -> None: assert trt_assumed.sync_attributes() == {"commandOnlyOnOff": True} on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "switch.bla"} @@ -307,12 +307,12 @@ async def test_onoff_fan(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "fan.bla"} @@ -333,12 +333,12 @@ async def test_onoff_light(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "light.bla"} @@ -359,13 +359,13 @@ async def test_onoff_media_player(hass: HomeAssistant) 
-> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} off_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "media_player.bla"} @@ -386,13 +386,13 @@ async def test_onoff_humidifier(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} off_calls = async_mock_service(hass, humidifier.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "humidifier.bla"} @@ -415,13 +415,13 @@ async def test_onoff_water_heater(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} on_calls = async_mock_service(hass, water_heater.DOMAIN, SERVICE_TURN_ON) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "water_heater.bla"} off_calls = async_mock_service(hass, water_heater.DOMAIN, SERVICE_TURN_OFF) - await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt_on.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "water_heater.bla"} @@ -562,22 +562,22 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: assert trt.query_attributes() == {"isRunning": False, "isPaused": True} start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(start_calls) == 1 assert start_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE) - await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": True}, {}) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": True}, {}) assert len(pause_calls) == 1 assert pause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} unpause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START) - await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"pause": False}, {}) + await 
trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": False}, {}) assert len(unpause_calls) == 1 assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} @@ -665,7 +665,7 @@ async def test_startstop_cover_valve( open_calls = async_mock_service(hass, domain, service_open) close_calls = async_mock_service(hass, domain, service_close) toggle_calls = async_mock_service(hass, domain, service_toggle) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert stop_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -681,18 +681,18 @@ async def test_startstop_cover_valve( with pytest.raises( SmartHomeError, match=f"{domain.capitalize()} is already stopped" ): - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) # Start triggers toggle open state.state = state_closed - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(open_calls) == 0 assert len(close_calls) == 0 assert len(toggle_calls) == 1 assert toggle_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} # Second start triggers toggle close state.state = state_open - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(open_calls) == 0 assert len(close_calls) == 0 assert len(toggle_calls) == 2 @@ -703,7 +703,7 @@ async def test_startstop_cover_valve( SmartHomeError, match="Command action.devices.commands.PauseUnpause is not supported", ): - await trt.execute(trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"start": True}, {}) @pytest.mark.parametrize( @@ -779,13 +779,13 @@ async def test_startstop_cover_valve_assumed( stop_calls = async_mock_service(hass, domain, service_stop) toggle_calls = async_mock_service(hass, domain, service_toggle) - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": False}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": False}, {}) assert len(stop_calls) == 1 assert len(toggle_calls) == 0 assert stop_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} stop_calls.clear() - await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {"start": True}, {}) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) assert len(stop_calls) == 0 assert len(toggle_calls) == 1 assert toggle_calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -984,13 +984,13 @@ async def test_light_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"effect": "colorloop"}}, ) calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"effect": "colorloop"}}, {}, @@ -1422,7 +1422,7 @@ async def test_temperature_control(hass: HomeAssistant) -> None: "temperatureAmbientCelsius": 18, } with pytest.raises(helpers.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED @@ -1609,11 +1609,11 @@ 
async def test_lock_unlock_lock(hass: HomeAssistant) -> None: assert trt.query_attributes() == {"isLocked": True} - assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) + assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) - await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} @@ -1652,11 +1652,11 @@ async def test_lock_unlock_lock_jammed(hass: HomeAssistant) -> None: assert trt.query_attributes() == {"isJammed": True} - assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": True}) + assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": True}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK) - await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": True}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": True}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "lock.front_door"} @@ -1677,13 +1677,13 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: assert trt.query_attributes() == {"isLocked": True} - assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {"lock": False}) + assert trt.can_execute(trait.COMMAND_LOCK_UNLOCK, {"lock": False}) calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK) # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -1691,14 +1691,14 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} + trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( - trait.COMMAND_LOCKUNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} + trait.COMMAND_LOCK_UNLOCK, PIN_DATA, {"lock": False}, {"pin": "1234"} ) assert len(calls) == 1 @@ -1710,7 +1710,7 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP @@ -1720,7 +1720,7 @@ async def test_lock_unlock_unlock(hass: HomeAssistant) -> None: "should_2fa", return_value=False, ): - await trt.execute(trait.COMMAND_LOCKUNLOCK, BASIC_DATA, {"lock": False}, {}) + await trt.execute(trait.COMMAND_LOCK_UNLOCK, BASIC_DATA, {"lock": False}, {}) assert len(calls) == 2 @@ -1734,7 +1734,7 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.ARM_HOME @@ -1765,11 +1765,12 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> 
None: assert trt.query_attributes() == { "isArmed": True, - "currentArmLevel": STATE_ALARM_ARMED_AWAY, + "currentArmLevel": AlarmControlPanelState.ARMED_AWAY, } assert trt.can_execute( - trait.COMMAND_ARMDISARM, {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY} + trait.COMMAND_ARM_DISARM, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, ) calls = async_mock_service( @@ -1782,16 +1783,16 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, BASIC_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 0 @@ -1801,7 +1802,7 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, @@ -1809,9 +1810,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # No challenge data with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 0 @@ -1821,9 +1822,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {"pin": 9999}, ) assert len(calls) == 0 @@ -1832,9 +1833,9 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: # correct pin await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {"pin": "1234"}, ) @@ -1845,16 +1846,16 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 1 @@ -1865,22 +1866,22 @@ async def test_arm_disarm_arm_away(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, - {"arm": True, "armLevel": STATE_ALARM_ARMED_AWAY}, + {"arm": True, "armLevel": AlarmControlPanelState.ARMED_AWAY}, {}, ) assert len(calls) == 2 with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True}, {}, @@ -1897,7 +1898,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: 
hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, { alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, ATTR_SUPPORTED_FEATURES: AlarmControlPanelEntityFeature.TRIGGER @@ -1942,7 +1943,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: "isArmed": False, } - assert trt.can_execute(trait.COMMAND_ARMDISARM, {"arm": False}) + assert trt.can_execute(trait.COMMAND_ARM_DISARM, {"arm": False}) calls = async_mock_service( hass, alarm_control_panel.DOMAIN, alarm_control_panel.SERVICE_ALARM_DISARM @@ -1953,13 +1954,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), BASIC_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ARMDISARM, BASIC_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARM_DISARM, BASIC_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NOT_SETUP @@ -1968,7 +1969,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, @@ -1976,7 +1977,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -1984,7 +1985,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": 9999} + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {"pin": 9999} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED @@ -1992,7 +1993,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # correct pin await trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {"pin": "1234"} + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {"pin": "1234"} ) assert len(calls) == 1 @@ -2002,13 +2003,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, ), PIN_CONFIG, ) with pytest.raises(error.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": False}, {}) + await trt.execute(trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": False}, {}) assert len(calls) == 1 assert err.value.code == const.ERR_ALREADY_DISARMED @@ -2016,7 +2017,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, @@ -2025,7 +2026,7 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: # Cancel arming after already armed will require pin with pytest.raises(error.SmartHomeError) as err: await trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} + 
trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 1 assert err.value.code == const.ERR_CHALLENGE_NEEDED @@ -2036,13 +2037,13 @@ async def test_arm_disarm_disarm(hass: HomeAssistant) -> None: hass, State( "alarm_control_panel.alarm", - STATE_ALARM_PENDING, + AlarmControlPanelState.PENDING, {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: False}, ), PIN_CONFIG, ) await trt.execute( - trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True, "cancel": True}, {} + trait.COMMAND_ARM_DISARM, PIN_DATA, {"arm": True, "cancel": True}, {} ) assert len(calls) == 2 @@ -2078,10 +2079,12 @@ async def test_fan_speed(hass: HomeAssistant) -> None: "currentFanSpeedSetting": ANY, } - assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeedPercent": 10}) + assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeedPercent": 10}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) - await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {}) + await trt.execute( + trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeedPercent": 10}, {} + ) assert len(calls) == 1 assert calls[0].data == {"entity_id": "fan.living_room_fan", "percentage": 10} @@ -2216,10 +2219,10 @@ async def test_fan_speed_ordered( "currentFanSpeedSetting": speed, } - assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": speed}) + assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeed": speed}) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PERCENTAGE) - await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": speed}, {}) + await trt.execute(trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeed": speed}, {}) assert len(calls) == 1 assert calls[0].data == { @@ -2328,10 +2331,12 @@ async def test_climate_fan_speed(hass: HomeAssistant) -> None: "currentFanSpeedSetting": "low", } - assert trt.can_execute(trait.COMMAND_FANSPEED, params={"fanSpeed": "medium"}) + assert trt.can_execute(trait.COMMAND_SET_FAN_SPEED, params={"fanSpeed": "medium"}) calls = async_mock_service(hass, climate.DOMAIN, climate.SERVICE_SET_FAN_MODE) - await trt.execute(trait.COMMAND_FANSPEED, BASIC_DATA, {"fanSpeed": "medium"}, {}) + await trt.execute( + trait.COMMAND_SET_FAN_SPEED, BASIC_DATA, {"fanSpeed": "medium"}, {} + ) assert len(calls) == 1 assert calls[0].data == { @@ -2387,7 +2392,7 @@ async def test_inputselector(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_INPUT, + trait.COMMAND_SET_INPUT, params={"newInput": "media"}, ) @@ -2395,7 +2400,7 @@ async def test_inputselector(hass: HomeAssistant) -> None: hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE ) await trt.execute( - trait.COMMAND_INPUT, + trait.COMMAND_SET_INPUT, BASIC_DATA, {"newInput": "media"}, {}, @@ -2563,7 +2568,7 @@ async def test_modes_input_select(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) @@ -2571,7 +2576,7 @@ async def test_modes_input_select(hass: HomeAssistant) -> None: hass, input_select.DOMAIN, input_select.SERVICE_SELECT_OPTION ) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, @@ -2639,13 +2644,13 @@ async def test_modes_select(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"option": "xyz"}}, ) calls = async_mock_service(hass, select.DOMAIN, 
select.SERVICE_SELECT_OPTION) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"option": "xyz"}}, {}, @@ -2716,12 +2721,12 @@ async def test_modes_humidifier(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, params={"updateModeSettings": {"mode": "away"}} + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"mode": "away"}} ) calls = async_mock_service(hass, humidifier.DOMAIN, humidifier.SERVICE_SET_MODE) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"mode": "away"}}, {}, @@ -2792,14 +2797,15 @@ async def test_modes_water_heater(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, params={"updateModeSettings": {"operation mode": "gas"}} + trait.COMMAND_SET_MODES, + params={"updateModeSettings": {"operation mode": "gas"}}, ) calls = async_mock_service( hass, water_heater.DOMAIN, water_heater.SERVICE_SET_OPERATION_MODE ) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"operation mode": "gas"}}, {}, @@ -2868,7 +2874,7 @@ async def test_sound_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"sound mode": "stereo"}}, ) @@ -2876,7 +2882,7 @@ async def test_sound_modes(hass: HomeAssistant) -> None: hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOUND_MODE ) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"sound mode": "stereo"}}, {}, @@ -2941,13 +2947,13 @@ async def test_preset_modes(hass: HomeAssistant) -> None: } assert trt.can_execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, params={"updateModeSettings": {"preset mode": "auto"}}, ) calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_PRESET_MODE) await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {"preset mode": "auto"}}, {}, @@ -2975,7 +2981,7 @@ async def test_traits_unknown_domains( assert trt.supported("not_supported_domain", False, None, None) is False await trt.execute( - trait.COMMAND_MODES, + trait.COMMAND_SET_MODES, BASIC_DATA, {"updateModeSettings": {}}, {}, @@ -3049,9 +3055,9 @@ async def test_openclose_cover_valve( calls_open = async_mock_service(hass, domain, open_service) calls_close = async_mock_service(hass, domain, close_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 50}, {}) await trt.execute( - trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} + trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 50}, {} ) assert len(calls_set) == 1 assert calls_set[0].data == { @@ -3066,9 +3072,9 @@ async def test_openclose_cover_valve( assert len(calls_close) == 0 - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 0}, {}) await trt.execute( - trait.COMMAND_OPENCLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 0}, {} + trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 0}, {} ) assert len(calls_set) == 1 assert len(calls_close) == 1 @@ -3123,7 +3129,7 @@ async def test_openclose_cover_valve_unknown_state( trt.query_attributes() calls = async_mock_service(hass, domain, open_service) - await 
trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -3177,7 +3183,7 @@ async def test_openclose_cover_valve_assumed_state( assert trt.query_attributes() == {} calls = async_mock_service(hass, domain, set_position_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 40}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 40}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla", cover.ATTR_POSITION: 40} @@ -3291,12 +3297,12 @@ async def test_openclose_cover_valve_no_position( assert trt.query_attributes() == {"openPercent": 0} calls = async_mock_service(hass, domain, close_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 0}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} calls = async_mock_service(hass, domain, open_service) - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 100}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 100}, {}) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: f"{domain}.bla"} @@ -3304,14 +3310,14 @@ async def test_openclose_cover_valve_no_position( SmartHomeError, match=r"Current position not know for relative command" ): await trt.execute( - trait.COMMAND_OPENCLOSE_RELATIVE, + trait.COMMAND_OPEN_CLOSE_RELATIVE, BASIC_DATA, {"openRelativePercent": 100}, {}, ) with pytest.raises(SmartHomeError, match=r"No support for partial open close"): - await trt.execute(trait.COMMAND_OPENCLOSE, BASIC_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, BASIC_DATA, {"openPercent": 50}, {}) @pytest.mark.parametrize( @@ -3354,7 +3360,7 @@ async def test_openclose_cover_secure(hass: HomeAssistant, device_class) -> None # No challenge data with pytest.raises(error.ChallengeNeeded) as err: - await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {}) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED @@ -3362,20 +3368,20 @@ async def test_openclose_cover_secure(hass: HomeAssistant, device_class) -> None # invalid pin with pytest.raises(error.ChallengeNeeded) as err: await trt.execute( - trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} + trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "9999"} ) assert len(calls) == 0 assert err.value.code == const.ERR_CHALLENGE_NEEDED assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED await trt.execute( - trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} + trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 50}, {"pin": "1234"} ) assert len(calls) == 1 assert calls[0].data == {ATTR_ENTITY_ID: "cover.bla", cover.ATTR_POSITION: 50} # no challenge on close - await trt.execute(trait.COMMAND_OPENCLOSE, PIN_DATA, {"openPercent": 0}, {}) + await trt.execute(trait.COMMAND_OPEN_CLOSE, PIN_DATA, {"openPercent": 0}, {}) assert len(calls_close) == 1 assert calls_close[0].data == {ATTR_ENTITY_ID: "cover.bla"} @@ -3699,7 +3705,7 @@ async def test_humidity_setting_sensor_data( assert 
trt.query_attributes() == {} with pytest.raises(helpers.SmartHomeError) as err: - await trt.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) + await trt.execute(trait.COMMAND_ON_OFF, BASIC_DATA, {"on": False}, {}) assert err.value.code == const.ERR_NOT_SUPPORTED @@ -4063,3 +4069,90 @@ async def test_sensorstate( ) is False ) + + +@pytest.mark.parametrize( + ("state", "identifier"), + [ + (STATE_ON, 0), + (STATE_OFF, 1), + (STATE_UNKNOWN, 2), + ], +) +@pytest.mark.parametrize( + ("device_class", "name", "states"), + [ + ( + binary_sensor.BinarySensorDeviceClass.CO, + "CarbonMonoxideLevel", + ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], + ), + ( + binary_sensor.BinarySensorDeviceClass.SMOKE, + "SmokeLevel", + ["smoke detected", "no smoke detected", "unknown"], + ), + ( + binary_sensor.BinarySensorDeviceClass.MOISTURE, + "WaterLeak", + ["leak", "no leak", "unknown"], + ), + ], +) +async def test_binary_sensorstate( + hass: HomeAssistant, + state: str, + identifier: int, + device_class: binary_sensor.BinarySensorDeviceClass, + name: str, + states: list[str], +) -> None: + """Test SensorState trait support for binary sensor domain.""" + + assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None + assert trait.SensorStateTrait.supported( + binary_sensor.DOMAIN, None, device_class, None + ) + + trt = trait.SensorStateTrait( + hass, + State( + "binary_sensor.test", + state, + { + "device_class": device_class, + }, + ), + BASIC_CONFIG, + ) + + assert trt.sync_attributes() == { + "sensorStatesSupported": [ + { + "name": name, + "descriptiveCapabilities": { + "availableStates": states, + }, + } + ] + } + assert trt.query_attributes() == { + "currentSensorStateData": [ + { + "name": name, + "currentSensorState": states[identifier], + "rawValue": None, + }, + ] + } + + assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None + assert ( + trait.SensorStateTrait.supported( + binary_sensor.DOMAIN, + None, + binary_sensor.BinarySensorDeviceClass.TAMPER, + None, + ) + is False + ) diff --git a/tests/components/google_assistant_sdk/test_config_flow.py b/tests/components/google_assistant_sdk/test_config_flow.py index d66d12509e89d4..b6ee701b228ee2 100644 --- a/tests/components/google_assistant_sdk/test_config_flow.py +++ b/tests/components/google_assistant_sdk/test_config_flow.py @@ -157,6 +157,10 @@ async def test_reauth( assert config_entry.data["token"].get("refresh_token") == "mock-refresh-token" +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.google_assistant_sdk.config.abort.single_instance_allowed"], +) @pytest.mark.usefixtures("current_request_with_host") async def test_single_instance_allowed( hass: HomeAssistant, diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py index 97e499d5d6d68a..5f160054da7738 100644 --- a/tests/components/google_pubsub/test_init.py +++ b/tests/components/google_pubsub/test_init.py @@ -148,7 +148,7 @@ async def test_allowlist(hass: HomeAssistant, mock_client) -> None: ] for test in tests: - hass.states.async_set(test.id, "not blank") + hass.states.async_set(test.id, "on") await hass.async_block_till_done() was_called = publish_client.publish.call_count == 1 @@ -178,7 +178,7 @@ async def test_denylist(hass: HomeAssistant, mock_client) -> None: ] for test in tests: - hass.states.async_set(test.id, "not blank") + hass.states.async_set(test.id, "on") await hass.async_block_till_done() was_called = 
publish_client.publish.call_count == 1 diff --git a/tests/components/google_sheets/test_config_flow.py b/tests/components/google_sheets/test_config_flow.py index a504d8c42805c8..756ff080212d2c 100644 --- a/tests/components/google_sheets/test_config_flow.py +++ b/tests/components/google_sheets/test_config_flow.py @@ -235,6 +235,7 @@ async def test_reauth( "homeassistant.components.google_sheets.async_setup_entry", return_value=True ) as mock_setup: result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1 diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index 1f199a5db97807..5b691da4bdcc59 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -14,9 +14,9 @@ from homeassistant.components import tts from homeassistant.components.google_translate.const import CONF_TLD, DOMAIN from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/google_travel_time/test_config_flow.py b/tests/components/google_travel_time/test_config_flow.py index 7600c669464137..5f9d5d4549bd53 100644 --- a/tests/components/google_travel_time/test_config_flow.py +++ b/tests/components/google_travel_time/test_config_flow.py @@ -200,7 +200,7 @@ async def test_reconfigure(hass: HomeAssistant, mock_config: MockConfigEntry) -> """Test reconfigure flow.""" reconfigure_result = await mock_config.start_reconfigure_flow(hass) assert reconfigure_result["type"] is FlowResultType.FORM - assert reconfigure_result["step_id"] == "reconfigure_confirm" + assert reconfigure_result["step_id"] == "reconfigure" await assert_common_reconfigure_steps(hass, reconfigure_result) diff --git a/tests/components/gpslogger/test_init.py b/tests/components/gpslogger/test_init.py index fab6aaa4e84047..aff8b20dc529a7 100644 --- a/tests/components/gpslogger/test_init.py +++ b/tests/components/gpslogger/test_init.py @@ -11,9 +11,9 @@ from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.gpslogger import DOMAIN, TRACKER_UPDATE -from homeassistant.config import async_process_ha_core_config from homeassistant.const import STATE_HOME, STATE_NOT_HOME from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import DATA_DISPATCHER diff --git a/tests/components/group/test_cover.py b/tests/components/group/test_cover.py index f89aa9609ccb3a..b1f622569bdb55 100644 --- a/tests/components/group/test_cover.py +++ b/tests/components/group/test_cover.py @@ -12,6 +12,7 @@ ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.components.group.cover import DEFAULT_NAME from homeassistant.const import ( @@ -31,10 +32,6 @@ 
SERVICE_STOP_COVER_TILT, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -158,90 +155,105 @@ async def test_state(hass: HomeAssistant) -> None: # At least one member opening -> group opening for state_1 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_2 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_3 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, + CoverState.OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_OPENING, {}) + hass.states.async_set(DEMO_TILT, CoverState.OPENING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # At least one member closing -> group closing for state_1 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_2 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): for state_3 in ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, + CoverState.CLOSED, + CoverState.CLOSING, + CoverState.OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN, ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_CLOSING, {}) + hass.states.async_set(DEMO_TILT, CoverState.CLOSING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # At least one member open -> group open - for state_1 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_2 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_3 in (STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_1 in ( + CoverState.CLOSED, + CoverState.OPEN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + for state_2 in ( + CoverState.CLOSED, + CoverState.OPEN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + for state_3 in ( + CoverState.CLOSED, + CoverState.OPEN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_OPEN, {}) + hass.states.async_set(DEMO_TILT, CoverState.OPEN, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # At least one member closed -> group closed - for state_1 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_2 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): - for state_3 in (STATE_CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_1 in (CoverState.CLOSED, STATE_UNAVAILABLE, 
STATE_UNKNOWN): + for state_2 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): + for state_3 in (CoverState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN): hass.states.async_set(DEMO_COVER, state_1, {}) hass.states.async_set(DEMO_COVER_POS, state_2, {}) hass.states.async_set(DEMO_COVER_TILT, state_3, {}) - hass.states.async_set(DEMO_TILT, STATE_CLOSED, {}) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # All group members removed from the state machine -> unavailable hass.states.async_remove(DEMO_COVER) @@ -269,11 +281,11 @@ async def test_attributes( assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Set entity as closed - hass.states.async_set(DEMO_COVER, STATE_CLOSED, {}) + hass.states.async_set(DEMO_COVER, CoverState.CLOSED, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_ENTITY_ID] == [ DEMO_COVER, DEMO_COVER_POS, @@ -282,18 +294,18 @@ async def test_attributes( ] # Set entity as opening - hass.states.async_set(DEMO_COVER, STATE_OPENING, {}) + hass.states.async_set(DEMO_COVER, CoverState.OPENING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # Set entity as closing - hass.states.async_set(DEMO_COVER, STATE_CLOSING, {}) + hass.states.async_set(DEMO_COVER, CoverState.CLOSING, {}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # Set entity as unknown again hass.states.async_set(DEMO_COVER, STATE_UNKNOWN, {}) @@ -303,11 +315,11 @@ async def test_attributes( assert state.state == STATE_UNKNOWN # Add Entity that supports open / close / stop - hass.states.async_set(DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set(DEMO_COVER, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11 assert ATTR_CURRENT_POSITION not in state.attributes @@ -316,24 +328,24 @@ async def test_attributes( # Add Entity that supports set_cover_position hass.states.async_set( DEMO_COVER_POS, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 70}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 15 assert state.attributes[ATTR_CURRENT_POSITION] == 70 assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Add Entity that supports open tilt / close tilt / stop tilt - hass.states.async_set(DEMO_TILT, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 112}) + hass.states.async_set(DEMO_TILT, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 112}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 127 assert 
state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -342,13 +354,13 @@ async def test_attributes( # Add Entity that supports set_tilt_position hass.states.async_set( DEMO_COVER_TILT, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 255 assert state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -359,12 +371,14 @@ async def test_attributes( # Covers hass.states.async_set( - DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100} + DEMO_COVER, + CoverState.OPEN, + {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 244 assert state.attributes[ATTR_CURRENT_POSITION] == 85 # (70 + 100) / 2 @@ -375,7 +389,7 @@ async def test_attributes( await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 240 assert ATTR_CURRENT_POSITION not in state.attributes @@ -384,31 +398,31 @@ async def test_attributes( # Tilts hass.states.async_set( DEMO_TILT, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 100}, ) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 128 assert ATTR_CURRENT_POSITION not in state.attributes assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 80 # (60 + 100) / 2 hass.states.async_remove(DEMO_COVER_TILT) - hass.states.async_set(DEMO_TILT, STATE_CLOSED) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 assert ATTR_CURRENT_POSITION not in state.attributes assert ATTR_CURRENT_TILT_POSITION not in state.attributes # Group member has set assumed_state - hass.states.async_set(DEMO_TILT, STATE_CLOSED, {ATTR_ASSUMED_STATE: True}) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED, {ATTR_ASSUMED_STATE: True}) await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) @@ -426,16 +440,16 @@ async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> Non """Test removing a cover that support tilt.""" hass.states.async_set( DEMO_COVER_TILT, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) hass.states.async_set( DEMO_TILT, - STATE_OPEN, + CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60}, ) state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME assert state.attributes[ATTR_ENTITY_ID] == [ DEMO_COVER_TILT, @@ -445,7 +459,7 @@ async 
def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> Non assert ATTR_CURRENT_TILT_POSITION in state.attributes hass.states.async_remove(DEMO_COVER_TILT) - hass.states.async_set(DEMO_TILT, STATE_CLOSED) + hass.states.async_set(DEMO_TILT, CoverState.CLOSED) await hass.async_block_till_done() @@ -463,10 +477,10 @@ async def test_open_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert hass.states.get(DEMO_COVER).state == STATE_OPEN + assert hass.states.get(DEMO_COVER).state == CoverState.OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100 @@ -485,10 +499,10 @@ async def test_close_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 - assert hass.states.get(DEMO_COVER).state == STATE_CLOSED + assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0 @@ -507,7 +521,7 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Toggle will close covers await hass.services.async_call( @@ -519,10 +533,10 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 - assert hass.states.get(DEMO_COVER).state == STATE_CLOSED + assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0 @@ -536,10 +550,10 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert hass.states.get(DEMO_COVER).state == STATE_OPEN + assert hass.states.get(DEMO_COVER).state == CoverState.OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100 @@ -563,10 +577,10 @@ async def test_stop_covers(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 # (20 + 80) / 2 - assert hass.states.get(DEMO_COVER).state == STATE_OPEN + assert hass.states.get(DEMO_COVER).state == CoverState.OPEN assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 20 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 80 @@ -587,10 +601,10 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = 
hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 50 - assert hass.states.get(DEMO_COVER).state == STATE_CLOSED + assert hass.states.get(DEMO_COVER).state == CoverState.CLOSED assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 50 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 50 @@ -611,7 +625,7 @@ async def test_open_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -635,7 +649,7 @@ async def test_close_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -658,7 +672,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -678,7 +692,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -696,7 +710,7 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 assert ( @@ -729,7 +743,7 @@ async def test_stop_tilts(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 60 @@ -751,7 +765,7 @@ async def test_set_tilt_positions(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(COVER_GROUP) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 80 assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 80 @@ -767,9 +781,9 @@ async def test_is_opening_closing(hass: HomeAssistant) -> None: await hass.async_block_till_done() # Both covers opening -> opening - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING - assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING - assert hass.states.get(COVER_GROUP).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -781,54 +795,68 @@ async def test_is_opening_closing(hass: HomeAssistant) -> None: ) # Both covers closing -> closing - assert 
hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING - hass.states.async_set(DEMO_COVER_POS, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.OPENING, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() # Closing + Opening -> Opening - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING - assert hass.states.get(COVER_GROUP).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPENING + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING - hass.states.async_set(DEMO_COVER_POS, STATE_CLOSING, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.CLOSING, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() # Both covers closing -> closing - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING # Closed + Closing -> Closing - hass.states.async_set(DEMO_COVER_POS, STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.CLOSED, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSED - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSED + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING # Open + Closing -> Closing - hass.states.async_set(DEMO_COVER_POS, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPEN - assert hass.states.get(COVER_GROUP).state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPEN + assert hass.states.get(COVER_GROUP).state == CoverState.CLOSING # Closed + Opening -> Closing - hass.states.async_set(DEMO_COVER_TILT, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11}) - hass.states.async_set(DEMO_COVER_POS, STATE_CLOSED, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_TILT, CoverState.OPENING, {ATTR_SUPPORTED_FEATURES: 11} + ) + hass.states.async_set( + DEMO_COVER_POS, CoverState.CLOSED, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSED - assert hass.states.get(COVER_GROUP).state == 
STATE_OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSED + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING # Open + Opening -> Closing - hass.states.async_set(DEMO_COVER_POS, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11}) + hass.states.async_set( + DEMO_COVER_POS, CoverState.OPEN, {ATTR_SUPPORTED_FEATURES: 11} + ) await hass.async_block_till_done() - assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING - assert hass.states.get(DEMO_COVER_POS).state == STATE_OPEN - assert hass.states.get(COVER_GROUP).state == STATE_OPENING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.OPENING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.OPEN + assert hass.states.get(COVER_GROUP).state == CoverState.OPENING async def test_nested_group(hass: HomeAssistant) -> None: @@ -858,12 +886,12 @@ async def test_nested_group(hass: HomeAssistant) -> None: state = hass.states.get("cover.bedroom_group") assert state is not None - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_ENTITY_ID) == [DEMO_COVER_POS, DEMO_COVER_TILT] state = hass.states.get("cover.nested_group") assert state is not None - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_ENTITY_ID) == ["cover.bedroom_group"] # Test controlling the nested group @@ -874,7 +902,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: "cover.nested_group"}, blocking=True, ) - assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING - assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING - assert hass.states.get("cover.bedroom_group").state == STATE_CLOSING - assert hass.states.get("cover.nested_group").state == STATE_CLOSING + assert hass.states.get(DEMO_COVER_POS).state == CoverState.CLOSING + assert hass.states.get(DEMO_COVER_TILT).state == CoverState.CLOSING + assert hass.states.get("cover.bedroom_group").state == CoverState.CLOSING + assert hass.states.get("cover.nested_group").state == CoverState.CLOSING diff --git a/tests/components/group/test_sensor.py b/tests/components/group/test_sensor.py index db642506361dd6..de406cb251c076 100644 --- a/tests/components/group/test_sensor.py +++ b/tests/components/group/test_sensor.py @@ -32,6 +32,7 @@ SERVICE_RELOAD, STATE_UNAVAILABLE, STATE_UNKNOWN, + UnitOfTemperature, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -496,7 +497,7 @@ async def test_sensor_with_uoms_but_no_device_class( state = hass.states.get("sensor.test_sum") assert state.attributes.get("device_class") is None assert state.attributes.get("state_class") is None - assert state.attributes.get("unit_of_measurement") == "W" + assert state.attributes.get("unit_of_measurement") is None assert state.state == STATE_UNKNOWN assert ( @@ -650,10 +651,10 @@ async def test_sensor_calculated_result_fails_on_uom(hass: HomeAssistant) -> Non await hass.async_block_till_done() state = hass.states.get("sensor.test_sum") - assert state.state == STATE_UNKNOWN + assert state.state == STATE_UNAVAILABLE assert state.attributes.get("device_class") == "energy" assert state.attributes.get("state_class") == "total" - assert state.attributes.get("unit_of_measurement") == "kWh" + assert state.attributes.get("unit_of_measurement") is None async def test_sensor_calculated_properties_not_convertible_device_class( @@ -730,7 +731,7 @@ async 
def test_sensor_calculated_properties_not_convertible_device_class( assert state.state == STATE_UNKNOWN assert state.attributes.get("device_class") == "humidity" assert state.attributes.get("state_class") == "measurement" - assert state.attributes.get("unit_of_measurement") == "%" + assert state.attributes.get("unit_of_measurement") is None assert ( "Unable to use state. Only entities with correct unit of measurement is" @@ -812,3 +813,197 @@ async def test_sensors_attributes_added_when_entity_info_available( assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.TOTAL assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "L" + + +async def test_sensor_state_class_no_uom_not_available( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test when input sensors drops unit of measurement.""" + + # If we have a valid unit of measurement from all input sensors + # the group sensor will go unknown in the case any input sensor + # drops the unit of measurement and log a warning. + + config = { + SENSOR_DOMAIN: { + "platform": GROUP_DOMAIN, + "name": "test_sum", + "type": "sum", + "entities": ["sensor.test_1", "sensor.test_2", "sensor.test_3"], + "unique_id": "very_unique_id_sum_sensor", + } + } + + entity_ids = config["sensor"]["entities"] + + input_attributes = { + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": PERCENTAGE, + } + + hass.states.async_set(entity_ids[0], VALUES[0], input_attributes) + hass.states.async_set(entity_ids[1], VALUES[1], input_attributes) + hass.states.async_set(entity_ids[2], VALUES[2], input_attributes) + await hass.async_block_till_done() + + assert await async_setup_component(hass, "sensor", config) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sum") + assert state.state == str(sum(VALUES)) + assert state.attributes.get("state_class") == "measurement" + assert state.attributes.get("unit_of_measurement") == "%" + + assert ( + "Unable to use state. Only entities with correct unit of measurement is" + " supported" + ) not in caplog.text + + # sensor.test_3 drops the unit of measurement + hass.states.async_set( + entity_ids[2], + VALUES[2], + { + "state_class": SensorStateClass.MEASUREMENT, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sum") + assert state.state == STATE_UNKNOWN + assert state.attributes.get("state_class") == "measurement" + assert state.attributes.get("unit_of_measurement") is None + + assert ( + "Unable to use state. 
Only entities with correct unit of measurement is" + " supported, entity sensor.test_3, value 15.3 with" + " device class None and unit of measurement None excluded from calculation" + " in sensor.test_sum" + ) in caplog.text + + +async def test_sensor_different_attributes_ignore_non_numeric( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the sensor handles calculating attributes when using ignore_non_numeric.""" + config = { + SENSOR_DOMAIN: { + "platform": GROUP_DOMAIN, + "name": "test_sum", + "type": "sum", + "ignore_non_numeric": True, + "entities": ["sensor.test_1", "sensor.test_2", "sensor.test_3"], + "unique_id": "very_unique_id_sum_sensor", + } + } + + entity_ids = config["sensor"]["entities"] + + assert await async_setup_component(hass, "sensor", config) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sum") + assert state.state == STATE_UNAVAILABLE + assert state.attributes.get("state_class") is None + assert state.attributes.get("device_class") is None + assert state.attributes.get("unit_of_measurement") is None + + test_cases = [ + { + "entity": entity_ids[0], + "value": VALUES[0], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(VALUES[0])), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[1], + "value": VALUES[1], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.HUMIDITY, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(sum([VALUES[0], VALUES[1]]))), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[2], + "value": VALUES[2], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.TEMPERATURE, + "unit_of_measurement": UnitOfTemperature.CELSIUS, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": None, + }, + { + "entity": entity_ids[2], + "value": VALUES[2], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.HUMIDITY, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + # One sensor does not have a device class + "expected_device_class": None, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[0], + "value": VALUES[0], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + "device_class": SensorDeviceClass.HUMIDITY, + "unit_of_measurement": PERCENTAGE, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + # First sensor now has a device class + "expected_device_class": SensorDeviceClass.HUMIDITY, + "expected_unit_of_measurement": PERCENTAGE, + }, + { + "entity": entity_ids[0], + "value": VALUES[0], + "attributes": { + "state_class": SensorStateClass.MEASUREMENT, + }, + "expected_state": str(float(sum(VALUES))), + "expected_state_class": SensorStateClass.MEASUREMENT, + "expected_device_class": None, + "expected_unit_of_measurement": None, + }, + ] + + for test_case in test_cases: + hass.states.async_set( + test_case["entity"], + 
test_case["value"], + test_case["attributes"], + ) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_sum") + assert state.state == test_case["expected_state"] + assert state.attributes.get("state_class") == test_case["expected_state_class"] + assert ( + state.attributes.get("device_class") == test_case["expected_device_class"] + ) + assert ( + state.attributes.get("unit_of_measurement") + == test_case["expected_unit_of_measurement"] + ) diff --git a/tests/components/habitica/conftest.py b/tests/components/habitica/conftest.py index 2401397be26ca1..8d729f4358fcd4 100644 --- a/tests/components/habitica/conftest.py +++ b/tests/components/habitica/conftest.py @@ -3,6 +3,14 @@ from unittest.mock import patch import pytest +from yarl import URL + +from homeassistant.components.habitica.const import CONF_API_USER, DEFAULT_URL, DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_json_object_fixture +from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True) @@ -13,3 +21,66 @@ def disable_plumbum(): """ with patch("plumbum.local"), patch("plumbum.colors"): yield + + +def mock_called_with( + mock_client: AiohttpClientMocker, + method: str, + url: str, +) -> tuple | None: + """Assert request mock was called with json data.""" + + return next( + ( + call + for call in mock_client.mock_calls + if call[0].upper() == method.upper() and call[1] == URL(url) + ), + None, + ) + + +@pytest.fixture +def mock_habitica(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: + """Mock aiohttp requests.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", json=load_json_object_fixture("user.json", DOMAIN) + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + + return aioclient_mock + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Mock Habitica configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="test-user", + data={ + CONF_URL: DEFAULT_URL, + CONF_API_USER: "test-api-user", + CONF_API_KEY: "test-api-key", + }, + unique_id="00000000-0000-0000-0000-000000000000", + ) + + +@pytest.fixture +async def set_tz(hass: HomeAssistant) -> None: + """Fixture to set timezone.""" + await hass.config.async_set_time_zone("Europe/Berlin") diff --git a/tests/components/habitica/fixtures/common_buttons_unavailable.json b/tests/components/habitica/fixtures/common_buttons_unavailable.json new file mode 100644 index 00000000000000..efee5364e023db --- /dev/null +++ b/tests/components/habitica/fixtures/common_buttons_unavailable.json @@ -0,0 +1,54 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 0, + "int": 0, + "per": 0, + "con": 0, + "stealth": 0, + "streaks": true, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50, + "exp": 737, + "gp": 0, + "lvl": 5, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0 + }, + "preferences": { + "sleep": false, + 
"automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/completed_todos.json b/tests/components/habitica/fixtures/completed_todos.json new file mode 100644 index 00000000000000..8185a0a4ff7d7d --- /dev/null +++ b/tests/components/habitica/fixtures/completed_todos.json @@ -0,0 +1,78 @@ +{ + "success": true, + "data": [ + { + "_id": "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba", + "completed": true, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Wocheneinkauf erledigen", + "notes": "Lebensmittel und Haushaltsbedarf für die Woche einkaufen.", + "tags": ["64235347-55d0-4ba1-a86a-3428dcfdf319"], + "value": 1, + "priority": 1.5, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:19:10.919Z", + "updatedAt": "2024-09-21T22:19:15.484Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "dateCompleted": "2024-09-21T22:19:15.478Z", + "id": "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba" + }, + { + "_id": "3fa06743-aa0f-472b-af1a-f27c755e329c", + "completed": true, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Wohnung aufräumen", + "notes": "Wohnzimmer und Küche gründlich aufräumen.", + "tags": ["64235347-55d0-4ba1-a86a-3428dcfdf319"], + "value": 1, + "priority": 2, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:18:30.646Z", + "updatedAt": "2024-09-21T22:18:34.663Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "dateCompleted": "2024-09-21T22:18:34.660Z", + "id": "3fa06743-aa0f-472b-af1a-f27c755e329c" + } + ], + "notifications": [ + { + "type": "ITEM_RECEIVED", + "data": { + "icon": "notif_orca_mount", + "title": "Orcas for Summer Splash!", + "text": "To celebrate Summer Splash, we've given you an Orca Mount!", + "destination": "stable" + }, + "seen": true, + "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" + }, + { + "type": "UNALLOCATED_STATS_POINTS", + "data": { + "points": 2 + }, + "seen": true, + "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" + } + ], + "userV": 584, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/content.json b/tests/components/habitica/fixtures/content.json new file mode 100644 index 00000000000000..e8e14dead73626 --- /dev/null +++ b/tests/components/habitica/fixtures/content.json @@ -0,0 +1,287 @@ +{ + "success": true, + "data": { + "gear": { + "flat": { + "weapon_warrior_5": { + "text": "Ruby Sword", + "notes": "Weapon whose forge-glow never fades. Increases Strength by 15. ", + "str": 15, + "value": 90, + "type": "weapon", + "key": "weapon_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "armor_warrior_5": { + "text": "Golden Armor", + "notes": "Looks ceremonial, but no known blade can pierce it. 
Increases Constitution by 11.", + "con": 11, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "head_warrior_5": { + "text": "Golden Helm", + "notes": "Regal crown bound to shining armor. Increases Strength by 12.", + "str": 12, + "value": 80, + "last": true, + "type": "head", + "key": "head_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "shield_warrior_5": { + "text": "Golden Shield", + "notes": "Shining badge of the vanguard. Increases Constitution by 9.", + "con": 9, + "value": 90, + "last": true, + "type": "shield", + "key": "shield_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "weapon_wizard_5": { + "twoHanded": true, + "text": "Archmage Staff", + "notes": "Assists in weaving the most complex of spells. Increases Intelligence by 15 and Perception by 7. Two-handed item.", + "int": 15, + "per": 7, + "value": 160, + "type": "weapon", + "key": "weapon_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "con": 0 + }, + "armor_wizard_5": { + "text": "Royal Magus Robe", + "notes": "Symbol of the power behind the throne. Increases Intelligence by 12.", + "int": 12, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "head_wizard_5": { + "text": "Royal Magus Hat", + "notes": "Shows authority over fortune, weather, and lesser mages. Increases Perception by 10.", + "per": 10, + "value": 80, + "last": true, + "type": "head", + "key": "head_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "weapon_healer_5": { + "text": "Royal Scepter", + "notes": "Fit to grace the hand of a monarch, or of one who stands at a monarch's right hand. Increases Intelligence by 9. ", + "int": 9, + "value": 90, + "type": "weapon", + "key": "weapon_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "armor_healer_5": { + "text": "Royal Mantle", + "notes": "Attire of those who have saved the lives of kings. Increases Constitution by 18.", + "con": 18, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "head_healer_5": { + "text": "Royal Diadem", + "notes": "For king, queen, or miracle-worker. Increases Intelligence by 9.", + "int": 9, + "value": 80, + "last": true, + "type": "head", + "key": "head_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "shield_healer_5": { + "text": "Royal Shield", + "notes": "Bestowed upon those most dedicated to the kingdom's defense. Increases Constitution by 12.", + "con": 12, + "value": 90, + "last": true, + "type": "shield", + "key": "shield_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "weapon_rogue_5": { + "text": "Ninja-to", + "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. 
", + "str": 8, + "value": 90, + "type": "weapon", + "key": "weapon_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "armor_rogue_5": { + "text": "Umbral Armor", + "notes": "Allows stealth in the open in broad daylight. Increases Perception by 18.", + "per": 18, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "head_rogue_5": { + "text": "Umbral Hood", + "notes": "Conceals even thoughts from those who would probe them. Increases Perception by 12.", + "per": 12, + "value": 80, + "last": true, + "type": "head", + "key": "head_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "shield_rogue_5": { + "text": "Ninja-to", + "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", + "str": 8, + "value": 90, + "type": "shield", + "key": "shield_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "back_special_heroicAureole": { + "text": "Heroic Aureole", + "notes": "The gems on this aureole glimmer when you tell your tales of glory. Increases all stats by 7.", + "con": 7, + "str": 7, + "per": 7, + "int": 7, + "value": 175, + "type": "back", + "key": "back_special_heroicAureole", + "set": "special-heroicAureole", + "klass": "special", + "index": "heroicAureole" + }, + "headAccessory_armoire_gogglesOfBookbinding": { + "per": 8, + "set": "bookbinder", + "notes": "These goggles will help you zero in on any task, large or small! Increases Perception by 8. Enchanted Armoire: Bookbinder Set (Item 1 of 4).", + "text": "Goggles of Bookbinding", + "value": 100, + "type": "headAccessory", + "key": "headAccessory_armoire_gogglesOfBookbinding", + "klass": "armoire", + "index": "gogglesOfBookbinding", + "str": 0, + "int": 0, + "con": 0 + }, + "eyewear_armoire_plagueDoctorMask": { + "con": 5, + "int": 5, + "set": "plagueDoctor", + "notes": "An authentic mask worn by the doctors who battle the Plague of Procrastination. Increases Constitution and Intelligence by 5 each. Enchanted Armoire: Plague Doctor Set (Item 2 of 3).", + "text": "Plague Doctor Mask", + "value": 100, + "type": "eyewear", + "key": "eyewear_armoire_plagueDoctorMask", + "klass": "armoire", + "index": "plagueDoctorMask", + "str": 0, + "per": 0 + }, + "body_special_aetherAmulet": { + "text": "Aether Amulet", + "notes": "This amulet has a mysterious history. 
Increases Constitution and Strength by 10 each.", + "value": 175, + "str": 10, + "con": 10, + "type": "body", + "key": "body_special_aetherAmulet", + "set": "special-aetherAmulet", + "klass": "special", + "index": "aetherAmulet", + "int": 0, + "per": 0 + } + } + } + }, + "appVersion": "5.29.2" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_1.json b/tests/components/habitica/fixtures/duedate_fixture_1.json new file mode 100644 index 00000000000000..d44d5f38498168 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_1.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "daily", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-07-06T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": true, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_2.json b/tests/components/habitica/fixtures/duedate_fixture_2.json new file mode 100644 index 00000000000000..99cf4e89454940 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_2.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "daily", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-23T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_3.json b/tests/components/habitica/fixtures/duedate_fixture_3.json new file mode 100644 index 00000000000000..78b66ad6643163 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_3.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "monthly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + 
"f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-10-22T22:00:00.000Z", "2024-11-22T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-10-22T22:00:00.000Z", + "daysOfMonth": [23], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_4.json b/tests/components/habitica/fixtures/duedate_fixture_4.json new file mode 100644 index 00000000000000..7e14e3339e2ae9 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_4.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "yearly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-10-22T22:00:00.000Z", "2025-10-22T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-10-22T22:00:00.000Z", + "daysOfMonth": [22], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_5.json b/tests/components/habitica/fixtures/duedate_fixture_5.json new file mode 100644 index 00000000000000..d8d5f4cd7730b6 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_5.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "weekly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-20T22:00:00.000Z", "2024-09-27T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-25T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + 
"isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_6.json b/tests/components/habitica/fixtures/duedate_fixture_6.json new file mode 100644 index 00000000000000..dce177b1abc8c5 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_6.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "monthly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-20T22:00:00.000Z", "2024-10-20T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-25T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_7.json b/tests/components/habitica/fixtures/duedate_fixture_7.json new file mode 100644 index 00000000000000..723ee40062de11 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_7.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "monthly", + "everyX": 0, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": ["2024-09-22T22:00:00.000Z", "2024-09-23T22:00:00.000Z"], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-23T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/duedate_fixture_8.json b/tests/components/habitica/fixtures/duedate_fixture_8.json new file mode 100644 index 00000000000000..21a40a0a6491e9 --- /dev/null +++ b/tests/components/habitica/fixtures/duedate_fixture_8.json @@ -0,0 +1,51 @@ +{ + "success": true, + "data": [ + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "daily", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": [], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": 
false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-09-23T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + } + ], + "notifications": [], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/healer_fixture.json b/tests/components/habitica/fixtures/healer_fixture.json new file mode 100644 index 00000000000000..85f719f4ca7d24 --- /dev/null +++ b/tests/components/habitica/fixtures/healer_fixture.json @@ -0,0 +1,59 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 45, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "healer", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_healer_5", + "armor": "armor_healer_5", + "head": "head_healer_5", + "shield": "shield_healer_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/healer_skills_unavailable.json b/tests/components/habitica/fixtures/healer_skills_unavailable.json new file mode 100644 index 00000000000000..a6bff246b2a931 --- /dev/null +++ b/tests/components/habitica/fixtures/healer_skills_unavailable.json @@ -0,0 +1,58 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 10, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "healer", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_healer_5", + "armor": "armor_healer_5", + "head": "head_healer_5", + "shield": "shield_healer_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/quest_invitation_off.json b/tests/components/habitica/fixtures/quest_invitation_off.json new file mode 100644 index 
00000000000000..b5eccd99e10b14 --- /dev/null +++ b/tests/components/habitica/fixtures/quest_invitation_off.json @@ -0,0 +1,65 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 0, + "int": 0, + "per": 0, + "con": 0, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 0, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "tasksOrder": { + "rewards": ["5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"], + "todos": [ + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "1aa3137e-ef72-4d1f-91ee-41933602f438", + "86ea2475-d1b5-4020-bdcc-c188c7996afa" + ], + "dailys": [ + "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", + "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", + "e97659e0-2c42-4599-a7bb-00282adc410d", + "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + ], + "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] + }, + "party": { + "quest": { + "RSVPNeeded": false, + "key": null + } + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z" + } +} diff --git a/tests/components/habitica/fixtures/rogue_fixture.json b/tests/components/habitica/fixtures/rogue_fixture.json new file mode 100644 index 00000000000000..1e5e996c03463b --- /dev/null +++ b/tests/components/habitica/fixtures/rogue_fixture.json @@ -0,0 +1,59 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 0, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "rogue", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/rogue_skills_unavailable.json b/tests/components/habitica/fixtures/rogue_skills_unavailable.json new file mode 100644 index 00000000000000..c7c5ff322458ab --- /dev/null +++ b/tests/components/habitica/fixtures/rogue_skills_unavailable.json @@ -0,0 +1,58 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": true, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 20, + "exp": 737, + "gp": 0, + "lvl": 38, + "class": "rogue", + "maxHealth": 50, + 
"maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json new file mode 100644 index 00000000000000..9fd7adcca42600 --- /dev/null +++ b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json @@ -0,0 +1,58 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 4, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50, + "exp": 737, + "gp": 0, + "lvl": 38, + "class": "rogue", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/score_with_drop.json b/tests/components/habitica/fixtures/score_with_drop.json new file mode 100644 index 00000000000000..f25838d6c3743a --- /dev/null +++ b/tests/components/habitica/fixtures/score_with_drop.json @@ -0,0 +1,69 @@ +{ + "success": true, + "data": { + "delta": 0.9999999781878414, + "_tmp": { + "quest": { + "progressDelta": 1.049999977097233 + }, + "drop": { + "value": 3, + "key": "Dragon", + "type": "Egg", + "dialog": "You've found a Dragon Egg!" 
+ } + }, + "buffs": { + "str": 0, + "int": 0, + "per": 0, + "con": 0, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "training": { + "int": 0, + "per": 0, + "str": 0, + "con": 0 + }, + "hp": 25.100000000000016, + "mp": 24, + "exp": 196, + "gp": 30.453660284128997, + "lvl": 20, + "class": "warrior", + "points": 2, + "str": 0, + "con": 0, + "int": 0, + "per": 0 + }, + "notifications": [ + { + "type": "ITEM_RECEIVED", + "data": { + "icon": "notif_orca_mount", + "title": "Orcas for Summer Splash!", + "text": "To celebrate Summer Splash, we've given you an Orca Mount!", + "destination": "stable" + }, + "seen": true, + "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" + }, + { + "type": "UNALLOCATED_STATS_POINTS", + "data": { + "points": 2 + }, + "seen": true, + "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" + } + ], + "userV": 623, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json new file mode 100644 index 00000000000000..2e8305283d083b --- /dev/null +++ b/tests/components/habitica/fixtures/tasks.json @@ -0,0 +1,555 @@ +{ + "success": true, + "data": [ + { + "_id": "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", + "up": true, + "down": true, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [], + "type": "habit", + "text": "Gesundes Essen/Junkfood", + "notes": "", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-07-07T17:51:53.268Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a" + }, + { + "_id": "1d147de6-5c02-4740-8e2f-71d3015a37f4", + "up": true, + "down": false, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [ + { + "date": 1720376763324, + "value": 1, + "scoredUp": 1, + "scoredDown": 0 + } + ], + "type": "habit", + "text": "Eine kurze Pause machen", + "notes": "", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.266Z", + "updatedAt": "2024-07-12T09:58:45.438Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "1d147de6-5c02-4740-8e2f-71d3015a37f4" + }, + { + "_id": "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", + "up": false, + "down": true, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [], + "type": "habit", + "text": "Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest", + "notes": "Oder lösche es über die Bearbeitungs-Ansicht", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.265Z", + "updatedAt": "2024-07-07T17:51:53.265Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "bc1d1855-b2b8-4663-98ff-62e7b763dfc4" + }, + { + "_id": "e97659e0-2c42-4599-a7bb-00282adc410d", + "up": true, + "down": false, + "counterUp": 0, + "counterDown": 0, + "frequency": "daily", + "history": [ + { + "date": 1720376763140, + "value": 1, + "scoredUp": 1, + "scoredDown": 0 + } + ], + "type": "habit", + "text": "Füge eine 
Aufgabe zu Habitica hinzu", + "notes": "Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.264Z", + "updatedAt": "2024-07-12T09:58:45.438Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "e97659e0-2c42-4599-a7bb-00282adc410d", + "alias": "create_a_task" + }, + { + "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "frequency": "weekly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 1, + "nextDue": [ + "Mon Sep 23 2024 00:00:00 GMT+0200", + "Tue Sep 24 2024 00:00:00 GMT+0200", + "Wed Sep 25 2024 00:00:00 GMT+0200", + "Thu Sep 26 2024 00:00:00 GMT+0200", + "Fri Sep 27 2024 00:00:00 GMT+0200", + "Sat Sep 28 2024 00:00:00 GMT+0200" + ], + "yesterDaily": true, + "history": [ + { + "date": 1720376766749, + "value": 1, + "isDue": true, + "completed": true + }, + { + "date": 1720545311292, + "value": 0.02529999999999999, + "isDue": true, + "completed": false + }, + { + "date": 1720564306719, + "value": -0.9740518837628547, + "isDue": true, + "completed": false + }, + { + "date": 1720691096907, + "value": 0.051222853419153, + "isDue": true, + "completed": true + }, + { + "date": 1720778325243, + "value": 1.0499115128458676, + "isDue": true, + "completed": true + }, + { + "date": 1724185196447, + "value": 0.07645736684721605, + "isDue": true, + "completed": false + }, + { + "date": 1724255707692, + "value": -0.921585289356988, + "isDue": true, + "completed": false + }, + { + "date": 1726846163640, + "value": -1.9454824860630637, + "isDue": true, + "completed": false + }, + { + "date": 1726953787542, + "value": -2.9966001649571803, + "isDue": true, + "completed": false + }, + { + "date": 1726956115608, + "value": -4.07641493832036, + "isDue": true, + "completed": false + }, + { + "date": 1726957460150, + "value": -2.9663035443712333, + "isDue": true, + "completed": true + } + ], + "completed": true, + "collapseChecklist": false, + "type": "daily", + "text": "Zahnseide benutzen", + "notes": "Klicke um Änderungen zu machen!", + "tags": [], + "value": -2.9663035443712333, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-07-06T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.268Z", + "updatedAt": "2024-09-21T22:24:20.154Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": true, + "id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa" + }, + { + "_id": "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + "frequency": "weekly", + "everyX": 1, + "repeat": { + "m": true, + "t": true, + "w": true, + "th": true, + "f": true, + "s": true, + "su": true + }, + "streak": 0, + "nextDue": [ + "2024-09-22T22:00:00.000Z", + "2024-09-23T22:00:00.000Z", + "2024-09-24T22:00:00.000Z", + "2024-09-25T22:00:00.000Z", + "2024-09-26T22:00:00.000Z", + "2024-09-27T22:00:00.000Z" + ], + "yesterDaily": true, + "history": [ + { + "date": 1720374903074, + "value": 1, + "isDue": true, + "completed": true + }, + { + "date": 1720545311291, + "value": 0.02529999999999999, + "isDue": true, + "completed": false + }, + { + "date": 1720564306717, + "value": -0.9740518837628547, + "isDue": true, + "completed": false + }, + { + 
"date": 1720682459722, + "value": 0.051222853419153, + "isDue": true, + "completed": true + }, + { + "date": 1720778325246, + "value": 1.0499115128458676, + "isDue": true, + "completed": true + }, + { + "date": 1720778492219, + "value": 2.023365658844519, + "isDue": true, + "completed": true + }, + { + "date": 1724255707691, + "value": 1.0738942424964806, + "isDue": true, + "completed": false + }, + { + "date": 1726846163638, + "value": 0.10103816898038132, + "isDue": true, + "completed": false + }, + { + "date": 1726953787540, + "value": -0.8963760215867302, + "isDue": true, + "completed": false + }, + { + "date": 1726956115607, + "value": -1.919611992979862, + "isDue": true, + "completed": false + } + ], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "5 Minuten ruhig durchatmen", + "notes": "Klicke um Deinen Terminplan festzulegen!", + "tags": [], + "value": -1.919611992979862, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "startDate": "2024-07-06T22:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [], + "checklist": [], + "reminders": [], + "createdAt": "2024-07-07T17:51:53.266Z", + "updatedAt": "2024-09-21T22:51:41.756Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": true, + "id": "f2c85972-1a19-4426-bc6d-ce3337b9d99f" + }, + { + "_id": "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", + "frequency": "weekly", + "everyX": 1, + "startDate": "2024-09-21T22:00:00.000Z", + "repeat": { + "m": false, + "t": false, + "w": true, + "th": false, + "f": false, + "s": true, + "su": true + }, + "streak": 0, + "daysOfMonth": [], + "weeksOfMonth": [], + "nextDue": [ + "2024-09-24T22:00:00.000Z", + "2024-09-27T22:00:00.000Z", + "2024-09-28T22:00:00.000Z", + "2024-10-01T22:00:00.000Z", + "2024-10-04T22:00:00.000Z", + "2024-10-08T22:00:00.000Z" + ], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "daily", + "text": "Fitnessstudio besuchen", + "notes": "Ein einstündiges Workout im Fitnessstudio absolvieren.", + "tags": ["51076966-2970-4b40-b6ba-d58c6a756dd7"], + "value": 0, + "priority": 2, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-22T11:44:43.774Z", + "updatedAt": "2024-09-22T11:44:43.774Z", + "userId": "1343a9af-d891-4027-841a-956d105ca408", + "isDue": true, + "id": "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + }, + { + "_id": "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "date": "2024-09-27T22:17:00.000Z", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Buch zu Ende lesen", + "notes": "Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:17:57.816Z", + "updatedAt": "2024-09-21T22:17:57.816Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "88de7cd9-af2b-49ce-9afd-bf941d87336b" + }, + { + "_id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "date": "2024-08-31T22:16:00.000Z", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Rechnungen bezahlen", + "notes": "Strom- und Internetrechnungen rechtzeitig überweisen.", + "tags": [], + "value": 
0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [ + { + "id": "91c09432-10ac-4a49-bd20-823081ec29ed", + "time": "2024-09-22T02:00:00.0000Z" + } + ], + "byHabitica": false, + "createdAt": "2024-09-21T22:17:19.513Z", + "updatedAt": "2024-09-21T22:19:35.576Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "alias": "pay_bills" + }, + { + "_id": "1aa3137e-ef72-4d1f-91ee-41933602f438", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Garten pflegen", + "notes": "Rasen mähen und die Pflanzen gießen.", + "tags": [], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:16:38.153Z", + "updatedAt": "2024-09-21T22:16:38.153Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "1aa3137e-ef72-4d1f-91ee-41933602f438" + }, + { + "_id": "86ea2475-d1b5-4020-bdcc-c188c7996afa", + "date": "2024-09-21T22:00:00.000Z", + "completed": false, + "collapseChecklist": false, + "checklist": [], + "type": "todo", + "text": "Wochenendausflug planen", + "notes": "Den Ausflug für das kommende Wochenende organisieren.", + "tags": ["51076966-2970-4b40-b6ba-d58c6a756dd7"], + "value": 0, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "reminders": [], + "byHabitica": false, + "createdAt": "2024-09-21T22:16:16.756Z", + "updatedAt": "2024-09-21T22:16:16.756Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "86ea2475-d1b5-4020-bdcc-c188c7996afa" + }, + { + "_id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + "type": "reward", + "text": "Belohne Dich selbst", + "notes": "Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!", + "tags": [], + "value": 10, + "priority": 1, + "attribute": "str", + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "byHabitica": false, + "reminders": [], + "createdAt": "2024-07-07T17:51:53.266Z", + "updatedAt": "2024-07-07T17:51:53.266Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b" + } + ], + "notifications": [ + { + "type": "ITEM_RECEIVED", + "data": { + "icon": "notif_orca_mount", + "title": "Orcas for Summer Splash!", + "text": "To celebrate Summer Splash, we've given you an Orca Mount!", + "destination": "stable" + }, + "seen": true, + "id": "b7a85df1-06ed-4ab1-b56d-43418fc6a5e5" + }, + { + "type": "UNALLOCATED_STATS_POINTS", + "data": { + "points": 2 + }, + "seen": true, + "id": "bc3f8a69-231f-4eb1-ba48-a00b6c0e0f37" + } + ], + "userV": 589, + "appVersion": "5.28.6" +} diff --git a/tests/components/habitica/fixtures/user.json b/tests/components/habitica/fixtures/user.json new file mode 100644 index 00000000000000..569c5b81a023ee --- /dev/null +++ b/tests/components/habitica/fixtures/user.json @@ -0,0 +1,83 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 0, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + 
"toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "tasksOrder": { + "rewards": ["5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"], + "todos": [ + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "1aa3137e-ef72-4d1f-91ee-41933602f438", + "86ea2475-d1b5-4020-bdcc-c188c7996afa" + ], + "dailys": [ + "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", + "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", + "e97659e0-2c42-4599-a7bb-00282adc410d", + "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + ], + "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] + }, + "party": { + "quest": { + "RSVPNeeded": true, + "key": "dustbunnies" + } + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/warrior_fixture.json b/tests/components/habitica/fixtures/warrior_fixture.json new file mode 100644 index 00000000000000..3517e8a908aae6 --- /dev/null +++ b/tests/components/habitica/fixtures/warrior_fixture.json @@ -0,0 +1,59 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "warrior", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/warrior_skills_unavailable.json b/tests/components/habitica/fixtures/warrior_skills_unavailable.json new file mode 100644 index 00000000000000..b3d33c85d5cd4a --- /dev/null +++ b/tests/components/habitica/fixtures/warrior_skills_unavailable.json @@ -0,0 +1,58 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 10, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "warrior", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + 
"automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/wizard_fixture.json b/tests/components/habitica/fixtures/wizard_fixture.json new file mode 100644 index 00000000000000..de596e231de866 --- /dev/null +++ b/tests/components/habitica/fixtures/wizard_fixture.json @@ -0,0 +1,59 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/wizard_frost_unavailable.json b/tests/components/habitica/fixtures/wizard_frost_unavailable.json new file mode 100644 index 00000000000000..31d10fde4b9920 --- /dev/null +++ b/tests/components/habitica/fixtures/wizard_frost_unavailable.json @@ -0,0 +1,58 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": true, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 50, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/fixtures/wizard_skills_unavailable.json b/tests/components/habitica/fixtures/wizard_skills_unavailable.json new file mode 100644 index 00000000000000..f3bdee9dd74caf --- /dev/null +++ b/tests/components/habitica/fixtures/wizard_skills_unavailable.json @@ -0,0 +1,58 @@ +{ + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + 
"stats": { + "buffs": { + "str": 26, + "int": 26, + "per": 26, + "con": 26, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 50, + "mp": 10, + "exp": 737, + "gp": 0, + "lvl": 34, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 + }, + "preferences": { + "sleep": false, + "automaticAllocation": false, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "plagueDoctorMask", + "body": "aetherAmulet" + } + } + } + } +} diff --git a/tests/components/habitica/snapshots/test_binary_sensor.ambr b/tests/components/habitica/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000000..c18f8f551c9212 --- /dev/null +++ b/tests/components/habitica/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pending quest invitation', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_pending_quest', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/inventory_quest_scroll_dustbunnies.png', + 'friendly_name': 'test-user Pending quest invitation', + }), + 'context': , + 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_button.ambr b/tests/components/habitica/snapshots/test_button.ambr new file mode 100644 index 00000000000000..c8f926508743fe --- /dev/null +++ b/tests/components/habitica/snapshots/test_button.ambr @@ -0,0 +1,1305 @@ +# serializer version: 1 +# name: test_buttons[healer_fixture][button.test_user_allocate_all_stat_points-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_blessing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_blessing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Blessing', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_heal_all', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_blessing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_healAll.png', + 'friendly_name': 'test-user Blessing', + }), + 'context': , + 'entity_id': 'button.test_user_blessing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_healing_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_healing_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Healing light', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_heal', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_healing_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_heal.png', + 'friendly_name': 'test-user Healing light', + }), + 'context': , + 'entity_id': 'button.test_user_healing_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_protective_aura-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_protective_aura', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Protective aura', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_protect_aura', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_protective_aura-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_protectAura.png', + 'friendly_name': 'test-user Protective aura', + }), + 'context': , + 'entity_id': 'button.test_user_protective_aura', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_searing_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 
'button.test_user_searing_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Searing brightness', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_brightness', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_searing_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_brightness.png', + 'friendly_name': 'test-user Searing brightness', + }), + 'context': , + 'entity_id': 'button.test_user_searing_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[healer_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_allocate_all_stat_points-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_stealth-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_stealth', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stealth', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_stealth', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_stealth-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_stealth.png', + 'friendly_name': 'test-user Stealth', + }), + 'context': , + 'entity_id': 'button.test_user_stealth', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_tools_of_the_trade-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_tools_of_the_trade', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tools of the trade', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_tools_of_trade', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[rogue_fixture][button.test_user_tools_of_the_trade-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_toolsOfTrade.png', + 'friendly_name': 'test-user Tools of the trade', + }), + 'context': , + 'entity_id': 'button.test_user_tools_of_the_trade', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_allocate_all_stat_points-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_buttons[warrior_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_defensive_stance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_defensive_stance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Defensive stance', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_defensive_stance', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_defensive_stance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_defensiveStance.png', + 'friendly_name': 'test-user Defensive stance', + }), + 'context': , + 'entity_id': 'button.test_user_defensive_stance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_intimidating_gaze-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_intimidating_gaze', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Intimidating gaze', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_intimidate', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_intimidating_gaze-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 
'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_intimidate.png', + 'friendly_name': 'test-user Intimidating gaze', + }), + 'context': , + 'entity_id': 'button.test_user_intimidating_gaze', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_valorous_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_valorous_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Valorous presence', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_valorous_presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[warrior_fixture][button.test_user_valorous_presence-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_valorousPresence.png', + 'friendly_name': 'test-user Valorous presence', + }), + 'context': , + 'entity_id': 'button.test_user_valorous_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_allocate_all_stat_points-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allocate all stat points', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_allocate_all_stat_points', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_allocate_all_stat_points-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Allocate all stat points', + }), + 'context': , + 'entity_id': 'button.test_user_allocate_all_stat_points', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_buy_a_health_potion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_buy_a_health_potion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy a health potion', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_buy_health_potion', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_buy_a_health_potion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_potion.png', + 'friendly_name': 'test-user Buy a health potion', + }), + 'context': , + 'entity_id': 'button.test_user_buy_a_health_potion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_chilling_frost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_chilling_frost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Chilling frost', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_frost', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_chilling_frost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_frost.png', + 'friendly_name': 'test-user Chilling frost', + }), + 'context': , + 'entity_id': 'button.test_user_chilling_frost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_earthquake-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_earthquake', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Earthquake', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_earth', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_earthquake-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_earth.png', + 'friendly_name': 'test-user Earthquake', + }), + 'context': , + 'entity_id': 'button.test_user_earthquake', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_ethereal_surge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_ethereal_surge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ethereal surge', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_mpheal', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_ethereal_surge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_mpheal.png', + 'friendly_name': 'test-user Ethereal surge', + }), + 'context': , + 'entity_id': 'button.test_user_ethereal_surge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_revive_from_death-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_revive_from_death', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Revive from death', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_revive', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_revive_from_death-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Revive from death', + }), + 'context': , + 'entity_id': 'button.test_user_revive_from_death', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_start_my_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.test_user_start_my_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start my day', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_run_cron', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[wizard_fixture][button.test_user_start_my_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Start my day', + }), + 'context': , + 'entity_id': 'button.test_user_start_my_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_calendar.ambr b/tests/components/habitica/snapshots/test_calendar.ambr new file mode 100644 index 00000000000000..7325e12547048a --- /dev/null +++ b/tests/components/habitica/snapshots/test_calendar.ambr @@ -0,0 +1,730 @@ +# serializer version: 1 +# name: test_api_events[calendar.test_user_dailies] + list([ + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-22', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-21', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-22', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-21', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-23', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-22', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-23', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-22', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 
'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-23', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-22', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-24', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-23', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-24', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-23', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-25', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-24', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-25', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-24', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-26', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-25', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-26', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-25', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-26', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-25', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-27', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-26', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-27', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-26', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 
'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-28', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-27', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-28', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-27', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-29', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-28', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-29', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-28', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-29', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-28', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-09-30', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-29', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-09-30', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-29', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-09-30', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-09-29', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-01', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-30', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-01', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-09-30', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 
'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-02', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-01', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-02', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-01', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-03', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-02', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-03', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-02', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-10-03', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-10-02', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-04', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-03', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-04', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-03', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-05', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-04', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-05', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-04', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-06', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-05', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + 
}), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-06', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-05', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-10-06', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-10-05', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-07', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-06', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-07', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-06', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'end': dict({ + 'date': '2024-10-07', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=WE,SA,SU', + 'start': dict({ + 'date': '2024-10-06', + }), + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'end': dict({ + 'date': '2024-10-08', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-07', + }), + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'date': '2024-10-08', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': 'FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA,SU', + 'start': dict({ + 'date': '2024-10-07', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + ]) +# --- +# name: test_api_events[calendar.test_user_to_do_s] + list([ + dict({ + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'end': dict({ + 'date': '2024-09-01', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'date': '2024-08-31', + }), + 'summary': 'Rechnungen bezahlen', + 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + }), + dict({ + 'description': 'Den Ausflug für das kommende Wochenende organisieren.', + 'end': dict({ + 'date': '2024-09-22', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'date': '2024-09-21', + }), + 'summary': 'Wochenendausflug planen', + 'uid': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + }), + dict({ + 'description': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', + 'end': dict({ + 'date': '2024-09-28', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'date': '2024-09-27', + }), + 'summary': 'Buch zu Ende lesen', + 'uid': 
'88de7cd9-af2b-49ce-9afd-bf941d87336b', + }), + ]) +# --- +# name: test_calendar_platform[calendar.test_user_dailies-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_dailies', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dailies', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_dailies-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': True, + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end_time': '2024-09-22 00:00:00', + 'friendly_name': 'test-user Dailies', + 'location': '', + 'message': '5 Minuten ruhig durchatmen', + 'start_time': '2024-09-21 00:00:00', + 'yesterdaily': False, + }), + 'context': , + 'entity_id': 'calendar.test_user_dailies', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_to_do_s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': "To-Do's", + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todos', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': True, + 'description': 'Den Ausflug für das kommende Wochenende organisieren.', + 'end_time': '2024-09-22 00:00:00', + 'friendly_name': "test-user To-Do's", + 'location': '', + 'message': 'Wochenendausflug planen', + 'start_time': '2024-09-21 00:00:00', + }), + 'context': , + 'entity_id': 'calendar.test_user_to_do_s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_sensor.ambr b/tests/components/habitica/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..3a43069bfc461c --- /dev/null +++ b/tests/components/habitica/snapshots/test_sensor.ambr @@ -0,0 +1,1239 @@ +# serializer version: 1 +# name: test_sensors[sensor.test_user_class-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'warrior', + 'healer', + 'wizard', + 'rogue', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_class', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Class', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_class', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_user_class-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'test-user Class', + 'options': list([ + 'warrior', + 'healer', + 'wizard', + 'rogue', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_user_class', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'wizard', + }) +# --- +# name: test_sensors[sensor.test_user_constitution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_constitution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Constitution', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_constitution', + 'unit_of_measurement': 'CON', + }) +# --- +# name: test_sensors[sensor.test_user_constitution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 20, + 'friendly_name': 'test-user Constitution', + 'level': 19, + 'unit_of_measurement': 'CON', + }), + 'context': , + 'entity_id': 'sensor.test_user_constitution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_sensors[sensor.test_user_dailies-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_dailies', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dailies', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_dailies-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1': dict({ + 'created_at': '2024-09-22T11:44:43.774Z', + 'every_x': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'is_due': True, + 'next_due': list([ + '2024-09-24T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + '2024-09-28T22:00:00.000Z', + '2024-10-01T22:00:00.000Z', + '2024-10-04T22:00:00.000Z', + '2024-10-08T22:00:00.000Z', + ]), + 'notes': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'priority': 2, + 'repeat': dict({ + 'f': False, + 'm': False, + 's': True, + 'su': True, + 't': False, + 'th': False, + 'w': True, + }), + 'start_date': 
'2024-09-21T22:00:00.000Z', + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Fitnessstudio besuchen', + 'type': 'daily', + 'yester_daily': True, + }), + '564b9ac9-c53d-4638-9e7f-1cd96fe19baa': dict({ + 'completed': True, + 'created_at': '2024-07-07T17:51:53.268Z', + 'every_x': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'is_due': True, + 'next_due': list([ + 'Mon Sep 23 2024 00:00:00 GMT+0200', + 'Tue Sep 24 2024 00:00:00 GMT+0200', + 'Wed Sep 25 2024 00:00:00 GMT+0200', + 'Thu Sep 26 2024 00:00:00 GMT+0200', + 'Fri Sep 27 2024 00:00:00 GMT+0200', + 'Sat Sep 28 2024 00:00:00 GMT+0200', + ]), + 'notes': 'Klicke um Änderungen zu machen!', + 'priority': 1, + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 'start_date': '2024-07-06T22:00:00.000Z', + 'streak': 1, + 'text': 'Zahnseide benutzen', + 'type': 'daily', + 'value': -2.9663035443712333, + 'yester_daily': True, + }), + 'f2c85972-1a19-4426-bc6d-ce3337b9d99f': dict({ + 'created_at': '2024-07-07T17:51:53.266Z', + 'every_x': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'is_due': True, + 'next_due': list([ + '2024-09-22T22:00:00.000Z', + '2024-09-23T22:00:00.000Z', + '2024-09-24T22:00:00.000Z', + '2024-09-25T22:00:00.000Z', + '2024-09-26T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + ]), + 'notes': 'Klicke um Deinen Terminplan festzulegen!', + 'priority': 1, + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 'start_date': '2024-07-06T22:00:00.000Z', + 'text': '5 Minuten ruhig durchatmen', + 'type': 'daily', + 'value': -1.919611992979862, + 'yester_daily': True, + }), + 'friendly_name': 'test-user Dailies', + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_dailies', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_sensors[sensor.test_user_display_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_display_name', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display name', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_display_name', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_user_display_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Display name', + }), + 'context': , + 'entity_id': 'sensor.test_user_display_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'test-user', + }) +# --- +# name: test_sensors[sensor.test_user_experience-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_experience', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Experience', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_experience', + 'unit_of_measurement': 'XP', + }) +# --- +# name: test_sensors[sensor.test_user_experience-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Experience', + 'unit_of_measurement': 'XP', + }), + 'context': , + 'entity_id': 'sensor.test_user_experience', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '737', + }) +# --- +# name: test_sensors[sensor.test_user_gems-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_gems', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gems', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_gems', + 'unit_of_measurement': 'gems', + }) +# --- +# name: test_sensors[sensor.test_user_gems-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Gems', + 'unit_of_measurement': 'gems', + }), + 'context': , + 'entity_id': 'sensor.test_user_gems', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_user_gold-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_gold', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gold', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_gold', + 'unit_of_measurement': 'GP', + }) +# --- +# name: test_sensors[sensor.test_user_gold-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Gold', + 'unit_of_measurement': 'GP', + }), + 'context': , + 'entity_id': 'sensor.test_user_gold', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '137.625872146098', + }) +# --- +# name: test_sensors[sensor.test_user_habits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_habits', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 
None, + 'original_icon': None, + 'original_name': 'Habits', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_habits', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_habits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + '1d147de6-5c02-4740-8e2f-71d3015a37f4': dict({ + 'created_at': '2024-07-07T17:51:53.266Z', + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'priority': 1, + 'text': 'Eine kurze Pause machen', + 'type': 'habit', + 'up': True, + }), + 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4': dict({ + 'created_at': '2024-07-07T17:51:53.265Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Oder lösche es über die Bearbeitungs-Ansicht', + 'priority': 1, + 'text': 'Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest', + 'type': 'habit', + }), + 'e97659e0-2c42-4599-a7bb-00282adc410d': dict({ + 'created_at': '2024-07-07T17:51:53.264Z', + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do', + 'priority': 1, + 'text': 'Füge eine Aufgabe zu Habitica hinzu', + 'type': 'habit', + 'up': True, + }), + 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a': dict({ + 'created_at': '2024-07-07T17:51:53.268Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'priority': 1, + 'text': 'Gesundes Essen/Junkfood', + 'type': 'habit', + 'up': True, + }), + 'friendly_name': 'test-user Habits', + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_habits', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[sensor.test_user_health-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_health', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Health', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_health', + 'unit_of_measurement': 'HP', + }) +# --- +# name: test_sensors[sensor.test_user_health-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Health', + 'unit_of_measurement': 'HP', + }), + 'context': , + 'entity_id': 'sensor.test_user_health', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_user_intelligence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_intelligence', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Intelligence', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_intelligence', + 'unit_of_measurement': 'INT', + }) +# --- +# name: test_sensors[sensor.test_user_intelligence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 0, + 'friendly_name': 'test-user Intelligence', + 'level': 19, + 'unit_of_measurement': 'INT', + }), + 'context': , + 'entity_id': 'sensor.test_user_intelligence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensors[sensor.test_user_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Level', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_user_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Level', + }), + 'context': , + 'entity_id': 'sensor.test_user_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38', + }) +# --- +# name: test_sensors[sensor.test_user_mana-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_mana', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mana', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_mana', + 'unit_of_measurement': 'MP', + }) +# --- +# name: test_sensors[sensor.test_user_mana-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Mana', + 'unit_of_measurement': 'MP', + }), + 'context': , + 'entity_id': 'sensor.test_user_mana', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.9', + }) +# --- +# name: test_sensors[sensor.test_user_max_health-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_max_health', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Max. health', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_health_max', + 'unit_of_measurement': 'HP', + }) +# --- +# name: test_sensors[sensor.test_user_max_health-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Max. health', + 'unit_of_measurement': 'HP', + }), + 'context': , + 'entity_id': 'sensor.test_user_max_health', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensors[sensor.test_user_max_mana-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_max_mana', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Max. mana', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_mana_max', + 'unit_of_measurement': 'MP', + }) +# --- +# name: test_sensors[sensor.test_user_max_mana-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Max. mana', + 'unit_of_measurement': 'MP', + }), + 'context': , + 'entity_id': 'sensor.test_user_max_mana', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '166', + }) +# --- +# name: test_sensors[sensor.test_user_mystic_hourglasses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_mystic_hourglasses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mystic hourglasses', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_trinkets', + 'unit_of_measurement': '⧖', + }) +# --- +# name: test_sensors[sensor.test_user_mystic_hourglasses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Mystic hourglasses', + 'unit_of_measurement': '⧖', + }), + 'context': , + 'entity_id': 'sensor.test_user_mystic_hourglasses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_user_next_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_next_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 
'Next level', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_experience_max', + 'unit_of_measurement': 'XP', + }) +# --- +# name: test_sensors[sensor.test_user_next_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Next level', + 'unit_of_measurement': 'XP', + }), + 'context': , + 'entity_id': 'sensor.test_user_next_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '880', + }) +# --- +# name: test_sensors[sensor.test_user_perception-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_perception', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Perception', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_perception', + 'unit_of_measurement': 'PER', + }) +# --- +# name: test_sensors[sensor.test_user_perception-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 8, + 'friendly_name': 'test-user Perception', + 'level': 19, + 'unit_of_measurement': 'PER', + }), + 'context': , + 'entity_id': 'sensor.test_user_perception', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '68', + }) +# --- +# name: test_sensors[sensor.test_user_rewards-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_rewards', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Rewards', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_rewards', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_rewards-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b': dict({ + 'created_at': '2024-07-07T17:51:53.266Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!', + 'priority': 1, + 'text': 'Belohne Dich selbst', + 'type': 'reward', + 'value': 10, + }), + 'friendly_name': 'test-user Rewards', + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_rewards', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensors[sensor.test_user_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Strength', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_strength', + 'unit_of_measurement': 'STR', + }) +# --- +# name: test_sensors[sensor.test_user_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 27, + 'friendly_name': 'test-user Strength', + 'level': 19, + 'unit_of_measurement': 'STR', + }), + 'context': , + 'entity_id': 'sensor.test_user_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '87', + }) +# --- +# name: test_sensors[sensor.test_user_to_do_s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_to_do_s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': "To-Do's", + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todos', + 'unit_of_measurement': 'tasks', + }) +# --- +# name: test_sensors[sensor.test_user_to_do_s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + '1aa3137e-ef72-4d1f-91ee-41933602f438': dict({ + 'created_at': '2024-09-21T22:16:38.153Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Rasen mähen und die Pflanzen gießen.', + 'priority': 1, + 'text': 'Garten pflegen', + 'type': 'todo', + }), + '2f6fcabc-f670-4ec3-ba65-817e8deea490': dict({ + 'created_at': '2024-09-21T22:17:19.513Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'priority': 1, + 'text': 'Rechnungen bezahlen', + 'type': 'todo', + }), + '86ea2475-d1b5-4020-bdcc-c188c7996afa': dict({ + 'created_at': '2024-09-21T22:16:16.756Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Den Ausflug für das kommende Wochenende organisieren.', + 'priority': 1, + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Wochenendausflug planen', + 'type': 'todo', + }), + '88de7cd9-af2b-49ce-9afd-bf941d87336b': dict({ + 'created_at': '2024-09-21T22:17:57.816Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'notes': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', + 'priority': 1, + 'text': 'Buch zu Ende lesen', + 'type': 'todo', + }), + 'friendly_name': "test-user To-Do's", + 'unit_of_measurement': 'tasks', + }), + 'context': , + 'entity_id': 'sensor.test_user_to_do_s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_switch.ambr 
b/tests/components/habitica/snapshots/test_switch.ambr new file mode 100644 index 00000000000000..3affbd11e2a34a --- /dev/null +++ b/tests/components/habitica/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_switch[switch.test_user_rest_in_the_inn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_user_rest_in_the_inn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rest in the inn', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_sleep', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.test_user_rest_in_the_inn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'test-user Rest in the inn', + }), + 'context': , + 'entity_id': 'switch.test_user_rest_in_the_inn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_todo.ambr b/tests/components/habitica/snapshots/test_todo.ambr new file mode 100644 index 00000000000000..79eca9dbbb0acd --- /dev/null +++ b/tests/components/habitica/snapshots/test_todo.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_complete_todo_item[daily] + tuple( + 'Habitica', + ''' + ![Dragon](https://habitica-assets.s3.amazonaws.com/mobileApp/images/Pet_Egg_Dragon.png) + You've found a Dragon Egg! + ''', + ) +# --- +# name: test_complete_todo_item[todo] + tuple( + 'Habitica', + ''' + ![Dragon](https://habitica-assets.s3.amazonaws.com/mobileApp/images/Pet_Egg_Dragon.png) + You've found a Dragon Egg! 
+ ''', + ) +# --- +# name: test_todo_items[todo.test_user_dailies] + dict({ + 'todo.test_user_dailies': dict({ + 'items': list([ + dict({ + 'description': 'Klicke um Änderungen zu machen!', + 'due': '2024-09-22', + 'status': 'completed', + 'summary': 'Zahnseide benutzen', + 'uid': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'due': '2024-09-21', + 'status': 'needs_action', + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + }), + dict({ + 'description': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'due': '2024-09-21', + 'status': 'needs_action', + 'summary': 'Fitnessstudio besuchen', + 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + }), + ]), + }), + }) +# --- +# name: test_todo_items[todo.test_user_to_do_s] + dict({ + 'todo.test_user_to_do_s': dict({ + 'items': list([ + dict({ + 'description': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', + 'due': '2024-09-27', + 'status': 'needs_action', + 'summary': 'Buch zu Ende lesen', + 'uid': '88de7cd9-af2b-49ce-9afd-bf941d87336b', + }), + dict({ + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'due': '2024-08-31', + 'status': 'needs_action', + 'summary': 'Rechnungen bezahlen', + 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + }), + dict({ + 'description': 'Rasen mähen und die Pflanzen gießen.', + 'status': 'needs_action', + 'summary': 'Garten pflegen', + 'uid': '1aa3137e-ef72-4d1f-91ee-41933602f438', + }), + dict({ + 'description': 'Den Ausflug für das kommende Wochenende organisieren.', + 'due': '2024-09-21', + 'status': 'needs_action', + 'summary': 'Wochenendausflug planen', + 'uid': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + }), + dict({ + 'description': 'Lebensmittel und Haushaltsbedarf für die Woche einkaufen.', + 'status': 'completed', + 'summary': 'Wocheneinkauf erledigen', + 'uid': '162f0bbe-a097-4a06-b4f4-8fbeed85d2ba', + }), + dict({ + 'description': 'Wohnzimmer und Küche gründlich aufräumen.', + 'status': 'completed', + 'summary': 'Wohnung aufräumen', + 'uid': '3fa06743-aa0f-472b-af1a-f27c755e329c', + }), + ]), + }), + }) +# --- +# name: test_todos[todo.test_user_dailies-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.test_user_dailies', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dailies', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_dailys', + 'unit_of_measurement': None, + }) +# --- +# name: test_todos[todo.test_user_dailies-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test-user Dailies', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.test_user_dailies', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_todos[todo.test_user_to_do_s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': 
None, + 'entity_id': 'todo.test_user_to_do_s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': "To-Do's", + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todos', + 'unit_of_measurement': None, + }) +# --- +# name: test_todos[todo.test_user_to_do_s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': "test-user To-Do's", + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.test_user_to_do_s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/habitica/test_binary_sensor.py b/tests/components/habitica/test_binary_sensor.py new file mode 100644 index 00000000000000..1710f8f217effa --- /dev/null +++ b/tests/components/habitica/test_binary_sensor.py @@ -0,0 +1,84 @@ +"""Tests for the Habitica binary sensor platform.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import ASSETS_URL, DEFAULT_URL, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +def binary_sensor_only() -> Generator[None]: + """Enable only the binary sensor platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica") +async def test_binary_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the Habitica binary sensor platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("fixture", "entity_state", "entity_picture"), + [ + ("user", STATE_ON, f"{ASSETS_URL}inventory_quest_scroll_dustbunnies.png"), + ("quest_invitation_off", STATE_OFF, None), + ], +) +async def test_pending_quest_states( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + fixture: str, + entity_state: str, + entity_picture: str | None, +) -> None: + """Test states of pending quest sensor.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get(f"{DEFAULT_URL}/api/v3/tasks/user", json={"data": []}) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert ( + state :=
hass.states.get("binary_sensor.test_user_pending_quest_invitation") + ) + assert state.state == entity_state + assert state.attributes.get("entity_picture") == entity_picture diff --git a/tests/components/habitica/test_button.py b/tests/components/habitica/test_button.py new file mode 100644 index 00000000000000..979cefef9238b1 --- /dev/null +++ b/tests/components/habitica/test_button.py @@ -0,0 +1,342 @@ +"""Tests for Habitica button platform.""" + +from collections.abc import Generator +from http import HTTPStatus +import re +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from .conftest import mock_called_with + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +def button_only() -> Generator[None]: + """Enable only the button platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.BUTTON], + ): + yield + + +@pytest.mark.parametrize( + "fixture", + [ + "wizard_fixture", + "rogue_fixture", + "warrior_fixture", + "healer_fixture", + ], +) +async def test_buttons( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + fixture: str, +) -> None: + """Test button entities.""" + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "api_url", "fixture"), + [ + ("button.test_user_allocate_all_stat_points", "user/allocate-now", "user"), + ("button.test_user_buy_a_health_potion", "user/buy-health-potion", "user"), + ("button.test_user_revive_from_death", "user/revive", "user"), + ("button.test_user_start_my_day", "cron", "user"), + ( + "button.test_user_chilling_frost", + "user/class/cast/frost", + "wizard_fixture", + ), + ( + "button.test_user_earthquake", + "user/class/cast/earth", + "wizard_fixture", + ), + ( + "button.test_user_ethereal_surge", + "user/class/cast/mpheal", + "wizard_fixture", + ), + ( + "button.test_user_stealth", + "user/class/cast/stealth", + "rogue_fixture", + ), + ( + "button.test_user_tools_of_the_trade", + "user/class/cast/toolsOfTrade", + "rogue_fixture", + ), + ( + 
"button.test_user_defensive_stance", + "user/class/cast/defensiveStance", + "warrior_fixture", + ), + ( + "button.test_user_intimidating_gaze", + "user/class/cast/intimidate", + "warrior_fixture", + ), + ( + "button.test_user_valorous_presence", + "user/class/cast/valorousPresence", + "warrior_fixture", + ), + ( + "button.test_user_healing_light", + "user/class/cast/heal", + "healer_fixture", + ), + ( + "button.test_user_protective_aura", + "user/class/cast/protectAura", + "healer_fixture", + ), + ( + "button.test_user_searing_brightness", + "user/class/cast/brightness", + "healer_fixture", + ), + ( + "button.test_user_blessing", + "user/class/cast/healAll", + "healer_fixture", + ), + ], +) +async def test_button_press( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + entity_id: str, + api_url: str, + fixture: str, +) -> None: + """Test button press method.""" + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + aioclient_mock.post(f"{DEFAULT_URL}/api/v3/{api_url}", json={"data": None}) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with(aioclient_mock, "post", f"{DEFAULT_URL}/api/v3/{api_url}") + + +@pytest.mark.parametrize( + ("entity_id", "api_url"), + [ + ("button.test_user_allocate_all_stat_points", "user/allocate-now"), + ("button.test_user_buy_a_health_potion", "user/buy-health-potion"), + ("button.test_user_revive_from_death", "user/revive"), + ("button.test_user_start_my_day", "cron"), + ("button.test_user_chilling_frost", "user/class/cast/frost"), + ("button.test_user_earthquake", "user/class/cast/earth"), + ("button.test_user_ethereal_surge", "user/class/cast/mpheal"), + ], + ids=[ + "allocate-points", + "health-potion", + "revive", + "run-cron", + "chilling frost", + "earthquake", + "ethereal surge", + ], +) +@pytest.mark.parametrize( + ("status_code", "msg", "exception"), + [ + ( + HTTPStatus.TOO_MANY_REQUESTS, + "Rate limit exceeded, try again later", + ServiceValidationError, + ), + ( + HTTPStatus.BAD_REQUEST, + "Unable to connect to Habitica, try again later", + HomeAssistantError, + ), + ( + HTTPStatus.UNAUTHORIZED, + "Unable to complete action, the required conditions are not met", + ServiceValidationError, + ), + ], +) +async def test_button_press_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + entity_id: str, + api_url: str, + status_code: HTTPStatus, + msg: str, + exception: Exception, +) -> None: + """Test button press exceptions.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/{api_url}", + 
status=status_code, + json={"data": None}, + ) + + with pytest.raises(exception, match=msg): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/{api_url}") + + +@pytest.mark.parametrize( + ("fixture", "entity_ids"), + [ + ( + "common_buttons_unavailable", + [ + "button.test_user_allocate_all_stat_points", + "button.test_user_revive_from_death", + "button.test_user_buy_a_health_potion", + "button.test_user_start_my_day", + ], + ), + ( + "wizard_skills_unavailable", + [ + "button.test_user_chilling_frost", + "button.test_user_earthquake", + "button.test_user_ethereal_surge", + ], + ), + ("wizard_frost_unavailable", ["button.test_user_chilling_frost"]), + ( + "rogue_skills_unavailable", + ["button.test_user_tools_of_the_trade", "button.test_user_stealth"], + ), + ("rogue_stealth_unavailable", ["button.test_user_stealth"]), + ( + "warrior_skills_unavailable", + [ + "button.test_user_defensive_stance", + "button.test_user_intimidating_gaze", + "button.test_user_valorous_presence", + ], + ), + ( + "healer_skills_unavailable", + [ + "button.test_user_healing_light", + "button.test_user_protective_aura", + "button.test_user_searing_brightness", + "button.test_user_blessing", + ], + ), + ], +) +async def test_button_unavailable( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + fixture: str, + entity_ids: list[str], +) -> None: + """Test buttons are unavailable if conditions are not met.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get(re.compile(r".*"), json={"data": []}) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + for entity_id in entity_ids: + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/habitica/test_calendar.py b/tests/components/habitica/test_calendar.py new file mode 100644 index 00000000000000..7c0a2686038b77 --- /dev/null +++ b/tests/components/habitica/test_calendar.py @@ -0,0 +1,80 @@ +"""Tests for the Habitica calendar platform.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(autouse=True) +def calendar_only() -> Generator[None]: + """Enable only the calendar platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.CALENDAR], + ): + yield + + +@pytest.fixture(autouse=True) +async def set_tz(hass: HomeAssistant) -> None: + """Fixture to set timezone.""" + await hass.config.async_set_time_zone("Europe/Berlin") + + +@pytest.mark.usefixtures("mock_habitica") +@pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") +async def test_calendar_platform( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: 
SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the Habitica calendar platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity"), + [ + "calendar.test_user_to_do_s", + "calendar.test_user_dailies", + ], +) +@pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") +@pytest.mark.usefixtures("mock_habitica") +async def test_api_events( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, + entity: str, +) -> None: + """Test calendar event.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + client = await hass_client() + response = await client.get( + f"/api/calendars/{entity}?start=2024-08-29&end=2024-10-08" + ) + + assert await response.json() == snapshot diff --git a/tests/components/habitica/test_config_flow.py b/tests/components/habitica/test_config_flow.py index 09cda3fbb0a005..604877f0c47a0b 100644 --- a/tests/components/habitica/test_config_flow.py +++ b/tests/components/habitica/test_config_flow.py @@ -17,8 +17,6 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry - MOCK_DATA_LOGIN_STEP = { CONF_USERNAME: "test-email@example.com", CONF_PASSWORD: "test-password", @@ -217,38 +215,3 @@ async def test_form_advanced_errors( assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": text_error} - - -async def test_manual_flow_config_exist(hass: HomeAssistant) -> None: - """Test config flow discovers only already configured config.""" - MockConfigEntry( - domain=DOMAIN, - unique_id="test-api-user", - data={"api_user": "test-api-user", "api_key": "test-api-key"}, - ).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "advanced" - - mock_obj = MagicMock() - mock_obj.user.get = AsyncMock(return_value={"api_user": "test-api-user"}) - - with patch( - "homeassistant.components.habitica.config_flow.HabitipyAsync", - return_value=mock_obj, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": "test-api-key", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/habitica/test_init.py b/tests/components/habitica/test_init.py index 683472a720fd8d..fd8a18b2d449e5 100644 --- a/tests/components/habitica/test_init.py +++ b/tests/components/habitica/test_init.py @@ -1,7 +1,10 @@ """Test the habitica module.""" +import datetime from http import HTTPStatus +import logging +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.habitica.const import ( @@ -13,10 +16,16 @@ EVENT_API_CALL_SUCCESS, SERVICE_API_CALL, ) +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_NAME from homeassistant.core import Event, HomeAssistant -from tests.common import MockConfigEntry, async_capture_events +from tests.common 
import ( + MockConfigEntry, + async_capture_events, + async_fire_time_changed, + load_json_object_fixture, +) from tests.test_util.aiohttp import AiohttpClientMocker TEST_API_CALL_ARGS = {"text": "Use API from Home Assistant", "type": "todo"} @@ -29,121 +38,47 @@ def capture_api_call_success(hass: HomeAssistant) -> list[Event]: return async_capture_events(hass, EVENT_API_CALL_SUCCESS) -@pytest.fixture -def habitica_entry(hass: HomeAssistant) -> MockConfigEntry: - """Test entry for the following tests.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="test-api-user", - data={ - "api_user": "test-api-user", - "api_key": "test-api-key", - "url": DEFAULT_URL, - }, - ) - entry.add_to_hass(hass) - return entry - - -@pytest.fixture -def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: - """Register requests for the tests.""" - aioclient_mock.get( - "https://habitica.com/api/v3/user", - json={ - "data": { - "auth": {"local": {"username": TEST_USER_NAME}}, - "api_user": "test-api-user", - "profile": {"name": TEST_USER_NAME}, - "stats": { - "class": "warrior", - "con": 1, - "exp": 2, - "gp": 3, - "hp": 4, - "int": 5, - "lvl": 6, - "maxHealth": 7, - "maxMP": 8, - "mp": 9, - "per": 10, - "points": 11, - "str": 12, - "toNextLevel": 13, - }, - } - }, - ) - - aioclient_mock.get( - "https://habitica.com/api/v3/tasks/user", - json={ - "data": [ - { - "text": f"this is a mock {task} #{i}", - "id": f"{i}", - "type": task, - "completed": False, - } - for i, task in enumerate(("habit", "daily", "todo", "reward"), start=1) - ] - }, - ) - aioclient_mock.get( - "https://habitica.com/api/v3/tasks/user?type=completedTodos", - json={ - "data": [ - { - "text": "this is a mock todo #5", - "id": 5, - "type": "todo", - "completed": True, - } - ] - }, - ) - - aioclient_mock.post( - "https://habitica.com/api/v3/tasks/user", - status=HTTPStatus.CREATED, - json={"data": TEST_API_CALL_ARGS}, - ) - - return aioclient_mock - - -@pytest.mark.usefixtures("common_requests") +@pytest.mark.usefixtures("mock_habitica") async def test_entry_setup_unload( - hass: HomeAssistant, habitica_entry: MockConfigEntry + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test integration setup and unload.""" - assert await hass.config_entries.async_setup(habitica_entry.entry_id) + + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.services.has_service(DOMAIN, SERVICE_API_CALL) + assert config_entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(habitica_entry.entry_id) + assert await hass.config_entries.async_unload(config_entry.entry_id) - assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) + assert config_entry.state is ConfigEntryState.NOT_LOADED -@pytest.mark.usefixtures("common_requests") +@pytest.mark.usefixtures("mock_habitica") async def test_service_call( hass: HomeAssistant, - habitica_entry: MockConfigEntry, + config_entry: MockConfigEntry, capture_api_call_success: list[Event], + mock_habitica: AiohttpClientMocker, ) -> None: """Test integration setup, service call and unload.""" - - assert await hass.config_entries.async_setup(habitica_entry.entry_id) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.services.has_service(DOMAIN, SERVICE_API_CALL) + assert config_entry.state is ConfigEntryState.LOADED assert len(capture_api_call_success) 
== 0 + mock_habitica.post( + "https://habitica.com/api/v3/tasks/user", + status=HTTPStatus.CREATED, + json={"data": TEST_API_CALL_ARGS}, + ) + TEST_SERVICE_DATA = { - ATTR_NAME: "test_user", + ATTR_NAME: "test-user", ATTR_PATH: ["tasks", "user", "post"], ATTR_ARGS: TEST_API_CALL_ARGS, } @@ -157,6 +92,77 @@ async def test_service_call( del captured_data[ATTR_DATA] assert captured_data == TEST_SERVICE_DATA - assert await hass.config_entries.async_unload(habitica_entry.entry_id) - assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) +@pytest.mark.parametrize( + ("status"), [HTTPStatus.NOT_FOUND, HTTPStatus.TOO_MANY_REQUESTS] +) +async def test_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + status: HTTPStatus, +) -> None: + """Test config entry not ready.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + status=status, + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_update_failed( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test coordinator update failed.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("user.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + status=HTTPStatus.NOT_FOUND, + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_rate_limited( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator when rate limited.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.clear_requests() + mock_habitica.get( + f"{DEFAULT_URL}/api/v3/user", + status=HTTPStatus.TOO_MANY_REQUESTS, + ) + + with caplog.at_level(logging.DEBUG): + freezer.tick(datetime.timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert "Rate limit exceeded, will try again later" in caplog.text diff --git a/tests/components/habitica/test_sensor.py b/tests/components/habitica/test_sensor.py new file mode 100644 index 00000000000000..defe5a270ae18f --- /dev/null +++ b/tests/components/habitica/test_sensor.py @@ -0,0 +1,72 @@ +"""Test Habitica sensor platform.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import DOMAIN +from homeassistant.components.habitica.sensor import HabitipySensorEntity +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +def sensor_only() -> Generator[None]: + """Enable only the sensor platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + 
[Platform.SENSOR], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica", "entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the Habitica sensor platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures("mock_habitica", "entity_registry_enabled_by_default") +async def test_sensor_deprecation_issue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test task sensor deprecation issue.""" + + with patch( + "homeassistant.components.habitica.sensor.entity_used_in", return_value=True + ): + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id=f"deprecated_task_entity_{HabitipySensorEntity.TODOS}", + ) + assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id=f"deprecated_task_entity_{HabitipySensorEntity.DAILIES}", + ) diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py new file mode 100644 index 00000000000000..403779bcbfbb5b --- /dev/null +++ b/tests/components/habitica/test_services.py @@ -0,0 +1,548 @@ +"""Test Habitica actions.""" + +from collections.abc import Generator +from http import HTTPStatus +from typing import Any +from unittest.mock import patch + +import pytest + +from homeassistant.components.habitica.const import ( + ATTR_CONFIG_ENTRY, + ATTR_DIRECTION, + ATTR_SKILL, + ATTR_TASK, + DEFAULT_URL, + DOMAIN, + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_CANCEL_QUEST, + SERVICE_CAST_SKILL, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_SCORE_HABIT, + SERVICE_SCORE_REWARD, + SERVICE_START_QUEST, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from .conftest import mock_called_with + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + +REQUEST_EXCEPTION_MSG = "Unable to connect to Habitica, try again later" +RATE_LIMIT_EXCEPTION_MSG = "Rate limit exceeded, try again later" + + +@pytest.fixture(autouse=True) +def services_only() -> Generator[None]: + """Enable only services.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [], + ): + yield + + +@pytest.fixture(autouse=True) +async def load_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + services_only: Generator, +) -> None: + """Load config entry.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("service_data", "item", "target_id"), + [ + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "pickpocket", + }, + "pickPocket", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: 
"2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "backstab", + }, + "backStab", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "fireball", + }, + "fireball", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "pay_bills", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ], + ids=[ + "cast pickpocket", + "cast backstab", + "cast fireball", + "cast smash", + "select task by name", + "select task_by_alias", + ], +) +async def test_cast_skill( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + item: str, + target_id: str, +) -> None: + """Test Habitica cast skill action.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { + ATTR_TASK: "task-not-found", + ATTR_SKILL: "smash", + }, + HTTPStatus.OK, + ServiceValidationError, + "Unable to complete action, could not find the task 'task-not-found'", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.NOT_FOUND, + ServiceValidationError, + "Unable to cast skill, your character does not have the skill or spell smash", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Unable to cast skill, not enough mana. 
Your character has 50 MP, but the skill costs 10 MP", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + REQUEST_EXCEPTION_MSG, + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_cast_skill_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica cast skill action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/smash?targetId=2f6fcabc-f670-4ec3-ba65-817e8deea490", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_habitica") +async def test_get_config_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test Habitica config entry exceptions.""" + + with pytest.raises( + ServiceValidationError, + match="The selected character is not configured in Home Assistant", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: "0000000000000000", + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + return_response=True, + blocking=True, + ) + + assert await hass.config_entries.async_unload(config_entry.entry_id) + + with pytest.raises( + ServiceValidationError, + match="The selected character is currently not loaded or disabled in Home Assistant", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "command"), + [ + (SERVICE_ABORT_QUEST, "abort"), + (SERVICE_ACCEPT_QUEST, "accept"), + (SERVICE_CANCEL_QUEST, "cancel"), + (SERVICE_LEAVE_QUEST, "leave"), + (SERVICE_REJECT_QUEST, "reject"), + (SERVICE_START_QUEST, "force-start"), + ], + ids=["abort", "accept", "cancel", "leave", "reject", "force-start"], +) +async def test_handle_quests( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service: str, + command: str, +) -> None: + """Test Habitica actions for quest handling.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + service, + service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", + ) + + +@pytest.mark.parametrize( + ( + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + HTTPStatus.NOT_FOUND, + ServiceValidationError, + "Unable to complete action, quest or group not found", + ), + ( + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Action not allowed, only quest leader or group leader can perform this action", + ), + ( + HTTPStatus.BAD_REQUEST, + HomeAssistantError, +
REQUEST_EXCEPTION_MSG, + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_handle_quests_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica handle quests action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/groups/party/quests/accept", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_ACCEPT_QUEST, + service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "service_data", "task_id"), + [ + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "down", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_REWARD, + { + ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + }, + "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "Füge eine Aufgabe zu Habitica hinzu", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "create_a_task", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ], + ids=[ + "habit score up", + "habit score down", + "buy reward", + "match task by name", + "match task by alias", + ], +) +async def test_score_task( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service: str, + service_data: dict[str, Any], + task_id: str, +) -> None: + """Test Habitica score task action.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + service, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { + ATTR_TASK: "task does not exist", + ATTR_DIRECTION: "up", + }, + HTTPStatus.OK, + ServiceValidationError, + "Unable to complete action, could not find the task 'task does not exist'", + ), + ( + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + REQUEST_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + ATTR_DIRECTION: "up", + }, + HTTPStatus.UNAUTHORIZED, + HomeAssistantError, + "Unable to buy reward, not enough gold. 
Your character has 137.63 GP, but the reward costs 10 GP", + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_score_task_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica score task action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/e97659e0-2c42-4599-a7bb-00282adc410d/score/up", + json={"success": True, "data": {}}, + status=http_status, + ) + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b/score/up", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_SCORE_HABIT, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) diff --git a/tests/components/habitica/test_switch.py b/tests/components/habitica/test_switch.py new file mode 100644 index 00000000000000..55ba7b19b228b9 --- /dev/null +++ b/tests/components/habitica/test_switch.py @@ -0,0 +1,138 @@ +"""Tests for the Habitica switch platform.""" + +from collections.abc import Generator +from http import HTTPStatus +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import DEFAULT_URL +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from .conftest import mock_called_with + +from tests.common import MockConfigEntry, snapshot_platform +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +def switch_only() -> Generator[None]: + """Enable only the switch platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.SWITCH], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica") +async def test_switch( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch entities.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("service_call"), + [ + SERVICE_TURN_ON, + SERVICE_TURN_OFF, + SERVICE_TOGGLE, + ], +) +async def test_turn_on_off_toggle( + hass: HomeAssistant, + config_entry: MockConfigEntry, + service_call: str, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test switch turn on/off, toggle method.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/sleep", + json={"success": True, "data": False}, + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + 
SWITCH_DOMAIN, + service_call, + {ATTR_ENTITY_ID: "switch.test_user_rest_in_the_inn"}, + blocking=True, + ) + + assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/user/sleep") + + +@pytest.mark.parametrize( + ("service_call"), + [ + SERVICE_TURN_ON, + SERVICE_TURN_OFF, + SERVICE_TOGGLE, + ], +) +@pytest.mark.parametrize( + ("status_code", "exception"), + [ + (HTTPStatus.TOO_MANY_REQUESTS, ServiceValidationError), + (HTTPStatus.BAD_REQUEST, HomeAssistantError), + ], +) +async def test_turn_on_off_toggle_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + service_call: str, + mock_habitica: AiohttpClientMocker, + status_code: HTTPStatus, + exception: Exception, +) -> None: + """Test switch turn on/off, toggle method exceptions.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/sleep", + status=status_code, + json={"success": True, "data": False}, + ) + + with pytest.raises(expected_exception=exception): + await hass.services.async_call( + SWITCH_DOMAIN, + service_call, + {ATTR_ENTITY_ID: "switch.test_user_rest_in_the_inn"}, + blocking=True, + ) + + assert mock_called_with(mock_habitica, "post", f"{DEFAULT_URL}/api/v3/user/sleep") diff --git a/tests/components/habitica/test_todo.py b/tests/components/habitica/test_todo.py new file mode 100644 index 00000000000000..c9a4b3dd37a947 --- /dev/null +++ b/tests/components/habitica/test_todo.py @@ -0,0 +1,700 @@ +"""Tests for Habitica todo platform.""" + +from collections.abc import Generator +from datetime import datetime +from http import HTTPStatus +import json +import re +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from .conftest import mock_called_with + +from tests.common import ( + MockConfigEntry, + async_get_persistent_notifications, + load_json_object_fixture, + snapshot_platform, +) +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import WebSocketGenerator + + +@pytest.fixture(autouse=True) +def todo_only() -> Generator[None]: + """Enable only the todo platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.TODO], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica") +async def test_todos( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test todo platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id"), + [ + "todo.test_user_to_do_s", + "todo.test_user_dailies", + ], +) +@pytest.mark.usefixtures("mock_habitica")
+async def test_todo_items( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_id: str, +) -> None: + """Test items on todo lists.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + result = await hass.services.async_call( + TODO_DOMAIN, + TodoServices.GET_ITEMS, + {}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + return_response=True, + ) + + assert result == snapshot + + +@pytest.mark.freeze_time("2024-09-21 00:00:00") +@pytest.mark.parametrize( + ("entity_id", "uid"), + [ + ("todo.test_user_to_do_s", "88de7cd9-af2b-49ce-9afd-bf941d87336b"), + ("todo.test_user_dailies", "f2c85972-1a19-4426-bc6d-ce3337b9d99f"), + ], + ids=["todo", "daily"], +) +async def test_complete_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + snapshot: SnapshotAssertion, + entity_id: str, + uid: str, +) -> None: + """Test completing an item on the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/up", + json=load_json_object_fixture("score_with_drop.json", DOMAIN), + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/up" + ) + + # Test notification for item drop + notifications = async_get_persistent_notifications(hass) + assert len(notifications) == 1 + _id, *_ = notifications + assert snapshot == (notifications[_id]["title"], notifications[_id]["message"]) + + +@pytest.mark.parametrize( + ("entity_id", "uid"), + [ + ("todo.test_user_to_do_s", "162f0bbe-a097-4a06-b4f4-8fbeed85d2ba"), + ("todo.test_user_dailies", "564b9ac9-c53d-4638-9e7f-1cd96fe19baa"), + ], + ids=["todo", "daily"], +) +async def test_uncomplete_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + entity_id: str, + uid: str, +) -> None: + """Test uncompleting an item on the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/down", + json={"data": {}, "success": True}, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/down" + ) + + +@pytest.mark.parametrize( + ("uid", "status"), + [ + ("88de7cd9-af2b-49ce-9afd-bf941d87336b", "completed"), + ("162f0bbe-a097-4a06-b4f4-8fbeed85d2ba", "needs_action"), + ], + ids=["completed", "needs_action"], +) +async def test_complete_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + uid: str, + status: str, +) -> None: + """Test exception when completing/uncompleting an item on the todo list.""" + + config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + re.compile(f"{DEFAULT_URL}/api/v3/tasks/{uid}/score/.+"), + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match=r"Unable to update the score for your Habitica to-do `.+`, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_STATUS: status}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("entity_id", "uid", "date"), + [ + ( + "todo.test_user_to_do_s", + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "2024-07-30", + ), + ( + "todo.test_user_dailies", + "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + None, + ), + ], + ids=["todo", "daily"], +) +async def test_update_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + entity_id: str, + uid: str, + date: str, +) -> None: + """Test update details of a item on the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.put( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + json={"data": {}, "success": True}, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + { + ATTR_ITEM: uid, + ATTR_RENAME: "test-summary", + ATTR_DESCRIPTION: "test-description", + ATTR_DUE_DATE: date, + }, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + mock_call = mock_called_with( + mock_habitica, "PUT", f"{DEFAULT_URL}/api/v3/tasks/{uid}" + ) + assert mock_call + assert json.loads(mock_call[2]) == { + "date": date, + "notes": "test-description", + "text": "test-summary", + } + + +async def test_update_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when update item on the todo list.""" + uid = "88de7cd9-af2b-49ce-9afd-bf941d87336b" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.put( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to update the Habitica to-do `test-summary`, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + { + ATTR_ITEM: uid, + ATTR_RENAME: "test-summary", + ATTR_DESCRIPTION: "test-description", + ATTR_DUE_DATE: "2024-07-30", + }, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +async def test_add_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test add a todo item to the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/user", + json={"data": {}, "success": True}, + status=HTTPStatus.CREATED, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + { + ATTR_ITEM: "test-summary", + ATTR_DESCRIPTION: 
"test-description", + ATTR_DUE_DATE: "2024-07-30", + }, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + mock_call = mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/tasks/user", + ) + assert mock_call + assert json.loads(mock_call[2]) == { + "date": "2024-07-30", + "notes": "test-description", + "text": "test-summary", + "type": "todo", + } + + +async def test_add_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when adding a todo item to the todo list.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/user", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to create new to-do `test-summary` for Habitica, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + { + ATTR_ITEM: "test-summary", + ATTR_DESCRIPTION: "test-description", + ATTR_DUE_DATE: "2024-07-30", + }, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +async def test_delete_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test deleting a todo item from the todo list.""" + + uid = "2f6fcabc-f670-4ec3-ba65-817e8deea490" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.delete( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + json={"data": {}, "success": True}, + ) + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uid}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "delete", f"{DEFAULT_URL}/api/v3/tasks/{uid}" + ) + + +async def test_delete_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when deleting a todo item from the todo list.""" + + uid = "2f6fcabc-f670-4ec3-ba65-817e8deea490" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.delete( + f"{DEFAULT_URL}/api/v3/tasks/{uid}", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to delete item from Habitica to-do list, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uid}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +async def test_delete_completed_todo_items( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test deleting completed todo items from the todo list.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos", + json={"data": {}, "success": True}, + ) + await 
hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_COMPLETED_ITEMS, + {}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, "post", f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos" + ) + + +async def test_delete_completed_todo_items_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test exception when deleting completed todo items from the todo list.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/clearCompletedTodos", + status=HTTPStatus.NOT_FOUND, + ) + with pytest.raises( + expected_exception=ServiceValidationError, + match="Unable to delete completed to-do items from Habitica to-do list, please try again", + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_COMPLETED_ITEMS, + {}, + target={ATTR_ENTITY_ID: "todo.test_user_to_do_s"}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("entity_id", "uid", "previous_uid"), + [ + ( + "todo.test_user_to_do_s", + "1aa3137e-ef72-4d1f-91ee-41933602f438", + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + ), + ( + "todo.test_user_dailies", + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", + "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + ), + ], + ids=["todo", "daily"], +) +async def test_move_todo_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + hass_ws_client: WebSocketGenerator, + entity_id: str, + uid: str, + previous_uid: str, +) -> None: + """Test move todo items.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + for pos in (0, 1): + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/{pos}", + json={"data": {}, "success": True}, + ) + + client = await hass_ws_client() + # move to second position + data = { + "id": id, + "type": "todo/item/move", + "entity_id": entity_id, + "uid": uid, + "previous_uid": previous_uid, + } + await client.send_json_auto_id(data) + resp = await client.receive_json() + assert resp.get("success") + + # move to top position + data = { + "id": id, + "type": "todo/item/move", + "entity_id": entity_id, + "uid": uid, + } + await client.send_json_auto_id(data) + resp = await client.receive_json() + assert resp.get("success") + + for pos in (0, 1): + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/{pos}", + ) + + +async def test_move_todo_item_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test exception when moving todo item.""" + + uid = "1aa3137e-ef72-4d1f-91ee-41933602f438" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{uid}/move/to/0", + status=HTTPStatus.NOT_FOUND, + ) + + client = await hass_ws_client() + + data = { + "id": id, + "type": "todo/item/move", + "entity_id": "todo.test_user_to_do_s", + "uid": uid, + } + await client.send_json_auto_id(data) + resp = await client.receive_json() + 
assert resp.get("success") is False + + +@pytest.mark.parametrize( + ("fixture", "calculated_due_date"), + [ + ("duedate_fixture_1.json", (2024, 9, 23)), + ("duedate_fixture_2.json", (2024, 9, 24)), + ("duedate_fixture_3.json", (2024, 10, 23)), + ("duedate_fixture_4.json", (2024, 10, 23)), + ("duedate_fixture_5.json", (2024, 9, 28)), + ("duedate_fixture_6.json", (2024, 10, 21)), + ("duedate_fixture_7.json", None), + ("duedate_fixture_8.json", None), + ], + ids=[ + "default", + "daily starts on startdate", + "monthly starts on startdate", + "yearly starts on startdate", + "weekly", + "monthly starts on fixed day", + "grey daily", + "empty nextDue", + ], +) +@pytest.mark.usefixtures("set_tz") +async def test_next_due_date( + hass: HomeAssistant, + fixture: str, + calculated_due_date: tuple | None, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test next_due_date calculation.""" + + dailies_entity = "todo.test_user_dailies" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", json=load_json_object_fixture("user.json", DOMAIN) + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json={"data": []}, + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture(fixture, DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + result = await hass.services.async_call( + TODO_DOMAIN, + TodoServices.GET_ITEMS, + {}, + target={ATTR_ENTITY_ID: dailies_entity}, + blocking=True, + return_response=True, + ) + + assert ( + result[dailies_entity]["items"][0].get("due") is None + if not calculated_due_date + else result[dailies_entity]["items"][0].get("due") + == datetime(*calculated_due_date).date() + ) diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py index 0a990a0db3f0a4..82d3564440b570 100644 --- a/tests/components/hassio/common.py +++ b/tests/components/hassio/common.py @@ -7,15 +7,58 @@ import logging from types import MethodType from typing import Any -from unittest.mock import DEFAULT, AsyncMock, Mock, patch - -from aiohasupervisor.models import InstalledAddonComplete +from unittest.mock import AsyncMock, Mock, patch + +from aiohasupervisor.models import ( + AddonsOptions, + AddonsStats, + AddonStage, + InstalledAddonComplete, + Repository, + StoreAddon, + StoreAddonComplete, +) from homeassistant.components.hassio.addon_manager import AddonManager from homeassistant.core import HomeAssistant LOGGER = logging.getLogger(__name__) INSTALLED_ADDON_FIELDS = [field.name for field in fields(InstalledAddonComplete)] +STORE_ADDON_FIELDS = [field.name for field in fields(StoreAddonComplete)] +ADDONS_STATS_FIELDS = [field.name for field in fields(AddonsStats)] + +MOCK_STORE_ADDONS = [ + StoreAddon( + name="test", + arch=[], + documentation=False, + advanced=False, + available=True, + build=False, + description="Test add-on service", + homeassistant=None, + icon=False, + logo=False, + repository="core", + slug="core_test", + stage=AddonStage.EXPERIMENTAL, + update_available=False, + url="https://example.com/addons/tree/master/test", + version_latest="1.0.0", + version="1.0.0", + installed=True, + ) +] + +MOCK_REPOSITORIES = [ + Repository( + slug="core", + name="Official add-ons", + source="core", + 
url="https://home-assistant.io/addons", + maintainer="Home Assistant", + ) +] def mock_to_dict(obj: Mock, fields: list[str]) -> dict[str, Any]: @@ -32,43 +75,35 @@ def mock_addon_manager(hass: HomeAssistant) -> AddonManager: return AddonManager(hass, LOGGER, "Test", "test_addon") -def mock_discovery_info() -> Any: - """Return the discovery info from the supervisor.""" - return DEFAULT - - -def mock_get_addon_discovery_info( - discovery_info: dict[str, Any], discovery_info_side_effect: Any | None -) -> Generator[AsyncMock]: - """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", - side_effect=discovery_info_side_effect, - return_value=discovery_info, - ) as get_addon_discovery_info: - yield get_addon_discovery_info - - def mock_addon_store_info( + supervisor_client: AsyncMock, addon_store_info_side_effect: Any | None, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", - side_effect=addon_store_info_side_effect, - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info + supervisor_client.store.addon_info.side_effect = addon_store_info_side_effect + + supervisor_client.store.addon_info.return_value = addon_info = Mock( + spec=StoreAddonComplete, + slug="test", + repository="core", + available=True, + installed=False, + update_available=False, + version="1.0.0", + supervisor_api=False, + supervisor_role="default", + ) + addon_info.name = "test" + addon_info.to_dict = MethodType( + lambda self: mock_to_dict(self, STORE_ADDON_FIELDS), + addon_info, + ) + return supervisor_client.store.addon_info def mock_addon_info( supervisor_client: AsyncMock, addon_info_side_effect: Any | None -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock Supervisor add-on info.""" supervisor_client.addons.addon_info.side_effect = addon_info_side_effect @@ -90,14 +125,14 @@ def mock_addon_info( lambda self: mock_to_dict(self, INSTALLED_ADDON_FIELDS), addon_info, ) - yield supervisor_client.addons.addon_info + return supervisor_client.addons.addon_info def mock_addon_not_installed( addon_store_info: AsyncMock, addon_info: AsyncMock ) -> AsyncMock: """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True + addon_store_info.return_value.available = True return addon_info @@ -105,12 +140,8 @@ def mock_addon_installed( addon_store_info: AsyncMock, addon_info: AsyncMock ) -> AsyncMock: """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True addon_info.return_value.available = True addon_info.return_value.hostname = "core-test-addon" addon_info.return_value.state = "stopped" @@ -120,12 +151,8 @@ def mock_addon_installed( def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: """Mock add-on already running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True addon_info.return_value.state = "started" return addon_info @@ -135,15 +162,10 @@ def 
mock_install_addon_side_effect( ) -> Any | None: """Return the install add-on side effect.""" - async def install_addon(hass: HomeAssistant, slug): + async def install_addon(addon: str): """Mock install add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True addon_info.return_value.available = True addon_info.return_value.state = "stopped" addon_info.return_value.version = "1.0.0" @@ -151,16 +173,6 @@ async def install_addon(hass: HomeAssistant, slug): return install_addon -def mock_install_addon(install_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock install add-on.""" - - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - def mock_start_addon_side_effect( addon_store_info: AsyncMock, addon_info: AsyncMock ) -> Any | None: @@ -168,44 +180,24 @@ def mock_start_addon_side_effect( async def start_addon(addon: str) -> None: """Mock start add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } + addon_store_info.return_value.available = True + addon_store_info.return_value.installed = True addon_info.return_value.available = True addon_info.return_value.state = "started" return start_addon -def mock_addon_options(addon_info: AsyncMock) -> dict[str, Any]: - """Mock add-on options.""" - return addon_info.return_value.options - - def mock_set_addon_options_side_effect(addon_options: dict[str, Any]) -> Any | None: """Return the set add-on options side effect.""" - async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: + async def set_addon_options(slug: str, options: AddonsOptions) -> None: """Mock set add-on options.""" - addon_options.update(options["options"]) + addon_options.update(options.config) return set_addon_options -def mock_set_addon_options( - set_addon_options_side_effect: Any | None, -) -> Generator[AsyncMock]: - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options", - side_effect=set_addon_options_side_effect, - ) as set_options: - yield set_options - - def mock_create_backup() -> Generator[AsyncMock]: """Mock create backup.""" with patch( @@ -214,9 +206,21 @@ def mock_create_backup() -> Generator[AsyncMock]: yield create_backup -def mock_update_addon() -> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon +def mock_addon_stats(supervisor_client: AsyncMock) -> AsyncMock: + """Mock addon stats.""" + supervisor_client.addons.addon_stats.return_value = addon_stats = Mock( + spec=AddonsStats, + cpu_percent=0.99, + memory_usage=182611968, + memory_limit=3977146368, + memory_percent=4.59, + network_rx=362570232, + network_tx=82374138, + blk_read=46010945536, + blk_write=15051526144, + ) + addon_stats.to_dict = MethodType( + lambda self: mock_to_dict(self, ADDONS_STATS_FIELDS), + addon_stats, + ) + return supervisor_client.addons.addon_stats diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index db1a07c4df3679..7075b9d6982987 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -3,8 +3,9 @@ from 
collections.abc import Generator import os import re -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch +from aiohasupervisor.models import AddonsStats, AddonState from aiohttp.test_utils import TestClient import pytest @@ -31,14 +32,10 @@ def disable_security_filter() -> Generator[None]: @pytest.fixture -def hassio_env() -> Generator[None]: +def hassio_env(supervisor_is_connected: AsyncMock) -> Generator[None]: """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value={"result": "ok", "data": {}}, - ), patch.dict(os.environ, {"SUPERVISOR_TOKEN": SUPERVISOR_TOKEN}), patch( "homeassistant.components.hassio.HassIO.get_info", @@ -54,6 +51,7 @@ def hassio_stubs( hass: HomeAssistant, hass_client: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> RefreshToken: """Create mock hassio http client.""" with ( @@ -76,9 +74,6 @@ def hassio_stubs( patch( "homeassistant.components.hassio.issues.SupervisorIssues.setup", ), - patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ), ): hass.set_state(CoreState.starting) hass.loop.run_until_complete(async_setup_component(hass, "hassio", {})) @@ -129,7 +124,12 @@ def hassio_handler( @pytest.fixture def all_setup_requests( - aioclient_mock: AiohttpClientMocker, request: pytest.FixtureRequest + aioclient_mock: AiohttpClientMocker, + request: pytest.FixtureRequest, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_changelog: AsyncMock, + addon_stats: AsyncMock, ) -> None: """Mock all setup requests.""" include_addons = hasattr(request, "param") and request.param.get( @@ -137,7 +137,6 @@ def all_setup_requests( ) aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -150,13 +149,6 @@ def all_setup_requests( }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -225,46 +217,32 @@ def all_setup_requests( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={ - "result": "ok", - "data": { - "name": "test", - "slug": "test", - "update_available": False, - "version": "1.0.0", - "version_latest": "1.0.0", - "repository": "core", - "state": "started", - "icon": False, - "url": "https://github.com/home-assistant/addons/test", - "auto_update": True, - }, - }, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={ - "result": "ok", - "data": { - "name": "test2", - "slug": "test2", - "update_available": False, - "version": "1.0.0", - "version_latest": "1.0.0", - "repository": "core", - "state": "started", - "icon": False, - "url": "https://github.com", - "auto_update": False, - }, - }, - ) + addon_installed.return_value.update_available = False + addon_installed.return_value.version = "1.0.0" + 
addon_installed.return_value.version_latest = "1.0.0" + addon_installed.return_value.repository = "core" + addon_installed.return_value.state = AddonState.STARTED + addon_installed.return_value.icon = False + + def mock_addon_info(slug: str): + if slug == "test": + addon_installed.return_value.name = "test" + addon_installed.return_value.slug = "test" + addon_installed.return_value.url = ( + "https://github.com/home-assistant/addons/test" + ) + addon_installed.return_value.auto_update = True + else: + addon_installed.return_value.name = "test2" + addon_installed.return_value.slug = "test2" + addon_installed.return_value.url = "https://github.com" + addon_installed.return_value.auto_update = False + + return addon_installed.return_value + + addon_installed.side_effect = mock_addon_info + aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -297,38 +275,32 @@ def all_setup_requests( }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/addons/test2/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.8, - "memory_usage": 51941376, - "memory_limit": 3977146368, - "memory_percent": 1.31, - "network_rx": 31338284, - "network_tx": 15692900, - "blk_read": 740077568, - "blk_write": 6004736, - }, - }, - ) + + async def mock_addon_stats(addon: str) -> AddonsStats: + """Mock addon stats for test and test2.""" + if addon == "test2": + return AddonsStats( + cpu_percent=0.8, + memory_usage=51941376, + memory_limit=3977146368, + memory_percent=1.31, + network_rx=31338284, + network_tx=15692900, + blk_read=740077568, + blk_write=6004736, + ) + return AddonsStats( + cpu_percent=0.99, + memory_usage=182611968, + memory_limit=3977146368, + memory_percent=4.59, + network_rx=362570232, + network_tx=82374138, + blk_read=46010945536, + blk_write=15051526144, + ) + + addon_stats.side_effect = mock_addon_stats aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index 09a7475ae10733..3d4644fbfd9755 100644 --- a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -5,8 +5,10 @@ import asyncio from typing import Any from unittest.mock import AsyncMock, call +from uuid import uuid4 from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions, Discovery import pytest from homeassistant.components.hassio.addon_manager import ( @@ -43,7 +45,7 @@ async def test_not_available_raises_exception( addon_info: AsyncMock, ) -> None: """Test addon not available raises exception.""" - addon_store_info.return_value["available"] = False + addon_store_info.return_value.available = False addon_info.return_value.available = False with pytest.raises(AddonError) as err: @@ -61,7 +63,11 @@ async def test_get_addon_discovery_info( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test get addon discovery info.""" - get_addon_discovery_info.return_value = {"config": {"test_key": "test"}} + get_addon_discovery_info.return_value = [ + Discovery( + addon="test_addon", service="", uuid=uuid4(), config={"test_key": "test"} + ) + ] assert await 
addon_manager.async_get_addon_discovery_info() == {"test_key": "test"} @@ -72,8 +78,6 @@ async def test_missing_addon_discovery_info( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test missing addon discovery info.""" - get_addon_discovery_info.return_value = None - with pytest.raises(AddonError): await addon_manager.async_get_addon_discovery_info() @@ -84,7 +88,7 @@ async def test_get_addon_discovery_info_error( addon_manager: AddonManager, get_addon_discovery_info: AsyncMock ) -> None: """Test get addon discovery info raises error.""" - get_addon_discovery_info.side_effect = HassioAPIError("Boom") + get_addon_discovery_info.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: assert await addon_manager.async_get_addon_discovery_info() @@ -137,7 +141,7 @@ async def test_get_addon_info( "addon_store_info_error", "addon_store_info_calls", ), - [(SupervisorError("Boom"), 1, None, 1), (None, 0, HassioAPIError("Boom"), 1)], + [(SupervisorError("Boom"), 1, None, 1), (None, 0, SupervisorError("Boom"), 1)], ) async def test_get_addon_info_error( addon_manager: AddonManager, @@ -170,7 +174,7 @@ async def test_set_addon_options( assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "test_addon", {"options": {"test_key": "test"}} + "test_addon", AddonsOptions(config={"test_key": "test"}) ) @@ -178,7 +182,7 @@ async def test_set_addon_options_error( hass: HomeAssistant, addon_manager: AddonManager, set_addon_options: AsyncMock ) -> None: """Test set addon options raises error.""" - set_addon_options.side_effect = HassioAPIError("Boom") + set_addon_options.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_set_addon_options({"test_key": "test"}) @@ -187,7 +191,7 @@ async def test_set_addon_options_error( assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "test_addon", {"options": {"test_key": "test"}} + "test_addon", AddonsOptions(config={"test_key": "test"}) ) @@ -198,7 +202,7 @@ async def test_install_addon( addon_info: AsyncMock, ) -> None: """Test install addon.""" - addon_store_info.return_value["available"] = True + addon_store_info.return_value.available = True addon_info.return_value.available = True await addon_manager.async_install_addon() @@ -213,9 +217,9 @@ async def test_install_addon_error( addon_info: AsyncMock, ) -> None: """Test install addon raises error.""" - addon_store_info.return_value["available"] = True + addon_store_info.return_value.available = True addon_info.return_value.available = True - install_addon.side_effect = HassioAPIError("Boom") + install_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_install_addon() @@ -266,7 +270,7 @@ async def test_schedule_install_addon_error( install_addon: AsyncMock, ) -> None: """Test schedule install addon raises error.""" - install_addon.side_effect = HassioAPIError("Boom") + install_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_schedule_install_addon() @@ -283,7 +287,7 @@ async def test_schedule_install_addon_logs_error( caplog: pytest.LogCaptureFixture, ) -> None: """Test schedule install addon logs error.""" - install_addon.side_effect = HassioAPIError("Boom") + install_addon.side_effect = SupervisorError("Boom") await addon_manager.async_schedule_install_addon(catch_error=True) @@ -541,7 +545,7 @@ async def 
test_update_addon_error( ) -> None: """Test update addon raises error.""" addon_info.return_value.update_available = True - update_addon.side_effect = HassioAPIError("Boom") + update_addon.side_effect = SupervisorError("Boom") with pytest.raises(AddonError) as err: await addon_manager.async_update_addon() @@ -620,7 +624,7 @@ async def test_schedule_update_addon( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to update the Test add-on: Boom", ), @@ -670,7 +674,7 @@ async def test_schedule_update_addon_error( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, "Failed to update the Test add-on: Boom", ), @@ -790,7 +794,7 @@ async def test_schedule_install_setup_addon( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -801,7 +805,7 @@ async def test_schedule_install_setup_addon( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -859,7 +863,7 @@ async def test_schedule_install_setup_addon_error( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -870,7 +874,7 @@ async def test_schedule_install_setup_addon_error( ( None, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -956,7 +960,7 @@ async def test_schedule_setup_addon( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, @@ -1005,7 +1009,7 @@ async def test_schedule_setup_addon_error( ), [ ( - HassioAPIError("Boom"), + SupervisorError("Boom"), 1, None, 0, diff --git a/tests/components/hassio/test_addon_panel.py b/tests/components/hassio/test_addon_panel.py index f7407152f7e7cc..2c3552c8d08627 100644 --- a/tests/components/hassio/test_addon_panel.py +++ b/tests/components/hassio/test_addon_panel.py @@ -1,7 +1,7 @@ """Test add-on panel.""" from http import HTTPStatus -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -13,10 +13,11 @@ @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, supervisor_is_connected: AsyncMock +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/homeassistant/info", diff --git a/tests/components/hassio/test_binary_sensor.py b/tests/components/hassio/test_binary_sensor.py index 33cfd448b445a2..9878dd67a21812 100644 --- a/tests/components/hassio/test_binary_sensor.py +++ b/tests/components/hassio/test_binary_sensor.py @@ -10,6 +10,8 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component +from .common import MOCK_REPOSITORIES, MOCK_STORE_ADDONS + from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -17,10 +19,16 @@ @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_changelog: AsyncMock, + addon_stats: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) 
aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -33,13 +41,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -105,22 +106,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -153,33 +138,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -192,6 +153,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: ) +@pytest.mark.parametrize( + ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] +) @pytest.mark.parametrize( ("entity_id", "expected", "addon_state"), [ diff --git a/tests/components/hassio/test_config_flow.py b/tests/components/hassio/test_config_flow.py index 1153203817de47..48c1a06f81ed0e 100644 --- a/tests/components/hassio/test_config_flow.py +++ b/tests/components/hassio/test_config_flow.py @@ -38,4 +38,4 @@ async def test_multiple_entries(hass: HomeAssistant) -> None: DOMAIN, context={"source": "system"} ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/hassio/test_diagnostics.py b/tests/components/hassio/test_diagnostics.py index 0fcf7933ac00f8..c95cde67b8aac0 100644 --- a/tests/components/hassio/test_diagnostics.py +++ b/tests/components/hassio/test_diagnostics.py @@ -1,7 +1,7 @@ """Test Supervisor diagnostics.""" import os -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -18,10 +18,16 @@ @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - 
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -34,13 +40,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -110,22 +109,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -158,33 +141,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_discovery.py b/tests/components/hassio/test_discovery.py index a0851ccd9f6efb..ba6338f84e29c5 100644 --- a/tests/components/hassio/test_discovery.py +++ b/tests/components/hassio/test_discovery.py @@ -3,19 +3,28 @@ from collections.abc import Generator from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch +from uuid import uuid4 +from aiohasupervisor.models import Discovery from aiohttp.test_utils import TestClient import pytest from homeassistant import config_entries -from homeassistant.components.hassio.discovery import HassioServiceInfo from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant +from homeassistant.helpers.discovery_flow import DiscoveryKey +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.setup import async_setup_component -from tests.common import MockModule, mock_config_flow, mock_integration, mock_platform +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, +) from tests.test_util.aiohttp import AiohttpClientMocker @@ -41,42 +50,34 @@ class MqttFlow(config_entries.ConfigFlow): @pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, mock_mqtt: type[config_entries.ConfigFlow], addon_installed: AsyncMock, + 
get_addon_discovery_info: AsyncMock, ) -> None: """Test startup and discovery after event.""" - aioclient_mock.get( - "http://127.0.0.1/discovery", - json={ - "result": "ok", - "data": { - "discovery": [ - { - "service": "mqtt", - "uuid": "test", - "addon": "mosquitto", - "config": { - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - }, - } - ] + get_addon_discovery_info.return_value = [ + Discovery( + addon="mosquitto", + service="mqtt", + uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", }, - }, - ) + ) + ] addon_installed.return_value.name = "Mosquitto Test" - assert aioclient_mock.call_count == 0 + assert get_addon_discovery_info.call_count == 0 hass.bus.async_fire(EVENT_HOMEASSISTANT_START) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() - assert aioclient_mock.call_count == 1 + assert get_addon_discovery_info.call_count == 1 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -90,7 +91,7 @@ async def test_hassio_discovery_startup( }, name="Mosquitto Test", slug="mosquitto", - uuid="test", + uuid=uuid.hex, ) ) @@ -101,34 +102,27 @@ async def test_hassio_discovery_startup_done( aioclient_mock: AiohttpClientMocker, mock_mqtt: type[config_entries.ConfigFlow], addon_installed: AsyncMock, + get_addon_discovery_info: AsyncMock, ) -> None: """Test startup and discovery with hass discovery.""" aioclient_mock.post( "http://127.0.0.1/supervisor/options", json={"result": "ok", "data": {}}, ) - aioclient_mock.get( - "http://127.0.0.1/discovery", - json={ - "result": "ok", - "data": { - "discovery": [ - { - "service": "mqtt", - "uuid": "test", - "addon": "mosquitto", - "config": { - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - }, - } - ] + get_addon_discovery_info.return_value = [ + Discovery( + addon="mosquitto", + service="mqtt", + uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", }, - }, - ) + ) + ] addon_installed.return_value.name = "Mosquitto Test" with ( @@ -145,7 +139,7 @@ async def test_hassio_discovery_startup_done( await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() - assert aioclient_mock.call_count == 1 + assert get_addon_discovery_info.call_count == 1 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -159,49 +153,43 @@ async def test_hassio_discovery_startup_done( }, name="Mosquitto Test", slug="mosquitto", - uuid="test", + uuid=uuid.hex, ) ) async def test_hassio_discovery_webhook( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, hassio_client: TestClient, mock_mqtt: type[config_entries.ConfigFlow], addon_installed: AsyncMock, + get_discovery_message: AsyncMock, ) -> None: """Test discovery webhook.""" - aioclient_mock.get( - "http://127.0.0.1/discovery/testuuid", - json={ - "result": "ok", - "data": { - "service": "mqtt", - "uuid": "test", - "addon": "mosquitto", - "config": { - "broker": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", - }, - }, + get_discovery_message.return_value = Discovery( + addon="mosquitto", + service="mqtt", + 
uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", }, ) addon_installed.return_value.name = "Mosquitto Test" resp = await hassio_client.post( - "/api/hassio_push/discovery/testuuid", - json={"addon": "mosquitto", "service": "mqtt", "uuid": "testuuid"}, + f"/api/hassio_push/discovery/{uuid!s}", + json={"addon": "mosquitto", "service": "mqtt", "uuid": str(uuid)}, ) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert resp.status == HTTPStatus.OK - assert aioclient_mock.call_count == 1 + assert get_discovery_message.call_count == 1 assert mock_mqtt.async_step_hassio.called mock_mqtt.async_step_hassio.assert_called_with( HassioServiceInfo( @@ -215,6 +203,153 @@ async def test_hassio_discovery_webhook( }, name="Mosquitto Test", slug="mosquitto", - uuid="test", + uuid=uuid.hex, ) ) + + +TEST_UUID = str(uuid4()) + + +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + ), + [ + # Matching discovery key + ( + "mock-domain", + {"hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),)}, + ), + # Matching discovery key + ( + "mock-domain", + { + "hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),), + "other": (DiscoveryKey(domain="other", key="blah", version=1),), + }, + ), + # Matching discovery key, other domain + # Note: Rediscovery is not currently restricted to the domain of the removed + # entry. Such a check can be added if needed. + ( + "comp", + {"hassio": (DiscoveryKey(domain="hassio", key=TEST_UUID, version=1),)}, + ), + ], +) +@pytest.mark.parametrize( + "entry_source", + [ + config_entries.SOURCE_HASSIO, + config_entries.SOURCE_IGNORE, + config_entries.SOURCE_USER, + ], +) +async def test_hassio_rediscover( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hassio_client: TestClient, + addon_installed: AsyncMock, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + get_addon_discovery_info: AsyncMock, + get_discovery_message: AsyncMock, +) -> None: + """Test we reinitiate flows when an ignored config entry is removed.""" + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id="mock-unique-id", + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + get_discovery_message.return_value = Discovery( + addon="mosquitto", + service="mqtt", + uuid=(uuid := uuid4()), + config={ + "broker": "mock-broker", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", + }, + ) + + expected_context = { + "discovery_key": DiscoveryKey(domain="hassio", key=uuid.hex, version=1), + "source": config_entries.SOURCE_HASSIO, + } + + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_init.mock_calls) == 1 + assert mock_init.mock_calls[0][1][0] == "mqtt" + assert mock_init.mock_calls[0][2]["context"] == expected_context + + +@pytest.mark.usefixtures("mock_async_zeroconf") +@pytest.mark.parametrize( + ( + "entry_domain", + "entry_discovery_keys", + "entry_source", + "entry_unique_id", + ), + [ + # Discovery key from other domain + ( + "mock-domain", + {"bluetooth": 
(DiscoveryKey(domain="bluetooth", key="test", version=1),)}, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + # Discovery key from the future + ( + "mock-domain", + {"hassio": (DiscoveryKey(domain="hassio", key="test", version=2),)}, + config_entries.SOURCE_IGNORE, + "mock-unique-id", + ), + ], +) +async def test_hassio_rediscover_no_match( + hass: HomeAssistant, + hassio_client: TestClient, + entry_domain: str, + entry_discovery_keys: dict[str, tuple[DiscoveryKey, ...]], + entry_source: str, + entry_unique_id: str, +) -> None: + """Test we don't reinitiate flows when a non matching config entry is removed.""" + + mock_integration(hass, MockModule(entry_domain)) + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + entry = MockConfigEntry( + domain=entry_domain, + discovery_keys=entry_discovery_keys, + unique_id=entry_unique_id, + state=config_entries.ConfigEntryState.LOADED, + source=entry_source, + ) + entry.add_to_hass(hass) + + with patch.object(hass.config_entries.flow, "async_init") as mock_init: + await hass.config_entries.async_remove(entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_init.mock_calls) == 0 diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index 1fb1e44c46d419..56f0dcb706c4df 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -4,7 +4,6 @@ from typing import Any, Literal -import aiohttp from aiohttp import hdrs, web import pytest @@ -16,36 +15,6 @@ from tests.test_util.aiohttp import AiohttpClientMocker -async def test_api_ping( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping.""" - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) - - assert await hassio_handler.is_connected() - assert aioclient_mock.call_count == 1 - - -async def test_api_ping_error( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping error.""" - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "error"}) - - assert not (await hassio_handler.is_connected()) - assert aioclient_mock.call_count == 1 - - -async def test_api_ping_exeption( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API ping exception.""" - aioclient_mock.get("http://127.0.0.1/supervisor/ping", exc=aiohttp.ClientError()) - - assert not (await hassio_handler.is_connected()) - assert aioclient_mock.call_count == 1 - - async def test_api_info( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -181,40 +150,6 @@ async def test_api_core_info_error( assert aioclient_mock.call_count == 1 -async def test_api_homeassistant_stop( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API Home Assistant stop.""" - aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"}) - - assert await hassio_handler.stop_homeassistant() - assert aioclient_mock.call_count == 1 - - -async def test_api_homeassistant_restart( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API Home Assistant restart.""" - aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"}) - - assert await hassio_handler.restart_homeassistant() - assert aioclient_mock.call_count == 1 - - -async def test_api_addon_stats( - hassio_handler: HassIO, aioclient_mock: 
AiohttpClientMocker -) -> None: - """Test setup with API Add-on stats.""" - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={"result": "ok", "data": {"memory_percent": 0.01}}, - ) - - data = await hassio_handler.get_addon_stats("test") - assert data["memory_percent"] == 0.01 - assert aioclient_mock.call_count == 1 - - async def test_api_core_stats( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -243,34 +178,6 @@ async def test_api_supervisor_stats( assert aioclient_mock.call_count == 1 -async def test_api_discovery_message( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API discovery message.""" - aioclient_mock.get( - "http://127.0.0.1/discovery/test", - json={"result": "ok", "data": {"service": "mqtt"}}, - ) - - data = await hassio_handler.get_discovery_message("test") - assert data["service"] == "mqtt" - assert aioclient_mock.call_count == 1 - - -async def test_api_retrieve_discovery( - hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker -) -> None: - """Test setup with API discovery message.""" - aioclient_mock.get( - "http://127.0.0.1/discovery", - json={"result": "ok", "data": {"discovery": [{"service": "mqtt"}]}}, - ) - - data = await hassio_handler.retrieve_discovery_messages() - assert data["discovery"][-1]["service"] == "mqtt" - assert aioclient_mock.call_count == 1 - - async def test_api_ingress_panels( hassio_handler: HassIO, aioclient_mock: AiohttpClientMocker ) -> None: @@ -301,8 +208,7 @@ async def test_api_ingress_panels( @pytest.mark.parametrize( ("api_call", "method", "payload"), [ - ("retrieve_discovery_messages", "GET", None), - ("refresh_updates", "POST", None), + ("get_network_info", "GET", None), ("update_diagnostics", "POST", True), ], ) diff --git a/tests/components/hassio/test_http.py b/tests/components/hassio/test_http.py index 404c047a56c728..8ed59bc78d1d0e 100644 --- a/tests/components/hassio/test_http.py +++ b/tests/components/hassio/test_http.py @@ -82,7 +82,9 @@ async def test_forward_request_onboarded_user_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), + ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), + ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_onboarded_user_unallowed_paths( @@ -152,7 +154,9 @@ async def test_forward_request_onboarded_noauth_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), + ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), + ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_onboarded_noauth_unallowed_paths( @@ -265,7 +269,9 @@ async def test_forward_request_not_onboarded_unallowed_methods( # Unauthenticated path ("supervisor/info", HTTPStatus.UNAUTHORIZED), ("supervisor/logs", HTTPStatus.UNAUTHORIZED), + ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED), ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED), + ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED), ], ) async def test_forward_request_not_onboarded_unallowed_paths( @@ -292,7 +298,9 @@ async def test_forward_request_not_onboarded_unallowed_paths( ("addons/bl_b392/icon", False), ("backups/1234abcd/info", True), ("supervisor/logs", True), + ("supervisor/logs/follow", True), ("addons/bl_b392/logs", True), + 
("addons/bl_b392/logs/follow", True), ("addons/bl_b392/changelog", True), ("addons/bl_b392/documentation", True), ], @@ -494,3 +502,70 @@ async def test_entrypoint_cache_control( assert resp1.headers["Cache-Control"] == "no-store, max-age=0" assert "Cache-Control" not in resp2.headers + + +async def test_no_follow_logs_compress( + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that we do not compress follow logs.""" + aioclient_mock.get("http://127.0.0.1/supervisor/logs/follow") + aioclient_mock.get("http://127.0.0.1/supervisor/logs") + + resp1 = await hassio_client.get("/api/hassio/supervisor/logs/follow") + resp2 = await hassio_client.get("/api/hassio/supervisor/logs") + + # Check we got right response + assert resp1.status == HTTPStatus.OK + assert resp1.headers.get("Content-Encoding") is None + + assert resp2.status == HTTPStatus.OK + assert resp2.headers.get("Content-Encoding") == "deflate" + + +async def test_forward_range_header_for_logs( + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker +) -> None: + """Test that we forward the Range header for logs.""" + aioclient_mock.get("http://127.0.0.1/host/logs") + aioclient_mock.get("http://127.0.0.1/host/logs/boots/-1") + aioclient_mock.get("http://127.0.0.1/host/logs/boots/-2/follow?lines=100") + aioclient_mock.get("http://127.0.0.1/addons/123abc_esphome/logs") + aioclient_mock.get("http://127.0.0.1/addons/123abc_esphome/logs/follow") + aioclient_mock.get("http://127.0.0.1/backups/1234abcd/download") + + test_range = ":-100:50" + + host_resp = await hassio_client.get( + "/api/hassio/host/logs", headers={"Range": test_range} + ) + host_resp2 = await hassio_client.get( + "/api/hassio/host/logs/boots/-1", headers={"Range": test_range} + ) + host_resp3 = await hassio_client.get( + "/api/hassio/host/logs/boots/-2/follow?lines=100", headers={"Range": test_range} + ) + addon_resp = await hassio_client.get( + "/api/hassio/addons/123abc_esphome/logs", headers={"Range": test_range} + ) + addon_resp2 = await hassio_client.get( + "/api/hassio/addons/123abc_esphome/logs/follow", headers={"Range": test_range} + ) + backup_resp = await hassio_client.get( + "/api/hassio/backups/1234abcd/download", headers={"Range": test_range} + ) + + assert host_resp.status == HTTPStatus.OK + assert host_resp2.status == HTTPStatus.OK + assert host_resp3.status == HTTPStatus.OK + assert addon_resp.status == HTTPStatus.OK + assert addon_resp2.status == HTTPStatus.OK + assert backup_resp.status == HTTPStatus.OK + + assert len(aioclient_mock.mock_calls) == 6 + + assert aioclient_mock.mock_calls[0][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[1][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[2][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[3][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[4][-1].get("Range") == test_range + assert aioclient_mock.mock_calls[5][-1].get("Range") is None diff --git a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index 13626ef19d04bc..5c11370ae74cd1 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py @@ -1,34 +1,42 @@ """The tests for the hassio component.""" from datetime import timedelta +import logging import os from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, patch +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsStats import pytest from voluptuous import 
Invalid from homeassistant.auth.const import GROUP_ID_ADMIN -from homeassistant.components import frontend +from homeassistant.components import frontend, hassio from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.hassio import ( ADDONS_COORDINATOR, DOMAIN, STORAGE_KEY, - async_get_addon_store_info, get_core_info, + get_supervisor_ip, hostname_from_addon_slug, - is_hassio, + is_hassio as deprecated_is_hassio, ) from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY -from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, issue_registry as ir +from homeassistant.helpers.hassio import is_hassio +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + import_and_test_deprecated_constant, +) from tests.test_util.aiohttp import AiohttpClientMocker MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"} @@ -52,10 +60,17 @@ def os_info(extra_os_info): @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + os_info: AsyncMock, + store_info: AsyncMock, + addon_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -68,13 +83,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -162,81 +170,41 @@ def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/addons/test2/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.8, - "memory_usage": 51941376, - "memory_limit": 3977146368, - "memory_percent": 1.31, - "network_rx": 31338284, - "network_tx": 15692900, - "blk_read": 740077568, - "blk_write": 6004736, - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/addons/test3/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.8, - "memory_usage": 51941376, - "memory_limit": 3977146368, - "memory_percent": 1.31, - "network_rx": 31338284, - "network_tx": 15692900, - "blk_read": 740077568, - "blk_write": 6004736, - }, - }, - ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - 
"http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) + + async def mock_addon_stats(addon: str) -> AddonsStats: + """Mock addon stats for test and test2.""" + if addon in {"test2", "test3"}: + return AddonsStats( + cpu_percent=0.8, + memory_usage=51941376, + memory_limit=3977146368, + memory_percent=1.31, + network_rx=31338284, + network_tx=15692900, + blk_read=740077568, + blk_write=6004736, + ) + return AddonsStats( + cpu_percent=0.99, + memory_usage=182611968, + memory_limit=3977146368, + memory_percent=4.59, + network_rx=362570232, + network_tx=82374138, + blk_read=46010945536, + blk_write=15051526144, + ) + + addon_stats.side_effect = mock_addon_stats + + def mock_addon_info(slug: str): + addon_info.return_value.auto_update = slug == "test" + return addon_info.return_value + + addon_info.side_effect = mock_addon_info aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -250,7 +218,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker, os_info) -> None: async def test_setup_api_ping( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API ping.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -258,7 +228,7 @@ async def test_setup_api_ping( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 assert get_core_info(hass)["version_latest"] == "1.0.0" assert is_hassio(hass) @@ -293,7 +263,9 @@ async def test_setup_api_panel( async def test_setup_api_push_api_data( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -303,14 +275,16 @@ async def test_setup_api_push_api_data( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 9999 - assert "watchdog" not in aioclient_mock.mock_calls[1][2] + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 9999 + assert "watchdog" not in aioclient_mock.mock_calls[0][2] async def test_setup_api_push_api_data_server_host( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push with active server host.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -322,16 +296,17 @@ async def test_setup_api_push_api_data_server_host( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 
- assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 9999 - assert not aioclient_mock.mock_calls[1][2]["watchdog"] + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 9999 + assert not aioclient_mock.mock_calls[0][2]["watchdog"] async def test_setup_api_push_api_data_default( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_storage: dict[str, Any], + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" with patch.dict(os.environ, MOCK_ENVIRON): @@ -339,10 +314,10 @@ async def test_setup_api_push_api_data_default( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 8123 - refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"] + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 8123 + refresh_token = aioclient_mock.mock_calls[0][2]["refresh_token"] hassio_user = await hass.auth.async_get_user( hass_storage[STORAGE_KEY]["data"]["hassio_user"] ) @@ -409,6 +384,7 @@ async def test_setup_api_existing_hassio_user( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_storage: dict[str, Any], + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" user = await hass.auth.async_create_system_user("Hass.io test") @@ -419,14 +395,16 @@ async def test_setup_api_existing_hassio_user( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert not aioclient_mock.mock_calls[1][2]["ssl"] - assert aioclient_mock.mock_calls[1][2]["port"] == 8123 - assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert not aioclient_mock.mock_calls[0][2]["ssl"] + assert aioclient_mock.mock_calls[0][2]["port"] == 8123 + assert aioclient_mock.mock_calls[0][2]["refresh_token"] == token.token async def test_setup_core_push_timezone( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" hass.config.time_zone = "testzone" @@ -436,8 +414,8 @@ async def test_setup_core_push_timezone( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 - assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone" + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 + assert aioclient_mock.mock_calls[1][2]["timezone"] == "testzone" with patch("homeassistant.util.dt.set_default_time_zone"): await hass.config.async_update(time_zone="America/New_York") @@ -446,7 +424,9 @@ async def test_setup_core_push_timezone( async def test_setup_hassio_no_additional_data( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Test setup with API push default data.""" with ( @@ -457,7 +437,7 @@ async def test_setup_hassio_no_additional_data( await hass.async_block_till_done() assert result - assert aioclient_mock.call_count == 20 + assert aioclient_mock.call_count 
+ len(supervisor_client.mock_calls) == 20 assert aioclient_mock.mock_calls[-1][3]["Authorization"] == "Bearer 123456" @@ -469,16 +449,13 @@ async def test_fail_setup_without_environ_var(hass: HomeAssistant) -> None: async def test_warn_when_cannot_connect( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + supervisor_is_connected: AsyncMock, ) -> None: """Fail warn when we cannot connect.""" - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), - ): + supervisor_is_connected.side_effect = SupervisorError + with patch.dict(os.environ, MOCK_ENVIRON): result = await async_setup_component(hass, "hassio", {}) assert result @@ -509,16 +486,14 @@ async def test_service_calls( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - addon_installed, + supervisor_client: AsyncMock, + addon_installed: AsyncMock, + supervisor_is_connected: AsyncMock, + issue_registry: ir.IssueRegistry, ) -> None: """Call service and check the API calls behind that.""" - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), - ): + supervisor_is_connected.side_effect = SupervisorError + with patch.dict(os.environ, MOCK_ENVIRON): assert await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -542,19 +517,20 @@ async def test_service_calls( await hass.services.async_call("hassio", "addon_stop", {"addon": "test"}) await hass.services.async_call("hassio", "addon_restart", {"addon": "test"}) await hass.services.async_call("hassio", "addon_update", {"addon": "test"}) + assert (DOMAIN, "update_service_deprecated") in issue_registry.issues await hass.services.async_call( "hassio", "addon_stdin", {"addon": "test", "input": "test"} ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 22 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 25 assert aioclient_mock.mock_calls[-1][2] == "test" await hass.services.async_call("hassio", "host_shutdown", {}) await hass.services.async_call("hassio", "host_reboot", {}) await hass.async_block_till_done() - assert aioclient_mock.call_count == 24 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 27 await hass.services.async_call("hassio", "backup_full", {}) await hass.services.async_call( @@ -569,7 +545,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 26 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 03:48:00", "homeassistant": True, @@ -594,7 +570,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 28 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31 assert aioclient_mock.mock_calls[-1][2] == { "addons": ["test"], "folders": ["ssl"], @@ -613,7 +589,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 29 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 32 assert aioclient_mock.mock_calls[-1][2] == { "name": "backup_name", "location": "backup_share", @@ -629,7 +605,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 30 + assert aioclient_mock.call_count 
+ len(supervisor_client.mock_calls) == 33 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 03:48:00", "location": None, @@ -648,7 +624,7 @@ async def test_service_calls( ) await hass.async_block_till_done() - assert aioclient_mock.call_count == 32 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 35 assert aioclient_mock.mock_calls[-1][2] == { "name": "2021-11-13 11:48:00", "location": None, @@ -658,15 +634,11 @@ async def test_service_calls( async def test_invalid_service_calls( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, + supervisor_is_connected: AsyncMock, ) -> None: """Call service with invalid input and check that it raises.""" - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), - ): + supervisor_is_connected.side_effect = SupervisorError + with patch.dict(os.environ, MOCK_ENVIRON): assert await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -683,6 +655,7 @@ async def test_invalid_service_calls( async def test_addon_service_call_with_complex_slug( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, + supervisor_is_connected: AsyncMock, ) -> None: """Addon slugs can have ., - and _, confirm that passes validation.""" supervisor_mock_data = { @@ -702,12 +675,9 @@ async def test_addon_service_call_with_complex_slug( }, ], } + supervisor_is_connected.side_effect = SupervisorError with ( patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=None, - ), patch( "homeassistant.components.hassio.HassIO.get_supervisor_info", return_value=supervisor_mock_data, @@ -721,7 +691,9 @@ async def test_addon_service_call_with_complex_slug( @pytest.mark.usefixtures("hassio_env") async def test_service_calls_core( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, ) -> None: """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -733,12 +705,12 @@ async def test_service_calls_core( await hass.services.async_call("homeassistant", "stop") await hass.async_block_till_done() - assert aioclient_mock.call_count == 5 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 6 await hass.services.async_call("homeassistant", "check_config") await hass.async_block_till_done() - assert aioclient_mock.call_count == 5 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 6 with patch( "homeassistant.config.async_check_ha_config_file", return_value=None @@ -747,7 +719,7 @@ async def test_service_calls_core( await hass.async_block_till_done() assert mock_check_config.called - assert aioclient_mock.call_count == 6 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 7 @pytest.mark.usefixtures("addon_installed") @@ -932,129 +904,108 @@ async def test_device_registry_calls( @pytest.mark.usefixtures("addon_installed") async def test_coordinator_updates( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, supervisor_client: AsyncMock ) -> None: """Test coordinator updates.""" await async_setup_component(hass, "homeassistant", {}) - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.refresh_updates" - ) as refresh_updates_mock, - ): + with 
patch.dict(os.environ, MOCK_ENVIRON): config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # Initial refresh, no update refresh call - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.assert_not_called() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) - await hass.async_block_till_done() + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() - # Scheduled refresh, no update refresh call - assert refresh_updates_mock.call_count == 0 + # Scheduled refresh, no update refresh call + supervisor_client.refresh_updates.assert_not_called() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - assert refresh_updates_mock.call_count == 0 - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + supervisor_client.refresh_updates.assert_not_called() + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + supervisor_client.refresh_updates.assert_called_once() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - side_effect=HassioAPIError("Unknown"), - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 - assert "Error on Supervisor API: Unknown" in caplog.text + supervisor_client.refresh_updates.reset_mock() + supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown") + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + supervisor_client.refresh_updates.assert_called_once() + assert "Error on Supervisor API: Unknown" in caplog.text @pytest.mark.usefixtures("entity_registry_enabled_by_default", "addon_installed") async def test_coordinator_updates_stats_entities_enabled( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, + supervisor_client: AsyncMock, ) -> 
None: """Test coordinator updates with stats entities enabled.""" await async_setup_component(hass, "homeassistant", {}) - with ( - patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.refresh_updates" - ) as refresh_updates_mock, - ): + with patch.dict(os.environ, MOCK_ENVIRON): config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # Initial refresh without stats - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.assert_not_called() # Refresh with stats once we know which ones are needed async_fire_time_changed( hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) ) await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.assert_called_once() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - assert refresh_updates_mock.call_count == 0 + supervisor_client.refresh_updates.reset_mock() + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() + supervisor_client.refresh_updates.assert_not_called() + + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + supervisor_client.refresh_updates.assert_not_called() # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer async_fire_time_changed( @@ -1062,28 +1013,26 @@ async def test_coordinator_updates_stats_entities_enabled( ) await hass.async_block_till_done() - with patch( - "homeassistant.components.hassio.HassIO.refresh_updates", - side_effect=HassioAPIError("Unknown"), - ) as refresh_updates_mock: - await hass.services.async_call( - "homeassistant", - "update_entity", - { - "entity_id": [ - "update.home_assistant_core_update", - "update.home_assistant_supervisor_update", - ] - }, - blocking=True, - ) - # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer - async_fire_time_changed( - hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) - ) - await hass.async_block_till_done() - assert refresh_updates_mock.call_count == 1 - assert "Error on Supervisor API: Unknown" in caplog.text + supervisor_client.refresh_updates.reset_mock() + supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown") + await hass.services.async_call( + "homeassistant", + "update_entity", + { + "entity_id": [ + "update.home_assistant_core_update", + "update.home_assistant_supervisor_update", + ] + }, + blocking=True, + ) + # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer + async_fire_time_changed( + hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY) + ) + await hass.async_block_till_done() + supervisor_client.refresh_updates.assert_called_once() + assert "Error on Supervisor API: Unknown" in caplog.text 
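The hunks above drop the patch("homeassistant.components.hassio.HassIO.refresh_updates") context managers in favour of configuring the supervisor_client fixture directly. As a minimal sketch of that pattern only, assuming the fixture yields an AsyncMock wrapping the aiohasupervisor client exactly as these tests do (simulate_refresh_failure is a hypothetical helper for illustration, not part of this patch):

from unittest.mock import AsyncMock

from aiohasupervisor import SupervisorError


def simulate_refresh_failure(supervisor_client: AsyncMock) -> None:
    # Hypothetical helper: clear earlier call records, then make the next
    # refresh_updates call raise, matching how the converted tests set
    # reset_mock() and side_effect on the fixture.
    supervisor_client.refresh_updates.reset_mock()
    supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown")

After arming the mock this way, a test fires the debounced refresh (async_fire_time_changed followed by async_block_till_done) and then checks supervisor_client.refresh_updates.assert_called_once(), mirroring the assertions used in the converted coordinator tests above.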
@pytest.mark.parametrize( @@ -1103,7 +1052,10 @@ async def test_coordinator_updates_stats_entities_enabled( ], ) async def test_setup_hardware_integration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, integration + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, + integration, ) -> None: """Test setup initiates hardware integration.""" @@ -1118,26 +1070,10 @@ async def test_setup_hardware_integration( await hass.async_block_till_done(wait_background_tasks=True) assert result - assert aioclient_mock.call_count == 20 + assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 20 assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("hassio_stubs") -async def test_get_store_addon_info( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test get store add-on info from Supervisor API.""" - aioclient_mock.clear_requests() - aioclient_mock.get( - "http://127.0.0.1/store/addons/test", - json={"result": "ok", "data": {"name": "bla"}}, - ) - - data = await async_get_addon_store_info(hass, "test") - assert data["name"] == "bla" - assert aioclient_mock.call_count == 1 - - def test_hostname_from_addon_slug() -> None: """Test hostname_from_addon_slug.""" assert hostname_from_addon_slug("mqtt") == "mqtt" @@ -1145,3 +1081,62 @@ def test_hostname_from_addon_slug() -> None: hostname_from_addon_slug("core_silabs_multiprotocol") == "core-silabs-multiprotocol" ) + + +def test_deprecated_function_is_hassio( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling deprecated_is_hassio function will create log entry.""" + + deprecated_is_hassio(hass) + assert caplog.record_tuples == [ + ( + "homeassistant.components.hassio", + logging.WARNING, + "is_hassio is a deprecated function which will be removed in HA Core 2025.11. Use homeassistant.helpers.hassio.is_hassio instead", + ) + ] + + +def test_deprecated_function_get_supervisor_ip( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling get_supervisor_ip function will create log entry.""" + + get_supervisor_ip() + assert caplog.record_tuples == [ + ( + "homeassistant.helpers.hassio", + logging.WARNING, + "get_supervisor_ip is a deprecated function which will be removed in HA Core 2025.11. 
Use homeassistant.helpers.hassio.get_supervisor_ip instead", + ) + ] + + +@pytest.mark.parametrize( + ("constant_name", "replacement_name", "replacement"), + [ + ( + "HassioServiceInfo", + "homeassistant.helpers.service_info.hassio.HassioServiceInfo", + HassioServiceInfo, + ), + ], +) +def test_deprecated_constants( + caplog: pytest.LogCaptureFixture, + constant_name: str, + replacement_name: str, + replacement: Any, +) -> None: + """Test deprecated automation constants.""" + import_and_test_deprecated_constant( + caplog, + hassio, + constant_name, + replacement_name, + replacement, + "2025.11", + ) diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index 578279dbf79b33..7ce11a18fb5094 100644 --- a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -4,11 +4,28 @@ from collections.abc import Generator from datetime import timedelta -from http import HTTPStatus import os from typing import Any -from unittest.mock import ANY, patch +from unittest.mock import ANY, AsyncMock, patch +from uuid import UUID, uuid4 +from aiohasupervisor import ( + SupervisorBadRequestError, + SupervisorError, + SupervisorTimeoutError, +) +from aiohasupervisor.models import ( + Check, + CheckType, + ContextType, + Issue, + IssueType, + ResolutionInfo, + Suggestion, + SuggestionType, + UnhealthyReason, + UnsupportedReason, +) from freezegun.api import FrozenDateTimeFactory import pytest @@ -18,7 +35,6 @@ from .test_init import MOCK_ENVIRON -from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse from tests.typing import WebSocketGenerator @@ -36,49 +52,41 @@ def fixture_supervisor_environ() -> Generator[None]: def mock_resolution_info( - aioclient_mock: AiohttpClientMocker, - unsupported: list[str] | None = None, - unhealthy: list[str] | None = None, - issues: list[dict[str, str]] | None = None, - suggestion_result: str = "ok", + supervisor_client: AsyncMock, + unsupported: list[UnsupportedReason] | None = None, + unhealthy: list[UnhealthyReason] | None = None, + issues: list[Issue] | None = None, + suggestions_by_issue: dict[UUID, list[Suggestion]] | None = None, + suggestion_result: SupervisorError | None = None, ) -> None: """Mock resolution/info endpoint with unsupported/unhealthy reasons and/or issues.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": unsupported or [], - "unhealthy": unhealthy or [], - "suggestions": [], - "issues": [ - {k: v for k, v in issue.items() if k != "suggestions"} - for issue in issues - ] - if issues - else [], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, + supervisor_client.resolution.info.return_value = ResolutionInfo( + unsupported=unsupported or [], + unhealthy=unhealthy or [], + issues=issues or [], + suggestions=[ + suggestion + for issue_list in suggestions_by_issue.values() + for suggestion in issue_list + ] + if suggestions_by_issue + else [], + checks=[ + Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), + Check(enabled=True, slug=CheckType.FREE_SPACE), + ], ) - if issues: - suggestions_by_issue = { - issue["uuid"]: issue.get("suggestions", []) for issue in issues - } - for issue_uuid, suggestions in suggestions_by_issue.items(): - aioclient_mock.get( - f"http://127.0.0.1/resolution/issue/{issue_uuid}/suggestions", - json={"result": "ok", "data": {"suggestions": suggestions}}, - ) - for suggestion in suggestions: - 
aioclient_mock.post( - f"http://127.0.0.1/resolution/suggestion/{suggestion['uuid']}", - json={"result": suggestion_result}, - ) + if suggestions_by_issue: + + async def mock_suggestions_for_issue(uuid: UUID) -> list[Suggestion]: + """Mock of suggestions for issue api.""" + return suggestions_by_issue.get(uuid, []) + + supervisor_client.resolution.suggestions_for_issue.side_effect = ( + mock_suggestions_for_issue + ) + supervisor_client.resolution.apply_suggestion.side_effect = suggestion_result def assert_repair_in_list( @@ -134,11 +142,13 @@ def assert_issue_repair_in_list( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unhealthy systems.""" - mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) + mock_resolution_info( + supervisor_client, unhealthy=[UnhealthyReason.DOCKER, UnhealthyReason.SETUP] + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -156,11 +166,14 @@ async def test_unhealthy_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unsupported systems.""" - mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) + mock_resolution_info( + supervisor_client, + unsupported=[UnsupportedReason.CONTENT_TRUST, UnsupportedReason.OS], + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -180,11 +193,11 @@ async def test_unsupported_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test unhealthy issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -237,11 +250,11 @@ async def test_unhealthy_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test unsupported issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -294,21 +307,21 @@ async def test_unsupported_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_reset_issues_supervisor_restart( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """All issues reset on supervisor restart.""" mock_resolution_info( - aioclient_mock, - unsupported=["os"], - unhealthy=["docker"], + supervisor_client, + unsupported=[UnsupportedReason.OS], + unhealthy=[UnhealthyReason.DOCKER], issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - } + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(uuid := uuid4()), + ) ], ) @@ -325,15 +338,14 @@ async def test_reset_issues_supervisor_restart( 
assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=uuid.hex, context="system", type_="reboot_required", fixable=False, reference=None, ) - aioclient_mock.clear_requests() - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) await client.send_json( { "id": 2, @@ -358,11 +370,15 @@ async def test_reset_issues_supervisor_restart( @pytest.mark.usefixtures("all_setup_requests") async def test_reasons_added_and_removed( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test an unsupported/unhealthy reasons being added and removed at same time.""" - mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) + mock_resolution_info( + supervisor_client, + unsupported=[UnsupportedReason.OS], + unhealthy=[UnhealthyReason.DOCKER], + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -376,9 +392,10 @@ async def test_reasons_added_and_removed( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="docker") assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") - aioclient_mock.clear_requests() mock_resolution_info( - aioclient_mock, unsupported=["content_trust"], unhealthy=["setup"] + supervisor_client, + unsupported=[UnsupportedReason.CONTENT_TRUST], + unhealthy=[UnhealthyReason.SETUP], ) await client.send_json( { @@ -408,12 +425,14 @@ async def test_reasons_added_and_removed( @pytest.mark.usefixtures("all_setup_requests") async def test_ignored_unsupported_skipped( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Unsupported reasons which have an identical unhealthy reason are ignored.""" mock_resolution_info( - aioclient_mock, unsupported=["privileged"], unhealthy=["privileged"] + supervisor_client, + unsupported=[UnsupportedReason.PRIVILEGED], + unhealthy=[UnhealthyReason.PRIVILEGED], ) result = await async_setup_component(hass, "hassio", {}) @@ -431,12 +450,14 @@ async def test_ignored_unsupported_skipped( @pytest.mark.usefixtures("all_setup_requests") async def test_new_unsupported_unhealthy_reason( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """New unsupported/unhealthy reasons result in a generic repair until next core update.""" mock_resolution_info( - aioclient_mock, unsupported=["fake_unsupported"], unhealthy=["fake_unhealthy"] + supervisor_client, + unsupported=["fake_unsupported"], + unhealthy=["fake_unhealthy"], ) result = await async_setup_component(hass, "hassio", {}) @@ -481,40 +502,43 @@ async def test_new_unsupported_unhealthy_reason( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test repairs added for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - { - "uuid": "1235", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1236", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - } - ], - }, 
- { - "uuid": "1237", - "type": "should_not_be_repair", - "context": "os", - "reference": None, - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(uuid_issue1 := uuid4()), + ), + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(uuid_issue2 := uuid4()), + ), + Issue( + type="should_not_be_repair", + context=ContextType.OS, + reference=None, + uuid=uuid4(), + ), ], + suggestions_by_issue={ + uuid_issue2: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=uuid4(), + auto=False, + ) + ] + }, ) result = await async_setup_component(hass, "hassio", {}) @@ -528,7 +552,7 @@ async def test_supervisor_issues( assert len(msg["result"]["issues"]) == 2 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=uuid_issue1.hex, context="system", type_="reboot_required", fixable=False, @@ -536,7 +560,7 @@ async def test_supervisor_issues( ) assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1235", + uuid=uuid_issue2.hex, context="system", type_="multiple_data_disks", fixable=True, @@ -547,61 +571,33 @@ async def test_supervisor_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_initial_failure( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + resolution_info: AsyncMock, + resolution_suggestions_for_issue: AsyncMock, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, ) -> None: """Test issues manager retries after initial update failure.""" - responses = [ - AiohttpClientMockResponse( - method="get", - url="http://127.0.0.1/resolution/info", - status=HTTPStatus.BAD_REQUEST, - json={ - "result": "error", - "message": "System is not ready with state: setup", - }, - ), - AiohttpClientMockResponse( - method="get", - url="http://127.0.0.1/resolution/info", - status=HTTPStatus.OK, - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - ], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, + resolution_info.side_effect = [ + SupervisorBadRequestError("System is not ready with state: setup"), + ResolutionInfo( + unsupported=[], + unhealthy=[], + suggestions=[], + issues=[ + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + ) + ], + checks=[ + Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), + Check(enabled=True, slug=CheckType.FREE_SPACE), + ], ), ] - async def mock_responses(*args): - nonlocal responses - return responses.pop(0) - - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - side_effect=mock_responses, - ) - aioclient_mock.get( - "http://127.0.0.1/resolution/issue/1234/suggestions", - json={"result": "ok", "data": {"suggestions": []}}, - ) - with patch("homeassistant.components.hassio.issues.REQUEST_REFRESH_DELAY", new=0.1): result = await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -625,11 +621,11 @@ async def mock_responses(*args): @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issues added and removed 
from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -643,7 +639,7 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": "1234", + "uuid": (issue_uuid := uuid4().hex), "type": "reboot_required", "context": "system", "reference": None, @@ -661,7 +657,7 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=issue_uuid, context="system", type_="reboot_required", fixable=False, @@ -675,13 +671,13 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": "1234", + "uuid": issue_uuid, "type": "reboot_required", "context": "system", "reference": None, "suggestions": [ { - "uuid": "1235", + "uuid": uuid4().hex, "type": "execute_reboot", "context": "system", "reference": None, @@ -701,7 +697,7 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=issue_uuid, context="system", type_="reboot_required", fixable=True, @@ -715,7 +711,7 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_removed", "data": { - "uuid": "1234", + "uuid": issue_uuid, "type": "reboot_required", "context": "system", "reference": None, @@ -736,37 +732,23 @@ async def test_supervisor_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_suggestions_fail( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, + resolution_suggestions_for_issue: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test failing to get suggestions for issue skips it.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - } - ], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, - ) - aioclient_mock.get( - "http://127.0.0.1/resolution/issue/1234/suggestions", - exc=TimeoutError(), + mock_resolution_info( + supervisor_client, + issues=[ + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + ) + ], ) + resolution_suggestions_for_issue.side_effect = SupervisorTimeoutError result = await async_setup_component(hass, "hassio", {}) assert result @@ -782,11 +764,11 @@ async def test_supervisor_issues_suggestions_fail( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_remove_missing_issue_without_error( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test HA skips message to remove issue that it didn't know about (sync issue).""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -816,16 +798,12 @@ async def test_supervisor_remove_missing_issue_without_error( @pytest.mark.usefixtures("all_setup_requests") async def test_system_is_not_ready( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + resolution_info: AsyncMock, caplog: pytest.LogCaptureFixture, ) 
-> None: """Ensure hassio starts despite error.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "", - "message": "System is not ready with state: setup", - }, + resolution_info.side_effect = SupervisorBadRequestError( + "System is not ready with state: setup" ) assert await async_setup_component(hass, "hassio", {}) @@ -835,14 +813,14 @@ async def test_system_is_not_ready( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) -@pytest.mark.usefixtures("all_setup_requests", "addon_installed") +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_detached_addon_missing( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issue for detached addon due to missing repository.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -856,7 +834,7 @@ async def test_supervisor_issues_detached_addon_missing( "data": { "event": "issue_changed", "data": { - "uuid": "1234", + "uuid": (issue_uuid := uuid4().hex), "type": "detached_addon_missing", "context": "addon", "reference": "test", @@ -874,7 +852,7 @@ async def test_supervisor_issues_detached_addon_missing( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=issue_uuid, context="addon", type_="detached_addon_missing", fixable=False, diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py index 7655f657edaf80..f8cac4e1a976c3 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -3,8 +3,17 @@ from collections.abc import Generator from http import HTTPStatus import os -from unittest.mock import patch - +from unittest.mock import AsyncMock, patch +from uuid import uuid4 + +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ( + ContextType, + Issue, + IssueType, + Suggestion, + SuggestionType, +) import pytest from homeassistant.core import HomeAssistant @@ -14,7 +23,6 @@ from .test_init import MOCK_ENVIRON from .test_issues import mock_resolution_info -from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -28,34 +36,39 @@ def fixture_supervisor_environ() -> Generator[None]: @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1235", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - } - ], - }, + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(sugg_uuid := uuid4()), + auto=False, + ) + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = 
issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -95,52 +108,53 @@ async def test_supervisor_issue_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": "test", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": "test", - }, - { - "uuid": "1236", - "type": "test_type", - "context": "system", - "reference": "test", - }, - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference="test", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference="test", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type="test_type", + context=ContextType.SYSTEM, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -189,52 +203,53 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1236" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confirmation( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions and choice requires confirmation.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": None, - }, - { - "uuid": "1236", - "type": "test_type", - "context": "system", - "reference": None, - }, - ], - }, + Issue( + 
type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type="test_type", + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -302,46 +317,46 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confir "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_skip_confirmation( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test confirmation skipped for fix flow for supervisor issue with one suggestion.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": None, - } - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -381,53 +396,54 @@ async def test_supervisor_issue_repair_flow_skip_confirmation( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow_error( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow fails when repair fails to apply.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "mount_failed", - "context": "mount", - "reference": 
"backup_share", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reload", - "context": "mount", - "reference": "backup_share", - }, - { - "uuid": "1236", - "type": "execute_remove", - "context": "mount", - "reference": "backup_share", - }, - ], - }, + Issue( + type=IssueType.MOUNT_FAILED, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(issue_uuid := uuid4()), + ), ], - suggestion_result=False, + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_RELOAD, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + ] + }, + suggestion_result=SupervisorError("boom"), ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -459,46 +475,52 @@ async def test_mount_failed_repair_flow_error( "description_placeholders": None, } - assert issue_registry.async_get_issue(domain="hassio", issue_id="1234") + assert issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow for mount_failed issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "mount_failed", - "context": "mount", - "reference": "backup_share", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reload", - "context": "mount", - "reference": "backup_share", - }, - { - "uuid": "1236", - "type": "execute_remove", - "context": "mount", - "reference": "backup_share", - }, - ], - }, + Issue( + type=IssueType.MOUNT_FAILED, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_RELOAD, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -551,77 +573,79 @@ async def test_mount_failed_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) -@pytest.mark.usefixtures("all_setup_requests", "addon_installed") 
+@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_docker_config_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "docker_config", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_rebuild", - "context": "system", - "reference": None, - } - ], - }, - { - "uuid": "1236", - "type": "docker_config", - "context": "core", - "reference": None, - "suggestions": [ - { - "uuid": "1237", - "type": "execute_rebuild", - "context": "core", - "reference": None, - } - ], - }, - { - "uuid": "1238", - "type": "docker_config", - "context": "addon", - "reference": "test", - "suggestions": [ - { - "uuid": "1239", - "type": "execute_rebuild", - "context": "addon", - "reference": "test", - } - ], - }, + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue1_uuid := uuid4()), + ), + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.CORE, + reference=None, + uuid=(issue2_uuid := uuid4()), + ), + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.ADDON, + reference="test", + uuid=(issue3_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue1_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ], + issue2_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.CORE, + reference=None, + uuid=uuid4(), + auto=False, + ), + ], + issue3_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.ADDON, + reference="test", + uuid=uuid4(), + auto=False, + ), + ], + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue1_uuid.hex + ) assert repair_issue client = await hass_client() @@ -661,52 +685,53 @@ async def test_supervisor_issue_docker_config_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue1_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_multiple_data_disks( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for multiple data disks supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1235", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - }, - { - "uuid": "1236", - "type": "adopt_data_disk", - "context": "system", - "reference": "/dev/sda1", - }, - ], - }, + Issue( + 
type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type=SuggestionType.ADOPT_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -774,49 +799,49 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1236" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) -@pytest.mark.usefixtures("all_setup_requests", "addon_installed") +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_detached_addon_removed( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "detached_addon_removed", - "context": "addon", - "reference": "test", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_remove", - "context": "addon", - "reference": "test", - } - ], - }, + Issue( + type=IssueType.DETACHED_ADDON_REMOVED, + context=ContextType.ADDON, + reference="test", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.ADDON, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -861,10 +886,107 @@ async def test_supervisor_issue_detached_addon_removed( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + + +@pytest.mark.parametrize( + "all_setup_requests", [{"include_addons": True}], indirect=True +) +@pytest.mark.usefixtures("all_setup_requests") +async def test_supervisor_issue_addon_boot_fail( + hass: HomeAssistant, + supervisor_client: AsyncMock, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test fix flow for supervisor issue.""" + mock_resolution_info( + supervisor_client, + issues=[ + Issue( + type="boot_fail", + 
context=ContextType.ADDON, + reference="test", + uuid=(issue_uuid := uuid4()), + ), + ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type="execute_start", + context=ContextType.ADDON, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type="disable_boot", + context=ContextType.ADDON, + reference="test", + uuid=uuid4(), + auto=False, + ), + ] + }, + ) + + assert await async_setup_component(hass, "hassio", {}) + + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) + assert repair_issue + + client = await hass_client() + + resp = await client.post( + "/api/repairs/issues/fix", + json={"handler": "hassio", "issue_id": repair_issue.issue_id}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "menu", + "flow_id": flow_id, + "handler": "hassio", + "step_id": "fix_menu", + "data_schema": [ + { + "type": "select", + "options": [ + ["addon_execute_start", "addon_execute_start"], + ["addon_disable_boot", "addon_disable_boot"], + ], + "name": "next_step_id", + } + ], + "menu_options": ["addon_execute_start", "addon_disable_boot"], + "description_placeholders": { + "reference": "test", + "addon": "test", + }, + } - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" + resp = await client.post( + f"/api/repairs/issues/fix/{flow_id}", + json={"next_step_id": "addon_execute_start"}, ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "create_entry", + "flow_id": flow_id, + "handler": "hassio", + "description": None, + "description_placeholders": None, + } + + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) diff --git a/tests/components/hassio/test_sensor.py b/tests/components/hassio/test_sensor.py index bd3de73baf58d7..7160a2cbf1601f 100644 --- a/tests/components/hassio/test_sensor.py +++ b/tests/components/hassio/test_sensor.py @@ -2,17 +2,14 @@ from datetime import timedelta import os -from unittest.mock import patch +from unittest.mock import AsyncMock, patch +from aiohasupervisor import SupervisorError from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant import config_entries -from homeassistant.components.hassio import ( - DOMAIN, - HASSIO_UPDATE_INTERVAL, - HassioAPIError, -) +from homeassistant.components.hassio import DOMAIN, HASSIO_UPDATE_INTERVAL from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE @@ -21,6 +18,8 @@ from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util +from .common import MOCK_REPOSITORIES, MOCK_STORE_ADDONS + from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker @@ -28,44 +27,21 @@ @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" _install_default_mocks(aioclient_mock) - 
_install_test_addon_stats_mock(aioclient_mock) - - -def _install_test_addon_stats_mock(aioclient_mock: AiohttpClientMocker): - """Install mock to provide valid stats for the test addon.""" - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) - - -def _install_test_addon_stats_failure_mock(aioclient_mock: AiohttpClientMocker): - """Install mocks to raise an exception when fetching stats for the test addon.""" - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - exc=HassioAPIError, - ) def _install_default_mocks(aioclient_mock: AiohttpClientMocker): """Install default mocks.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -78,13 +54,6 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -175,33 +144,9 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -214,6 +159,9 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): ) +@pytest.mark.parametrize( + ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] +) @pytest.mark.parametrize( ("entity_id", "expected"), [ @@ -272,6 +220,9 @@ async def test_sensor( assert state.state == expected +@pytest.mark.parametrize( + ("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)] +) @pytest.mark.parametrize( ("entity_id", "expected"), [ @@ -288,6 +239,7 @@ async def test_stats_addon_sensor( entity_registry: er.EntityRegistry, caplog: pytest.LogCaptureFixture, freezer: FrozenDateTimeFactory, + addon_stats: AsyncMock, ) -> None: """Test stats addons sensor.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -305,7 +257,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - _install_test_addon_stats_failure_mock(aioclient_mock) + addon_stats.side_effect = SupervisorError freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) @@ -315,7 +267,7 @@ async def test_stats_addon_sensor( 
aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - _install_test_addon_stats_mock(aioclient_mock) + addon_stats.side_effect = None freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) @@ -348,7 +300,7 @@ async def test_stats_addon_sensor( aioclient_mock.clear_requests() _install_default_mocks(aioclient_mock) - _install_test_addon_stats_failure_mock(aioclient_mock) + addon_stats.side_effect = SupervisorError freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)) async_fire_time_changed(hass) diff --git a/tests/components/hassio/test_update.py b/tests/components/hassio/test_update.py index 6195e62aaac236..c1775d6e0b4091 100644 --- a/tests/components/hassio/test_update.py +++ b/tests/components/hassio/test_update.py @@ -4,10 +4,11 @@ import os from unittest.mock import AsyncMock, patch -from aiohasupervisor import SupervisorBadRequestError +from aiohasupervisor import SupervisorBadRequestError, SupervisorError +from aiohasupervisor.models import StoreAddonUpdate import pytest -from homeassistant.components.hassio import DOMAIN, HassioAPIError +from homeassistant.components.hassio import DOMAIN from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -22,10 +23,16 @@ @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + addon_installed: AsyncMock, + store_info: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -38,13 +45,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/store", - json={ - "result": "ok", - "data": {"addons": [], "repositories": []}, - }, - ) aioclient_mock.get( "http://127.0.0.1/host/info", json={ @@ -115,22 +115,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get( - "http://127.0.0.1/addons/test/stats", - json={ - "result": "ok", - "data": { - "cpu_percent": 0.99, - "memory_usage": 182611968, - "memory_limit": 3977146368, - "memory_percent": 4.59, - "network_rx": 362570232, - "network_tx": 82374138, - "blk_read": 46010945536, - "blk_write": 15051526144, - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/core/stats", json={ @@ -163,33 +147,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker, addon_installed) -> None: }, }, ) - aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test/info", - json={"result": "ok", "data": {"auto_update": True}}, - ) - aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="") - aioclient_mock.get( - "http://127.0.0.1/addons/test2/info", - json={"result": "ok", "data": {"auto_update": False}}, - ) aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.post("http://127.0.0.1/refresh_updates", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - 
"data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -242,9 +202,7 @@ async def test_update_entities( assert state.attributes["auto_update"] is auto_update -async def test_update_addon( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_update_addon(hass: HomeAssistant, update_addon: AsyncMock) -> None: """Test updating addon update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -258,22 +216,16 @@ async def test_update_addon( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/addons/test/update", - json={"result": "ok", "data": {}}, - ) - await hass.services.async_call( "update", "install", {"entity_id": "update.test_update"}, blocking=True, ) + update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False)) -async def test_update_os( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_update_os(hass: HomeAssistant, supervisor_client: AsyncMock) -> None: """Test updating OS update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -287,22 +239,17 @@ async def test_update_os( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/os/update", - json={"result": "ok", "data": {}}, - ) - + supervisor_client.os.update.return_value = None await hass.services.async_call( "update", "install", {"entity_id": "update.home_assistant_operating_system_update"}, blocking=True, ) + supervisor_client.os.update.assert_called_once() -async def test_update_core( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_update_core(hass: HomeAssistant, supervisor_client: AsyncMock) -> None: """Test updating core update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) config_entry.add_to_hass(hass) @@ -316,21 +263,18 @@ async def test_update_core( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/core/update", - json={"result": "ok", "data": {}}, - ) - + supervisor_client.homeassistant.update.return_value = None await hass.services.async_call( "update", "install", - {"entity_id": "update.home_assistant_os_update"}, + {"entity_id": "update.home_assistant_core_update"}, blocking=True, ) + supervisor_client.homeassistant.update.assert_called_once() async def test_update_supervisor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating supervisor update entity.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -345,21 +289,19 @@ async def test_update_supervisor( assert result await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/supervisor/update", - json={"result": "ok", "data": {}}, - ) - + supervisor_client.supervisor.update.return_value = None await hass.services.async_call( "update", "install", {"entity_id": "update.home_assistant_supervisor_update"}, blocking=True, ) + supervisor_client.supervisor.update.assert_called_once() async def test_update_addon_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + update_addon: AsyncMock, ) -> None: """Test updating addon update entity with error.""" config_entry = 
MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -373,11 +315,7 @@ async def test_update_addon_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/addons/test/update", - exc=HassioAPIError, - ) - + update_addon.side_effect = SupervisorError with pytest.raises(HomeAssistantError, match=r"^Error updating test:"): assert not await hass.services.async_call( "update", @@ -388,7 +326,7 @@ async def test_update_addon_with_error( async def test_update_os_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating OS update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -402,11 +340,7 @@ async def test_update_os_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/os/update", - exc=HassioAPIError, - ) - + supervisor_client.os.update.side_effect = SupervisorError with pytest.raises( HomeAssistantError, match=r"^Error updating Home Assistant Operating System:" ): @@ -419,7 +353,7 @@ async def test_update_os_with_error( async def test_update_supervisor_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating supervisor update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -433,11 +367,7 @@ async def test_update_supervisor_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/supervisor/update", - exc=HassioAPIError, - ) - + supervisor_client.supervisor.update.side_effect = SupervisorError with pytest.raises( HomeAssistantError, match=r"^Error updating Home Assistant Supervisor:" ): @@ -450,7 +380,7 @@ async def test_update_supervisor_with_error( async def test_update_core_with_error( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, supervisor_client: AsyncMock ) -> None: """Test updating core update entity with error.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) @@ -464,11 +394,7 @@ async def test_update_core_with_error( ) await hass.async_block_till_done() - aioclient_mock.post( - "http://127.0.0.1/core/update", - exc=HassioAPIError, - ) - + supervisor_client.homeassistant.update.side_effect = SupervisorError with pytest.raises( HomeAssistantError, match=r"^Error updating Home Assistant Core:" ): @@ -625,19 +551,15 @@ async def test_setting_up_core_update_when_addon_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, addon_installed: AsyncMock, + addon_stats: AsyncMock, + addon_changelog: AsyncMock, ) -> None: """Test setting up core update when single addon fails.""" addon_installed.side_effect = SupervisorBadRequestError("Addon Test does not exist") + addon_stats.side_effect = SupervisorBadRequestError("add-on is not running") + addon_changelog.side_effect = SupervisorBadRequestError("add-on is not running") with ( patch.dict(os.environ, MOCK_ENVIRON), - patch( - "homeassistant.components.hassio.HassIO.get_addon_stats", - side_effect=HassioAPIError("add-on is not running"), - ), - patch( - "homeassistant.components.hassio.HassIO.get_addon_changelog", - side_effect=HassioAPIError("add-on is not running"), - ), ): result = await async_setup_component( hass, diff --git a/tests/components/hassio/test_websocket_api.py b/tests/components/hassio/test_websocket_api.py index 7d8f07bfaecda9..21e6b03678b182 100644 --- 
a/tests/components/hassio/test_websocket_api.py +++ b/tests/components/hassio/test_websocket_api.py @@ -1,5 +1,7 @@ """Test websocket API.""" +from unittest.mock import AsyncMock + import pytest from homeassistant.components.hassio.const import ( @@ -23,10 +25,13 @@ @pytest.fixture(autouse=True) -def mock_all(aioclient_mock: AiohttpClientMocker) -> None: +def mock_all( + aioclient_mock: AiohttpClientMocker, + supervisor_is_connected: AsyncMock, + resolution_info: AsyncMock, +) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) - aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) aioclient_mock.get( "http://127.0.0.1/info", @@ -64,19 +69,6 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) @pytest.mark.usefixtures("hassio_env") diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index 2c5231d2e7d575..4e790074700c51 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -178,6 +178,7 @@ def mock_problematic_appliance(request: pytest.FixtureRequest) -> Mock: ) mock.name = app type(mock).status = PropertyMock(return_value={}) + mock.get.side_effect = HomeConnectError mock.get_programs_active.side_effect = HomeConnectError mock.get_programs_available.side_effect = HomeConnectError mock.start_program.side_effect = HomeConnectError diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index de4263f63450c4..b564b003af62c7 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -6,19 +6,25 @@ from homeconnect.api import HomeConnectAPI import pytest +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity from homeassistant.components.home_connect.const import ( BSH_DOOR_STATE, BSH_DOOR_STATE_CLOSED, BSH_DOOR_STATE_LOCKED, BSH_DOOR_STATE_OPEN, + DOMAIN, REFRIGERATION_STATUS_DOOR_CLOSED, REFRIGERATION_STATUS_DOOR_OPEN, REFRIGERATION_STATUS_DOOR_REFRIGERATOR, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_json_object_fixture @@ -68,9 +74,9 @@ async def test_binary_sensors_door_states( entity_id = "binary_sensor.washer_door" get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.status.update({BSH_DOOR_STATE: {"value": state}}) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED - appliance.status.update({BSH_DOOR_STATE: {"value": state}}) await async_update_entity(hass, entity_id) await 
hass.async_block_till_done() assert hass.states.is_state(entity_id, expected) @@ -130,3 +136,72 @@ async def test_bianry_sensors_fridge_door_states( await async_update_entity(hass, entity_id) await hass.async_block_till_done() assert hass.states.is_state(entity_id, expected) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("bypass_throttle") +async def test_create_issue( + hass: HomeAssistant, + appliance: Mock, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = "binary_sensor.washer_door" + get_appliances.return_value = [appliance] + issue_id = f"deprecated_binary_common_door_sensor_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.status.update({BSH_DOOR_STATE: {"value": BSH_DOOR_STATE_OPEN}}) + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue(DOMAIN, issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py index 7d375ce0b62bb4..7a9747929c9f81 100644 --- a/tests/components/home_connect/test_light.py +++ b/tests/components/home_connect/test_light.py @@ -8,6 +8,7 @@ from homeassistant.components.home_connect.const import ( BSH_AMBIENT_LIGHT_BRIGHTNESS, + BSH_AMBIENT_LIGHT_COLOR, BSH_AMBIENT_LIGHT_CUSTOM_COLOR, BSH_AMBIENT_LIGHT_ENABLED, COOKING_LIGHTING, @@ -26,6 +27,7 @@ Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from .conftest import get_all_appliances @@ -67,7 +69,7 @@ async def test_light( ("entity_id", "status", "service", "service_data", "state", "appliance"), [ ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": True, @@ -79,7 +81,7 @@ async def test_light( "Hood", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": True, @@ -92,7 +94,7 @@ async def test_light( "Hood", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: {"value": False}, COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, @@ -103,7 +105,7 @@ async def test_light( "Hood", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": None, @@ -116,7 +118,7 @@ async def test_light( "Hood", ), ( - "light.hood_ambientlight", + 
"light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: { "value": True, @@ -129,7 +131,7 @@ async def test_light( "Hood", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: {"value": False}, BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, @@ -140,7 +142,7 @@ async def test_light( "Hood", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, @@ -150,6 +152,22 @@ async def test_light( STATE_ON, "Hood", ), + ( + "light.hood_ambient_light", + { + BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, + BSH_AMBIENT_LIGHT_COLOR: { + "value": "", + }, + BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, + }, + SERVICE_TURN_ON, + { + "rgb_color": [255, 255, 0], + }, + STATE_ON, + "Hood", + ), ( "light.fridgefreezer_external_light", { @@ -215,10 +233,11 @@ async def test_light_functionality( "mock_attr", "attr_side_effect", "problematic_appliance", + "exception_match", ), [ ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": False, @@ -229,9 +248,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*on.*", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: { "value": True, @@ -243,9 +263,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*on.*", ), ( - "light.hood_light", + "light.hood_functional_light", { COOKING_LIGHTING: {"value": False}, }, @@ -254,9 +275,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*off.*", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: { "value": True, @@ -268,9 +290,10 @@ async def test_light_functionality( "set_setting", [HomeConnectError, HomeConnectError], "Hood", + r"Error.*turn.*on.*", ), ( - "light.hood_ambientlight", + "light.hood_ambient_light", { BSH_AMBIENT_LIGHT_ENABLED: { "value": True, @@ -280,8 +303,9 @@ async def test_light_functionality( SERVICE_TURN_ON, {"brightness": 200}, "set_setting", - [HomeConnectError, None, HomeConnectError, HomeConnectError], + [HomeConnectError, None, HomeConnectError], "Hood", + r"Error.*set.*color.*", ), ], indirect=["problematic_appliance"], @@ -294,6 +318,7 @@ async def test_switch_exception_handling( mock_attr: str, attr_side_effect: list, problematic_appliance: Mock, + exception_match: str, bypass_throttle: Generator[None], hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -316,5 +341,8 @@ async def test_switch_exception_handling( problematic_appliance.status.update(status) service_data["entity_id"] = entity_id - await hass.services.async_call(LIGHT_DOMAIN, service, service_data, blocking=True) + with pytest.raises(ServiceValidationError, match=exception_match): + await hass.services.async_call( + LIGHT_DOMAIN, service, service_data, blocking=True + ) assert getattr(problematic_appliance, mock_attr).call_count == len(attr_side_effect) diff --git a/tests/components/home_connect/test_number.py b/tests/components/home_connect/test_number.py new file mode 100644 index 00000000000000..f70e307cb416ea --- /dev/null +++ b/tests/components/home_connect/test_number.py @@ -0,0 +1,176 @@ +"""Tests for home_connect number entities.""" + +from collections.abc import Awaitable, Callable, Generator +import random +from unittest.mock import MagicMock, Mock + +from homeconnect.api import 
HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + ATTR_CONSTRAINTS, + ATTR_STEPSIZE, + ATTR_UNIT, + ATTR_VALUE, +) +from homeassistant.components.number import ( + ATTR_MAX, + ATTR_MIN, + ATTR_VALUE as SERVICE_ATTR_VALUE, + DEFAULT_MIN_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.NUMBER] + + +async def test_number( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test number entity.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize("appliance", ["Refrigerator"], indirect=True) +@pytest.mark.parametrize( + ( + "entity_id", + "setting_key", + "min_value", + "max_value", + "step_size", + "unit_of_measurement", + ), + [ + ( + f"{NUMBER_DOMAIN.lower()}.refrigerator_refrigerator_temperature", + "Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", + 7, + 15, + 0.1, + "°C", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_number_entity_functionality( + appliance: Mock, + entity_id: str, + setting_key: str, + bypass_throttle: Generator[None], + min_value: int, + max_value: int, + step_size: float, + unit_of_measurement: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test number entity functionality.""" + appliance.get.side_effect = [ + { + ATTR_CONSTRAINTS: { + ATTR_MIN: min_value, + ATTR_MAX: max_value, + ATTR_STEPSIZE: step_size, + }, + ATTR_UNIT: unit_of_measurement, + } + ] + get_appliances.return_value = [appliance] + current_value = min_value + appliance.status.update({setting_key: {ATTR_VALUE: current_value}}) + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + assert hass.states.is_state(entity_id, str(current_value)) + state = hass.states.get(entity_id) + assert state.attributes["min"] == min_value + assert state.attributes["max"] == max_value + assert state.attributes["step"] == step_size + assert state.attributes["unit_of_measurement"] == unit_of_measurement + + new_value = random.randint(min_value + 1, max_value) + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + SERVICE_ATTR_VALUE: new_value, + }, + blocking=True, + ) + appliance.set_setting.assert_called_once_with(setting_key, new_value) + + +@pytest.mark.parametrize("problematic_appliance", ["Refrigerator"], indirect=True) +@pytest.mark.parametrize( + ("entity_id", "setting_key", "mock_attr"), + [ + ( + f"{NUMBER_DOMAIN.lower()}.refrigerator_refrigerator_temperature", + "Refrigeration.FridgeFreezer.Setting.SetpointTemperatureRefrigerator", + "set_setting", + ), + ], +) 
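The new number tests drive the constraint lookup through the appliance mock's get call: unittest.mock treats an iterable side_effect as a queue of return values, so the single payload assigned in test_number_entity_functionality is returned on the first constraints fetch. A small self-contained illustration of that behaviour, using plain string keys to stand in for the ATTR_CONSTRAINTS, ATTR_STEPSIZE and ATTR_UNIT constants used above (the exact constant values and the settings path below are illustrative assumptions):

from unittest.mock import MagicMock

appliance = MagicMock()
# An iterable side_effect yields one return value per call, in order.
appliance.get.side_effect = [
    {
        "constraints": {"min": 7, "max": 15, "stepsize": 0.1},
        "unit": "°C",
    }
]

# Hypothetical settings path, only for illustration.
payload = appliance.get("/settings/SetpointTemperatureRefrigerator")
assert payload["constraints"]["max"] == 15
assert payload["unit"] == "°C"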
+@pytest.mark.usefixtures("bypass_throttle") +async def test_number_entity_error( + problematic_appliance: Mock, + entity_id: str, + setting_key: str, + mock_attr: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test number entity error.""" + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + problematic_appliance.status.update({setting_key: {}}) + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + with pytest.raises( + ServiceValidationError, match=r"Error.*assign.*value.*to.*setting.*" + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + SERVICE_ATTR_VALUE: DEFAULT_MIN_VALUE, + }, + blocking=True, + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index f0565c178feded..f2ee3b13922eee 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -8,6 +8,10 @@ import pytest from homeassistant.components.home_connect.const import ( + BSH_DOOR_STATE, + BSH_DOOR_STATE_CLOSED, + BSH_DOOR_STATE_LOCKED, + BSH_DOOR_STATE_OPEN, BSH_EVENT_PRESENT_STATE_CONFIRMED, BSH_EVENT_PRESENT_STATE_OFF, BSH_EVENT_PRESENT_STATE_PRESENT, @@ -26,14 +30,14 @@ EVENT_PROG_DELAYED_START = { "BSH.Common.Status.OperationState": { - "value": "BSH.Common.EnumType.OperationState.Delayed" + "value": "BSH.Common.EnumType.OperationState.DelayedStart" }, } EVENT_PROG_REMAIN_NO_VALUE = { "BSH.Common.Option.RemainingProgramTime": {}, "BSH.Common.Status.OperationState": { - "value": "BSH.Common.EnumType.OperationState.Delayed" + "value": "BSH.Common.EnumType.OperationState.DelayedStart" }, } @@ -103,13 +107,13 @@ async def test_sensors( # Entity mapping to expected state at each program sequence. 
ENTITY_ID_STATES = { "sensor.dishwasher_operation_state": ( - "Delayed", - "Run", - "Run", - "Run", - "Ready", + "delayedstart", + "run", + "run", + "run", + "ready", ), - "sensor.dishwasher_remaining_program_time": ( + "sensor.dishwasher_program_finish_time": ( "unavailable", "2021-01-09T12:00:00+00:00", "2021-01-09T12:00:00+00:00", @@ -158,6 +162,8 @@ async def test_event_sensors( get_appliances.return_value = [appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.get_programs_available = MagicMock(return_value=["dummy_program"]) + appliance.status.update(EVENT_PROG_DELAYED_START) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -198,11 +204,13 @@ async def test_remaining_prog_time_edge_cases( ) -> None: """Run program sequence to test edge cases for the remaining_prog_time entity.""" get_appliances.return_value = [appliance] - entity_id = "sensor.dishwasher_remaining_program_time" + entity_id = "sensor.dishwasher_program_finish_time" time_to_freeze = "2021-01-09 12:00:00+00:00" freezer.move_to(time_to_freeze) assert config_entry.state == ConfigEntryState.NOT_LOADED + appliance.get_programs_available = MagicMock(return_value=["dummy_program"]) + appliance.status.update(EVENT_PROG_REMAIN_NO_VALUE) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -221,28 +229,49 @@ async def test_remaining_prog_time_edge_cases( ("entity_id", "status_key", "event_value_update", "expected", "appliance"), [ ( - "sensor.fridgefreezer_door_alarm_freezer", + "sensor.dishwasher_door", + BSH_DOOR_STATE, + BSH_DOOR_STATE_LOCKED, + "locked", + "Dishwasher", + ), + ( + "sensor.dishwasher_door", + BSH_DOOR_STATE, + BSH_DOOR_STATE_CLOSED, + "closed", + "Dishwasher", + ), + ( + "sensor.dishwasher_door", + BSH_DOOR_STATE, + BSH_DOOR_STATE_OPEN, + "open", + "Dishwasher", + ), + ( + "sensor.fridgefreezer_freezer_door_alarm", "EVENT_NOT_IN_STATUS_YET_SO_SET_TO_OFF", "", "off", "FridgeFreezer", ), ( - "sensor.fridgefreezer_door_alarm_freezer", + "sensor.fridgefreezer_freezer_door_alarm", REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, BSH_EVENT_PRESENT_STATE_OFF, "off", "FridgeFreezer", ), ( - "sensor.fridgefreezer_door_alarm_freezer", + "sensor.fridgefreezer_freezer_door_alarm", REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, BSH_EVENT_PRESENT_STATE_PRESENT, "present", "FridgeFreezer", ), ( - "sensor.fridgefreezer_door_alarm_freezer", + "sensor.fridgefreezer_freezer_door_alarm", REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, BSH_EVENT_PRESENT_STATE_CONFIRMED, "confirmed", diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index d16a4626e592a2..06201ffd58c9a7 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -7,11 +7,14 @@ import pytest from homeassistant.components.home_connect.const import ( + ATTR_ALLOWED_VALUES, + ATTR_CONSTRAINTS, BSH_ACTIVE_PROGRAM, BSH_CHILD_LOCK_STATE, BSH_OPERATION_STATE, BSH_POWER_OFF, BSH_POWER_ON, + BSH_POWER_STANDBY, BSH_POWER_STATE, REFRIGERATION_SUPERMODEFREEZER, ) @@ -26,6 +29,7 @@ Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from .conftest import get_all_appliances @@ -34,7 +38,7 @@ SETTINGS_STATUS = { setting.pop("key"): setting for setting in load_json_object_fixture("home_connect/settings.json") - .get("Washer") + .get("Dishwasher") .get("data") .get("settings") } @@ -64,56 +68,38 @@ async def test_switches( 
@pytest.mark.parametrize( - ("entity_id", "status", "service", "state"), + ("entity_id", "status", "service", "state", "appliance"), [ ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, SERVICE_TURN_ON, STATE_ON, + "Dishwasher", ), ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": ""}}, SERVICE_TURN_OFF, STATE_OFF, + "Dishwasher", ), ( - "switch.washer_power", - {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, - SERVICE_TURN_ON, - STATE_ON, - ), - ( - "switch.washer_power", - {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, - SERVICE_TURN_OFF, - STATE_OFF, - ), - ( - "switch.washer_power", - { - BSH_POWER_STATE: {"value": ""}, - BSH_OPERATION_STATE: { - "value": "BSH.Common.EnumType.OperationState.Inactive" - }, - }, - SERVICE_TURN_OFF, - STATE_OFF, - ), - ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": True}}, SERVICE_TURN_ON, STATE_ON, + "Dishwasher", ), ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": False}}, SERVICE_TURN_OFF, STATE_OFF, + "Dishwasher", ), ], + indirect=["appliance"], ) async def test_switch_functionality( entity_id: str, @@ -145,51 +131,72 @@ async def test_switch_functionality( @pytest.mark.parametrize( - ("entity_id", "status", "service", "mock_attr"), + ( + "entity_id", + "status", + "service", + "mock_attr", + "problematic_appliance", + "exception_match", + ), [ ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, SERVICE_TURN_ON, "start_program", + "Dishwasher", + r"Error.*start.*program.*", ), ( - "switch.washer_program_mix", + "switch.dishwasher_program_mix", {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, SERVICE_TURN_OFF, "stop_program", + "Dishwasher", + r"Error.*stop.*program.*", ), ( - "switch.washer_power", - {BSH_POWER_STATE: {"value": ""}}, - SERVICE_TURN_ON, + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, + SERVICE_TURN_OFF, "set_setting", + "Dishwasher", + r"Error.*turn.*off.*appliance.*value", ), ( - "switch.washer_power", + "switch.dishwasher_power", {BSH_POWER_STATE: {"value": ""}}, - SERVICE_TURN_OFF, + SERVICE_TURN_ON, "set_setting", + "Dishwasher", + r"Error.*turn.*on.*appliance.*", ), ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": ""}}, SERVICE_TURN_ON, "set_setting", + "Dishwasher", + r"Error.*turn.*on.*key.*", ), ( - "switch.washer_childlock", + "switch.dishwasher_child_lock", {BSH_CHILD_LOCK_STATE: {"value": ""}}, SERVICE_TURN_OFF, "set_setting", + "Dishwasher", + r"Error.*turn.*off.*key.*", ), ], + indirect=["problematic_appliance"], ) async def test_switch_exception_handling( entity_id: str, status: dict, service: str, mock_attr: str, + exception_match: str, bypass_throttle: Generator[None], hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -204,6 +211,7 @@ async def test_switch_exception_handling( get_appliances.return_value = [problematic_appliance] assert config_entry.state == ConfigEntryState.NOT_LOADED + problematic_appliance.status.update(status) assert await integration_setup() assert config_entry.state == ConfigEntryState.LOADED @@ -211,10 +219,10 @@ async def test_switch_exception_handling( with pytest.raises(HomeConnectError): getattr(problematic_appliance, mock_attr)() - problematic_appliance.status.update(status) - await hass.services.async_call( - SWITCH_DOMAIN, service, 
{"entity_id": entity_id}, blocking=True - ) + with pytest.raises(ServiceValidationError, match=exception_match): + await hass.services.async_call( + SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) assert getattr(problematic_appliance, mock_attr).call_count == 2 @@ -222,14 +230,14 @@ async def test_switch_exception_handling( ("entity_id", "status", "service", "state", "appliance"), [ ( - "switch.fridgefreezer_supermode_freezer", + "switch.fridgefreezer_freezer_super_mode", {REFRIGERATION_SUPERMODEFREEZER: {"value": True}}, SERVICE_TURN_ON, STATE_ON, "FridgeFreezer", ), ( - "switch.fridgefreezer_supermode_freezer", + "switch.fridgefreezer_freezer_super_mode", {REFRIGERATION_SUPERMODEFREEZER: {"value": False}}, SERVICE_TURN_OFF, STATE_OFF, @@ -274,21 +282,30 @@ async def test_ent_desc_switch_functionality( @pytest.mark.parametrize( - ("entity_id", "status", "service", "mock_attr", "problematic_appliance"), + ( + "entity_id", + "status", + "service", + "mock_attr", + "problematic_appliance", + "exception_match", + ), [ ( - "switch.fridgefreezer_supermode_freezer", + "switch.fridgefreezer_freezer_super_mode", {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, SERVICE_TURN_ON, "set_setting", "FridgeFreezer", + r"Error.*turn.*on.*key.*", ), ( - "switch.fridgefreezer_supermode_freezer", + "switch.fridgefreezer_freezer_super_mode", {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, SERVICE_TURN_OFF, "set_setting", "FridgeFreezer", + r"Error.*turn.*off.*key.*", ), ], indirect=["problematic_appliance"], @@ -298,6 +315,7 @@ async def test_ent_desc_switch_exception_handling( status: dict, service: str, mock_attr: str, + exception_match: str, bypass_throttle: Generator[None], hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -326,7 +344,165 @@ async def test_ent_desc_switch_exception_handling( getattr(problematic_appliance, mock_attr)() problematic_appliance.status.update(status) + with pytest.raises(ServiceValidationError, match=exception_match): + await hass.services.async_call( + SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 + + +@pytest.mark.parametrize( + ("entity_id", "status", "allowed_values", "service", "power_state", "appliance"), + [ + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, + [BSH_POWER_ON, BSH_POWER_OFF], + SERVICE_TURN_ON, + STATE_ON, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, + [BSH_POWER_ON, BSH_POWER_OFF], + SERVICE_TURN_OFF, + STATE_OFF, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + { + BSH_POWER_STATE: {"value": ""}, + BSH_OPERATION_STATE: { + "value": "BSH.Common.EnumType.OperationState.Run" + }, + }, + [BSH_POWER_ON], + SERVICE_TURN_ON, + STATE_ON, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + { + BSH_POWER_STATE: {"value": ""}, + BSH_OPERATION_STATE: { + "value": "BSH.Common.EnumType.OperationState.Inactive" + }, + }, + [BSH_POWER_ON], + SERVICE_TURN_ON, + STATE_OFF, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, + [BSH_POWER_ON, BSH_POWER_STANDBY], + SERVICE_TURN_ON, + STATE_ON, + "Dishwasher", + ), + ( + "switch.dishwasher_power", + {BSH_POWER_STATE: {"value": BSH_POWER_STANDBY}}, + [BSH_POWER_ON, BSH_POWER_STANDBY], + SERVICE_TURN_OFF, + STATE_OFF, + "Dishwasher", + ), + ], + indirect=["appliance"], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_power_swtich( + 
entity_id: str, + status: dict, + allowed_values: list[str], + service: str, + power_state: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test power switch functionality.""" + appliance.get.side_effect = [ + { + ATTR_CONSTRAINTS: { + ATTR_ALLOWED_VALUES: allowed_values, + }, + } + ] + appliance.status.update(SETTINGS_STATUS) + appliance.status.update(status) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + await hass.services.async_call( SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) - assert getattr(problematic_appliance, mock_attr).call_count == 2 + assert hass.states.is_state(entity_id, power_state) + + +@pytest.mark.parametrize( + ("entity_id", "allowed_values", "service", "appliance", "exception_match"), + [ + ( + "switch.dishwasher_power", + [BSH_POWER_ON], + SERVICE_TURN_OFF, + "Dishwasher", + r".*not support.*turn.*off.*", + ), + ( + "switch.dishwasher_power", + None, + SERVICE_TURN_OFF, + "Dishwasher", + r".*Unable.*turn.*off.*support.*not.*determined.*", + ), + ], + indirect=["appliance"], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_power_switch_service_validation_errors( + entity_id: str, + allowed_values: list[str], + service: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + exception_match: str, + get_appliances: MagicMock, +) -> None: + """Test power switch functionality validation errors.""" + if allowed_values: + appliance.get.side_effect = [ + { + ATTR_CONSTRAINTS: { + ATTR_ALLOWED_VALUES: allowed_values, + }, + } + ] + appliance.status.update(SETTINGS_STATUS) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + appliance.status.update({BSH_POWER_STATE: {"value": BSH_POWER_ON}}) + + with pytest.raises(ServiceValidationError, match=exception_match): + await hass.services.async_call( + SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) diff --git a/tests/components/home_connect/test_time.py b/tests/components/home_connect/test_time.py new file mode 100644 index 00000000000000..25ce39786a5bae --- /dev/null +++ b/tests/components/home_connect/test_time.py @@ -0,0 +1,150 @@ +"""Tests for home_connect time entities.""" + +from collections.abc import Awaitable, Callable, Generator +from datetime import time +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ATTR_VALUE +from homeassistant.components.time import DOMAIN as TIME_DOMAIN, SERVICE_SET_VALUE +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TIME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.TIME] + + +async def test_time( + bypass_throttle: Generator[None], + hass: 
HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test time entity.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +@pytest.mark.parametrize( + ("entity_id", "setting_key", "setting_value", "expected_state"), + [ + ( + f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + {ATTR_VALUE: 59}, + str(time(second=59)), + ), + ( + f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + {ATTR_VALUE: None}, + "unknown", + ), + ( + f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + None, + "unknown", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_time_entity_functionality( + appliance: Mock, + entity_id: str, + setting_key: str, + setting_value: dict, + expected_state: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test time entity functionality.""" + get_appliances.return_value = [appliance] + appliance.status.update({setting_key: setting_value}) + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + assert hass.states.is_state(entity_id, expected_state) + + new_value = 30 + assert hass.states.get(entity_id).state != new_value + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(second=new_value), + }, + blocking=True, + ) + appliance.set_setting.assert_called_once_with(setting_key, new_value) + + +@pytest.mark.parametrize("problematic_appliance", ["Oven"], indirect=True) +@pytest.mark.parametrize( + ("entity_id", "setting_key", "mock_attr"), + [ + ( + f"{TIME_DOMAIN}.oven_alarm_clock", + "BSH.Common.Setting.AlarmClock", + "set_setting", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_time_entity_error( + problematic_appliance: Mock, + entity_id: str, + setting_key: str, + mock_attr: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test time entity error.""" + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + problematic_appliance.status.update({setting_key: {}}) + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + with pytest.raises( + ServiceValidationError, match=r"Error.*assign.*value.*to.*setting.*" + ): + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index a66d13e5ffec1b..33d78cd6c9fac2 100644 --- a/tests/components/homeassistant/test_init.py +++ b/tests/components/homeassistant/test_init.py @@ -127,7 +127,7 @@ async 
def test_reload_core_conf(hass: HomeAssistant) -> None: @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) @patch("homeassistant.components.homeassistant._LOGGER.error") -@patch("homeassistant.config.async_process_ha_core_config") +@patch("homeassistant.core_config.async_process_ha_core_config") async def test_reload_core_with_wrong_conf( mock_process, mock_error, hass: HomeAssistant ) -> None: @@ -242,7 +242,7 @@ async def test_setting_location(hass: HomeAssistant) -> None: assert elevation != 50 await hass.services.async_call( "homeassistant", - "set_location", + SERVICE_SET_LOCATION, {"latitude": 30, "longitude": 40}, blocking=True, ) @@ -253,12 +253,24 @@ async def test_setting_location(hass: HomeAssistant) -> None: await hass.services.async_call( "homeassistant", - "set_location", + SERVICE_SET_LOCATION, {"latitude": 30, "longitude": 40, "elevation": 50}, blocking=True, ) + assert hass.config.latitude == 30 + assert hass.config.longitude == 40 assert hass.config.elevation == 50 + await hass.services.async_call( + "homeassistant", + SERVICE_SET_LOCATION, + {"latitude": 30, "longitude": 40, "elevation": 0}, + blocking=True, + ) + assert hass.config.latitude == 30 + assert hass.config.longitude == 40 + assert hass.config.elevation == 0 + async def test_require_admin( hass: HomeAssistant, hass_read_only_user: MockUser diff --git a/tests/components/homeassistant/triggers/test_time.py b/tests/components/homeassistant/triggers/test_time.py index 5455b06d1c0990..8900998a7b875b 100644 --- a/tests/components/homeassistant/triggers/test_time.py +++ b/tests/components/homeassistant/triggers/test_time.py @@ -159,7 +159,10 @@ async def test_if_fires_using_at_input_datetime( @pytest.mark.parametrize( ("conf_at", "trigger_deltas"), [ - (["5:00:00", "6:00:00"], [timedelta(0), timedelta(hours=1)]), + ( + ["5:00:00", "6:00:00", "{{ '7:00:00' }}"], + [timedelta(0), timedelta(hours=1), timedelta(hours=2)], + ), ( [ "5:00:05", @@ -435,10 +438,14 @@ async def test_untrack_time_change(hass: HomeAssistant) -> None: assert len(mock_track_time_change.mock_calls) == 3 +@pytest.mark.parametrize( + ("at_sensor"), ["sensor.next_alarm", "{{ 'sensor.next_alarm' }}"] +) async def test_if_fires_using_at_sensor( hass: HomeAssistant, freezer: FrozenDateTimeFactory, service_calls: list[ServiceCall], + at_sensor: str, ) -> None: """Test for firing at sensor time.""" now = dt_util.now() @@ -461,7 +468,7 @@ async def test_if_fires_using_at_sensor( automation.DOMAIN, { automation.DOMAIN: { - "trigger": {"platform": "time", "at": "sensor.next_alarm"}, + "trigger": {"platform": "time", "at": at_sensor}, "action": { "service": "test.automation", "data_template": {"some": some_data}, @@ -626,6 +633,9 @@ async def test_if_fires_using_at_sensor_with_offset( {"platform": "time", "at": "input_datetime.bla"}, {"platform": "time", "at": "sensor.bla"}, {"platform": "time", "at": "12:34"}, + {"platform": "time", "at": "{{ '12:34' }}"}, + {"platform": "time", "at": "{{ 'input_datetime.bla' }}"}, + {"platform": "time", "at": "{{ 'sensor.bla' }}"}, {"platform": "time", "at": {"entity_id": "sensor.bla", "offset": "-00:01"}}, { "platform": "time", @@ -724,3 +734,70 @@ async def test_datetime_in_past_on_load( service_calls[2].data["some"] == f"time-{future.day}-{future.hour}-input_datetime.my_trigger" ) + + +@pytest.mark.parametrize( + "trigger", + [ + {"platform": "time", "at": "{{ 'hello world' }}"}, + {"platform": "time", "at": "{{ 74 }}"}, + {"platform": "time", "at": "{{ true }}"}, + {"platform": "time", "at": "{{ 
7.5465 }}"}, + ], +) +async def test_if_at_template_renders_bad_value( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + trigger: dict[str, str], +) -> None: + """Test for invalid templates.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": trigger, + "action": { + "service": "test.automation", + }, + } + }, + ) + + await hass.async_block_till_done() + + assert ( + "expected HH:MM, HH:MM:SS or Entity ID with domain 'input_datetime' or 'sensor'" + in caplog.text + ) + + +@pytest.mark.parametrize( + "trigger", + [ + {"platform": "time", "at": "{{ now().strftime('%H:%M') }}"}, + {"platform": "time", "at": "{{ states('sensor.blah') | int(0) }}"}, + ], +) +async def test_if_at_template_limited_template( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + trigger: dict[str, str], +) -> None: + """Test for invalid templates.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": trigger, + "action": { + "service": "test.automation", + }, + } + }, + ) + + await hass.async_block_till_done() + + assert "is not supported in limited templates" in caplog.text diff --git a/tests/components/homeassistant_hardware/test_config_flow.py b/tests/components/homeassistant_hardware/test_config_flow.py index b94238c1225e96..8b0995a67f392c 100644 --- a/tests/components/homeassistant_hardware/test_config_flow.py +++ b/tests/components/homeassistant_hardware/test_config_flow.py @@ -120,6 +120,11 @@ def mock_test_firmware_platform( yield +@pytest.fixture(autouse=True) +async def fixture_mock_supervisor_client(supervisor_client: AsyncMock): + """Mock supervisor client in tests.""" + + def delayed_side_effect() -> Callable[..., Awaitable[None]]: """Slows down eager tasks by delaying for an event loop tick.""" diff --git a/tests/components/homeassistant_hardware/test_config_flow_failures.py b/tests/components/homeassistant_hardware/test_config_flow_failures.py index a5c5f4d666a0d5..5a6f765c44ccb8 100644 --- a/tests/components/homeassistant_hardware/test_config_flow_failures.py +++ b/tests/components/homeassistant_hardware/test_config_flow_failures.py @@ -25,6 +25,15 @@ from tests.common import MockConfigEntry +@pytest.fixture(autouse=True) +async def fixture_mock_supervisor_client(supervisor_client: AsyncMock): + """Mock supervisor client in tests.""" + + +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.unsupported_firmware"], +) @pytest.mark.parametrize( "next_step", [ @@ -55,6 +64,10 @@ async def test_config_flow_cannot_probe_firmware( assert result["reason"] == "unsupported_firmware" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.not_hassio"], +) async def test_config_flow_zigbee_not_hassio_wrong_firmware( hass: HomeAssistant, ) -> None: @@ -80,6 +93,10 @@ async def test_config_flow_zigbee_not_hassio_wrong_firmware( assert result["reason"] == "not_hassio" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_already_running"], +) async def test_config_flow_zigbee_flasher_addon_already_running( hass: HomeAssistant, ) -> None: @@ -114,6 +131,10 @@ async def test_config_flow_zigbee_flasher_addon_already_running( assert result["reason"] == "addon_already_running" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_info_failed"], +) async def 
test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( @@ -147,6 +168,10 @@ async def test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) assert result["reason"] == "addon_info_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_install_failed"], +) async def test_config_flow_zigbee_flasher_addon_install_fails( hass: HomeAssistant, ) -> None: @@ -177,6 +202,10 @@ async def test_config_flow_zigbee_flasher_addon_install_fails( assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_set_config_failed"], +) async def test_config_flow_zigbee_flasher_addon_set_config_fails( hass: HomeAssistant, ) -> None: @@ -211,6 +240,10 @@ async def test_config_flow_zigbee_flasher_addon_set_config_fails( assert result["reason"] == "addon_set_config_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_start_failed"], +) async def test_config_flow_zigbee_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( @@ -272,6 +305,10 @@ async def test_config_flow_zigbee_flasher_uninstall_fails(hass: HomeAssistant) - assert result["step_id"] == "confirm_zigbee" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.not_hassio_thread"], +) async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: """Test when the stick is used with a non-hassio setup and Thread is selected.""" result = await hass.config_entries.flow.async_init( @@ -295,6 +332,10 @@ async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: assert result["reason"] == "not_hassio_thread" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_info_failed"], +) async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( @@ -319,6 +360,10 @@ async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: assert result["reason"] == "addon_info_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.otbr_addon_already_running"], +) async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> None: """Test failure case when the Thread addon is already running.""" result = await hass.config_entries.flow.async_init( @@ -354,6 +399,10 @@ async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> assert result["reason"] == "otbr_addon_already_running" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_install_failed"], +) async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( @@ -381,6 +430,10 @@ async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> No assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + 
["component.test_firmware_domain.config.abort.addon_set_config_failed"], +) async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( @@ -408,6 +461,10 @@ async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> assert result["reason"] == "addon_set_config_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.config.abort.addon_start_failed"], +) async def test_config_flow_thread_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( @@ -469,6 +526,10 @@ async def test_config_flow_thread_flasher_uninstall_fails(hass: HomeAssistant) - assert result["step_id"] == "confirm_otbr" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.options.abort.zha_still_using_stick"], +) async def test_options_flow_zigbee_to_thread_zha_configured( hass: HomeAssistant, ) -> None: @@ -506,6 +567,10 @@ async def test_options_flow_zigbee_to_thread_zha_configured( assert result["reason"] == "zha_still_using_stick" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test_firmware_domain.options.abort.otbr_still_using_stick"], +) async def test_options_flow_thread_to_zigbee_otbr_configured( hass: HomeAssistant, ) -> None: diff --git a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py index f2d9c0f10ad331..22e3e3389864c3 100644 --- a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py +++ b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py @@ -7,15 +7,10 @@ from unittest.mock import AsyncMock, Mock, patch from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions import pytest -from homeassistant.components.hassio import ( - AddonError, - AddonInfo, - AddonState, - HassIO, - HassioAPIError, -) +from homeassistant.components.hassio import AddonError, AddonInfo, AddonState, HassIO from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigFlow @@ -38,6 +33,11 @@ TEST_DOMAIN_2 = "test_2" +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + class FakeConfigFlow(ConfigFlow): """Handle a config flow for the silabs multiprotocol add-on.""" @@ -247,22 +247,21 @@ async def test_option_flow_install_multi_pan_addon( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() @@ -322,7 +321,7 @@ async def test_option_flow_install_multi_pan_addon_zha( 
assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( hass @@ -336,16 +335,15 @@ async def test_option_flow_install_multi_pan_addon_zha( assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) # Check the channel is initialized from ZHA assert multipan_manager._channel == 11 @@ -417,23 +415,22 @@ async def test_option_flow_install_multi_pan_addon_zha_other_radio( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") addon_info.return_value.hostname = "core-silabs-multiprotocol" result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() @@ -453,6 +450,10 @@ async def test_option_flow_install_multi_pan_addon_zha_other_radio( } +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.not_hassio"], +) async def test_option_flow_non_hassio( hass: HomeAssistant, ) -> None: @@ -678,11 +679,8 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["step_id"] == "uninstall_addon" # Make sure the flasher addon is installed - addon_store_info.return_value = { - "installed": None, - "available": True, - "state": "not_installed", - } + addon_store_info.return_value.installed = False + addon_store_info.return_Value.available = True result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -709,7 +707,7 @@ async def test_option_flow_addon_installed_same_device_uninstall( assert result["description_placeholders"] == {"addon_name": "Silicon Labs Flasher"} await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_flasher") + install_addon.assert_called_once_with("core_silabs_flasher") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -768,6 +766,10 @@ async def test_option_flow_addon_installed_same_device_do_not_uninstall_multi_pa assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_already_running"], +) async def test_option_flow_flasher_already_running_failure( hass: HomeAssistant, addon_info, @@ -805,7 +807,7 @@ async def test_option_flow_flasher_already_running_failure( assert result["step_id"] == "uninstall_addon" # The flasher addon is already installed and running, this is bad - addon_store_info.return_value["installed"] = True + 
addon_store_info.return_value.installed = True addon_info.return_value.state = "started" result = await hass.config_entries.options.async_configure( @@ -851,11 +853,8 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["type"] is FlowResultType.FORM assert result["step_id"] == "uninstall_addon" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -873,11 +872,8 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["progress_action"] == "start_flasher_addon" assert result["description_placeholders"] == {"addon_name": "Silicon Labs Flasher"} - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True await hass.async_block_till_done() install_addon.assert_not_called() @@ -885,6 +881,10 @@ async def test_option_flow_addon_installed_same_device_flasher_already_installed assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_install_failed"], +) async def test_option_flow_flasher_install_failure( hass: HomeAssistant, addon_info, @@ -932,11 +932,8 @@ async def test_option_flow_flasher_install_failure( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "uninstall_addon" - addon_store_info.return_value = { - "installed": None, - "available": True, - "state": "not_installed", - } + addon_store_info.return_value.installed = False + addon_store_info.return_value.available = True install_addon.side_effect = [AddonError()] result = await hass.config_entries.options.async_configure( result["flow_id"], {silabs_multiprotocol_addon.CONF_DISABLE_MULTI_PAN: True} @@ -947,13 +944,17 @@ async def test_option_flow_flasher_install_failure( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_flasher") + install_addon.assert_called_once_with("core_silabs_flasher") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_start_failed"], +) async def test_option_flow_flasher_addon_flash_failure( hass: HomeAssistant, addon_info, @@ -1016,6 +1017,10 @@ async def test_option_flow_flasher_addon_flash_failure( assert result["description_placeholders"]["addon_name"] == "Silicon Labs Flasher" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_initiate_migration", side_effect=Exception("Boom!"), @@ -1077,6 +1082,10 @@ async def test_option_flow_uninstall_migration_initiate_failure( mock_initiate_migration.assert_called_once() +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_finish_migration", 
side_effect=Exception("Boom!"), @@ -1178,6 +1187,10 @@ async def test_option_flow_do_not_install_multi_pan_addon( assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_install_failed"], +) async def test_option_flow_install_multi_pan_addon_install_fails( hass: HomeAssistant, addon_store_info, @@ -1188,7 +1201,7 @@ async def test_option_flow_install_multi_pan_addon_install_fails( ) -> None: """Test installing the multi pan addon.""" - install_addon.side_effect = HassioAPIError("Boom") + install_addon.side_effect = SupervisorError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1214,13 +1227,17 @@ async def test_option_flow_install_multi_pan_addon_install_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_start_failed"], +) async def test_option_flow_install_multi_pan_addon_start_fails( hass: HomeAssistant, addon_store_info, @@ -1257,22 +1274,21 @@ async def test_option_flow_install_multi_pan_addon_start_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() @@ -1283,6 +1299,10 @@ async def test_option_flow_install_multi_pan_addon_start_fails( assert result["reason"] == "addon_start_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_set_config_failed"], +) async def test_option_flow_install_multi_pan_addon_set_options_fails( hass: HomeAssistant, addon_store_info, @@ -1293,7 +1313,7 @@ async def test_option_flow_install_multi_pan_addon_set_options_fails( ) -> None: """Test installing the multi pan addon.""" - set_addon_options.side_effect = HassioAPIError("Boom") + set_addon_options.side_effect = SupervisorError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1319,13 +1339,17 @@ async def test_option_flow_install_multi_pan_addon_set_options_fails( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_set_config_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.addon_info_failed"], +) async def test_option_flow_addon_info_fails( hass: HomeAssistant, addon_store_info, @@ -1333,7 
+1357,7 @@ async def test_option_flow_addon_info_fails( ) -> None: """Test installing the multi pan addon.""" - addon_store_info.side_effect = HassioAPIError("Boom") + addon_store_info.side_effect = SupervisorError("Boom") # Setup the config entry config_entry = MockConfigEntry( @@ -1349,6 +1373,10 @@ async def test_option_flow_addon_info_fails( assert result["reason"] == "addon_info_failed" +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_initiate_migration", side_effect=Exception("Boom!"), @@ -1396,7 +1424,7 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_1( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT @@ -1404,6 +1432,10 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_1( set_addon_options.assert_not_called() +@pytest.mark.parametrize( + "ignore_translations", + ["component.test.options.abort.zha_migration_failed"], +) @patch( "homeassistant.components.zha.radio_manager.ZhaMultiPANMigrationHelper.async_finish_migration", side_effect=Exception("Boom!"), @@ -1452,22 +1484,21 @@ async def test_option_flow_install_multi_pan_addon_zha_migration_fails_step_2( assert result["progress_action"] == "install_addon" await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") + install_addon.assert_called_once_with("core_silabs_multiprotocol") result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" set_addon_options.assert_called_once_with( - hass, "core_silabs_multiprotocol", - { - "options": { + AddonsOptions( + config={ "autoflash_firmware": True, "device": "/dev/ttyTEST123", "baudrate": "115200", "flow_control": True, } - }, + ), ) await hass.async_block_till_done() @@ -1632,7 +1663,7 @@ async def test_check_multi_pan_addon_info_error( ) -> None: """Test `check_multi_pan_addon` where the addon info cannot be read.""" - addon_store_info.side_effect = HassioAPIError("Boom") + addon_store_info.side_effect = SupervisorError("Boom") with pytest.raises(HomeAssistantError): await silabs_multiprotocol_addon.check_multi_pan_addon(hass) @@ -1669,11 +1700,8 @@ async def test_check_multi_pan_addon_auto_start( """Test `check_multi_pan_addon` auto starting the addon.""" addon_info.return_value.state = "not_running" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True # An error is raised even if we auto-start with pytest.raises(HomeAssistantError): @@ -1688,11 +1716,8 @@ async def test_check_multi_pan_addon( """Test `check_multi_pan_addon`.""" addon_info.return_value.state = "started" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True await silabs_multiprotocol_addon.check_multi_pan_addon(hass) start_addon.assert_not_called() @@ -1719,11 
+1744,8 @@ async def test_multi_pan_addon_using_device_not_running( """Test `multi_pan_addon_using_device` when the addon isn't running.""" addon_info.return_value.state = "not_running" - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "not_running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True assert ( await silabs_multiprotocol_addon.multi_pan_addon_using_device( @@ -1753,11 +1775,8 @@ async def test_multi_pan_addon_using_device( "baudrate": "115200", "flow_control": True, } - addon_store_info.return_value = { - "installed": True, - "available": True, - "state": "running", - } + addon_store_info.return_value.installed = True + addon_store_info.return_value.available = True assert ( await silabs_multiprotocol_addon.multi_pan_addon_using_device( diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index de9af6f204c839..055b63472670b6 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -159,6 +159,7 @@ async def mock_async_step_pick_firmware_zigbee(self, data): } +@pytest.mark.usefixtures("supervisor_client") @pytest.mark.parametrize( ("usb_data", "model"), [ diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index c82c08314b05be..ab6f158b211b73 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -341,6 +341,7 @@ async def mock_async_step_pick_firmware_zigbee(self, data): } +@pytest.mark.usefixtures("supervisor_client") async def test_options_flow_multipan_uninstall(hass: HomeAssistant) -> None: """Test options flow for when multi-PAN firmware is installed.""" mock_integration(hass, MockModule("hassio")) diff --git a/tests/components/homekit/test_type_covers.py b/tests/components/homekit/test_type_covers.py index 8d3b13b1856924..049f68187846ac 100644 --- a/tests/components/homekit/test_type_covers.py +++ b/tests/components/homekit/test_type_covers.py @@ -7,6 +7,7 @@ ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, CoverEntityFeature, + CoverState, ) from homeassistant.components.homekit.const import ( ATTR_OBSTRUCTION_DETECTED, @@ -31,12 +32,8 @@ ATTR_SUPPORTED_FEATURES, EVENT_HOMEASSISTANT_START, SERVICE_SET_COVER_TILT_POSITION, - STATE_CLOSED, - STATE_CLOSING, STATE_OFF, STATE_ON, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -64,13 +61,15 @@ async def test_garage_door_open_close( assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN - hass.states.async_set(entity_id, STATE_CLOSED, {ATTR_OBSTRUCTION_DETECTED: False}) + hass.states.async_set( + entity_id, CoverState.CLOSED, {ATTR_OBSTRUCTION_DETECTED: False} + ) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_CLOSED assert acc.char_target_state.value == HK_DOOR_CLOSED assert acc.char_obstruction_detected.value is False - hass.states.async_set(entity_id, STATE_OPEN, {ATTR_OBSTRUCTION_DETECTED: True}) + hass.states.async_set(entity_id, CoverState.OPEN, {ATTR_OBSTRUCTION_DETECTED: True}) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN @@ -104,7 +103,7 @@ async def test_garage_door_open_close( assert len(events) == 1 
assert events[-1].data[ATTR_VALUE] is None - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) await hass.async_block_till_done() acc.char_target_state.client_update_value(1) @@ -123,7 +122,7 @@ async def test_garage_door_open_close( assert len(events) == 3 assert events[-1].data[ATTR_VALUE] is None - hass.states.async_set(entity_id, STATE_OPEN) + hass.states.async_set(entity_id, CoverState.OPEN) await hass.async_block_till_done() acc.char_target_state.client_update_value(0) @@ -140,7 +139,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 0, @@ -159,7 +158,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -172,7 +171,7 @@ async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: "GARBAGE", @@ -221,7 +220,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_OPENING, + CoverState.OPENING, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 60, @@ -234,7 +233,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_OPENING, + CoverState.OPENING, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 70.0, @@ -247,7 +246,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_CLOSING, + CoverState.CLOSING, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -260,7 +259,7 @@ async def test_windowcovering_set_cover_position( hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -303,7 +302,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) - hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 0, @@ -322,7 +321,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) - hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: 50, @@ -335,7 +334,7 @@ async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) - hass.states.async_set( entity_id, - STATE_OPEN, + CoverState.OPEN, { ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, ATTR_CURRENT_POSITION: "GARBAGE", @@ -369,22 +368,30 @@ async def test_windowcovering_cover_set_tilt( assert acc.char_current_tilt.value == 0 assert acc.char_target_tilt.value == 0 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: None}) + hass.states.async_set( + entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: None} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == 0 assert acc.char_target_tilt.value == 0 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 100}) + hass.states.async_set( + entity_id, 
CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 100} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == 90 assert acc.char_target_tilt.value == 90 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 50}) + hass.states.async_set( + entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 50} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == 0 assert acc.char_target_tilt.value == 0 - hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_TILT_POSITION: 0}) + hass.states.async_set( + entity_id, CoverState.CLOSING, {ATTR_CURRENT_TILT_POSITION: 0} + ) await hass.async_block_till_done() assert acc.char_current_tilt.value == -90 assert acc.char_target_tilt.value == -90 @@ -465,25 +472,25 @@ async def test_windowcovering_open_close( assert acc.char_target_position.value == 0 assert acc.char_position_state.value == 2 - hass.states.async_set(entity_id, STATE_OPENING) + hass.states.async_set(entity_id, CoverState.OPENING) await hass.async_block_till_done() assert acc.char_current_position.value == 0 assert acc.char_target_position.value == 0 assert acc.char_position_state.value == 1 - hass.states.async_set(entity_id, STATE_OPEN) + hass.states.async_set(entity_id, CoverState.OPEN) await hass.async_block_till_done() assert acc.char_current_position.value == 100 assert acc.char_target_position.value == 100 assert acc.char_position_state.value == 2 - hass.states.async_set(entity_id, STATE_CLOSING) + hass.states.async_set(entity_id, CoverState.CLOSING) await hass.async_block_till_done() assert acc.char_current_position.value == 100 assert acc.char_target_position.value == 100 assert acc.char_position_state.value == 0 - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) await hass.async_block_till_done() assert acc.char_current_position.value == 0 assert acc.char_target_position.value == 0 @@ -710,20 +717,20 @@ async def test_garage_door_with_linked_obstruction_sensor( assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_CLOSED assert acc.char_target_state.value == HK_DOOR_CLOSED assert acc.char_obstruction_detected.value is False - hass.states.async_set(entity_id, STATE_OPEN) + hass.states.async_set(entity_id, CoverState.OPEN) hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_ON) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_OPEN assert acc.char_target_state.value == HK_DOOR_OPEN assert acc.char_obstruction_detected.value is True - hass.states.async_set(entity_id, STATE_CLOSED) + hass.states.async_set(entity_id, CoverState.CLOSED) hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_OFF) await hass.async_block_till_done() assert acc.char_current_state.value == HK_DOOR_CLOSED diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index d365165aca4ac8..a45e4988c36ffe 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -226,6 +226,24 @@ async def test_light_brightness( assert len(events) == 3 assert events[-1].data[ATTR_VALUE] == f"Set state to 0, brightness at 0{PERCENTAGE}" + hk_driver.set_characteristics( + { + HAP_REPR_CHARS: [ + { + HAP_REPR_AID: acc.aid, + 
HAP_REPR_IID: char_brightness_iid, + HAP_REPR_VALUE: 0, + }, + ] + }, + "mock_addr", + ) + await _wait_for_light_coalesce(hass) + assert call_turn_off + assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 4 + assert events[-1].data[ATTR_VALUE] == f"Set state to 0, brightness at 0{PERCENTAGE}" + # 0 is a special case for homekit, see "Handle Brightness" # in update_state hass.states.async_set( diff --git a/tests/components/homekit/test_type_security_systems.py b/tests/components/homekit/test_type_security_systems.py index eb662823b4c354..94b0e68e76d6e7 100644 --- a/tests/components/homekit/test_type_security_systems.py +++ b/tests/components/homekit/test_type_security_systems.py @@ -6,19 +6,14 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.homekit.const import ATTR_VALUE from homeassistant.components.homekit.type_security_systems import SecuritySystem from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, + STATE_UNAVAILABLE, STATE_UNKNOWN, ) from homeassistant.core import Event, HomeAssistant @@ -46,27 +41,27 @@ async def test_switch_set_state( assert acc.char_current_state.value == 3 assert acc.char_target_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_HOME) await hass.async_block_till_done() assert acc.char_target_state.value == 0 assert acc.char_current_state.value == 0 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_NIGHT) await hass.async_block_till_done() assert acc.char_target_state.value == 2 assert acc.char_current_state.value == 2 - hass.states.async_set(entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entity_id, AlarmControlPanelState.DISARMED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 4 @@ -161,42 +156,42 @@ async def test_arming(hass: HomeAssistant, hk_driver) -> None: acc.run() await hass.async_block_till_done() - hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_HOME) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_HOME) await hass.async_block_till_done() assert acc.char_target_state.value == 0 assert acc.char_current_state.value == 0 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_VACATION) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_VACATION) await hass.async_block_till_done() assert acc.char_target_state.value == 1 
assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_NIGHT) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_NIGHT) await hass.async_block_till_done() assert acc.char_target_state.value == 2 assert acc.char_current_state.value == 2 - hass.states.async_set(entity_id, STATE_ALARM_ARMING) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMING) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_DISARMED) + hass.states.async_set(entity_id, AlarmControlPanelState.DISARMED) await hass.async_block_till_done() assert acc.char_target_state.value == 3 assert acc.char_current_state.value == 3 - hass.states.async_set(entity_id, STATE_ALARM_ARMED_AWAY) + hass.states.async_set(entity_id, AlarmControlPanelState.ARMED_AWAY) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 1 - hass.states.async_set(entity_id, STATE_ALARM_TRIGGERED) + hass.states.async_set(entity_id, AlarmControlPanelState.TRIGGERED) await hass.async_block_till_done() assert acc.char_target_state.value == 1 assert acc.char_current_state.value == 4 @@ -317,3 +312,33 @@ async def test_supported_states(hass: HomeAssistant, hk_driver) -> None: for val in valid_target_values.values(): assert val in test_config.get("target_values") + + +@pytest.mark.parametrize( + ("state"), + [ + (None), + ("None"), + (STATE_UNKNOWN), + (STATE_UNAVAILABLE), + ], +) +async def test_handle_non_alarm_states( + hass: HomeAssistant, hk_driver, events: list[Event], state: str +) -> None: + """Test we can handle states that should not raise.""" + code = "1234" + config = {ATTR_CODE: code} + entity_id = "alarm_control_panel.test" + + hass.states.async_set(entity_id, state) + await hass.async_block_till_done() + acc = SecuritySystem(hass, hk_driver, "SecuritySystem", entity_id, 2, config) + acc.run() + await hass.async_block_till_done() + + assert acc.aid == 2 + assert acc.category == 11 # AlarmSystem + + assert acc.char_current_state.value == 3 + assert acc.char_target_state.value == 3 diff --git a/tests/components/homekit/test_type_sensors.py b/tests/components/homekit/test_type_sensors.py index ef1c124781a604..2bfddf4d4c6f3f 100644 --- a/tests/components/homekit/test_type_sensors.py +++ b/tests/components/homekit/test_type_sensors.py @@ -30,10 +30,9 @@ ATTR_UNIT_OF_MEASUREMENT, EVENT_HOMEASSISTANT_START, PERCENTAGE, - STATE_HOME, - STATE_NOT_HOME, STATE_OFF, STATE_ON, + STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfTemperature, ) @@ -535,11 +534,11 @@ async def test_binary(hass: HomeAssistant, hk_driver) -> None: await hass.async_block_till_done() assert acc.char_detected.value == 0 - hass.states.async_set(entity_id, STATE_HOME, {ATTR_DEVICE_CLASS: "opening"}) + hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: "opening"}) await hass.async_block_till_done() - assert acc.char_detected.value == 1 + assert acc.char_detected.value == 0 - hass.states.async_set(entity_id, STATE_NOT_HOME, {ATTR_DEVICE_CLASS: "opening"}) + hass.states.async_set(entity_id, STATE_UNAVAILABLE, {ATTR_DEVICE_CLASS: "opening"}) await hass.async_block_till_done() assert acc.char_detected.value == 0 @@ -579,13 +578,15 @@ async def test_motion_uses_bool(hass: HomeAssistant, hk_driver) -> None: assert acc.char_detected.value is False hass.states.async_set( - entity_id, STATE_HOME, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} + entity_id, 
STATE_UNKNOWN, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} ) await hass.async_block_till_done() - assert acc.char_detected.value is True + assert acc.char_detected.value is False hass.states.async_set( - entity_id, STATE_NOT_HOME, {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION} + entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION}, ) await hass.async_block_till_done() assert acc.char_detected.value is False diff --git a/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json b/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json new file mode 100644 index 00000000000000..a3c24eb85c39e3 --- /dev/null +++ b/tests/components/homekit_controller/fixtures/u_by_moen_ts3304.json @@ -0,0 +1,378 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "U by Moen-015F44", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Moen Incorporated", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "TS3304", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pr"], + "format": "string", + "value": "3.3.0", + "description": "Firmware Revision", + "maxLen": 64 + } + ] + }, + { + "iid": 8, + "type": "000000D7-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr"], + "format": "string", + "value": "u by moen", + "description": "Name", + "maxLen": 64 + } + ], + "linked": [11, 17, 22, 27, 32] + }, + { + "iid": 11, + "type": "000000BC-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "00000011-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr", "ev"], + "format": "float", + "value": 21.66666, + "description": "Current Temperature", + "unit": "celsius", + "minValue": 0.0, + "maxValue": 100.0, + "minStep": 0.1 + }, + { + "type": "000000B1-0000-1000-8000-0026BB765291", + "iid": 14, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Heater Cooler State", + "minValue": 0, + "maxValue": 3, + "minStep": 1 + }, + { + "type": "000000B2-0000-1000-8000-0026BB765291", + "iid": 15, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Heater Cooler State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + }, + { + "type": 
"00000012-0000-1000-8000-0026BB765291", + "iid": 16, + "perms": ["pr", "pw", "ev"], + "format": "float", + "value": 37.77777, + "description": "Heating Threshold Temperature", + "unit": "celsius", + "minValue": 15.55556, + "maxValue": 48.88888, + "minStep": 0.1 + } + ] + }, + { + "iid": 17, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 18, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 19, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 20, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 21, + "perms": ["pr"], + "format": "string", + "value": "Outlet 1", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 22, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 23, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 24, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 25, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 26, + "perms": ["pr"], + "format": "string", + "value": "Outlet 2", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 27, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 28, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 29, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 30, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 31, + "perms": ["pr"], + "format": "string", + "value": "Outlet 3", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 32, + "type": "000000D0-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "000000B0-0000-1000-8000-0026BB765291", + "iid": 33, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Active", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "000000D2-0000-1000-8000-0026BB765291", + "iid": 34, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "In Use" + }, + { + "type": "000000D5-0000-1000-8000-0026BB765291", + "iid": 35, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Valve Type" + }, + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 36, + "perms": ["pr"], + "format": "string", + "value": "Outlet 4", + "description": "Name", + "maxLen": 64 + } + ] + }, + { + "iid": 37, + "type": 
"00000010-0000-1000-8000-001D4B474349", + "characteristics": [ + { + "type": "00000011-0000-1000-8000-001D4B474349", + "iid": 38, + "perms": ["pr", "ev", "hd"], + "format": "uint8", + "value": 1 + }, + { + "type": "00000012-0000-1000-8000-001D4B474349", + "iid": 39, + "perms": ["pw", "hd"], + "format": "uint8" + }, + { + "type": "00000013-0000-1000-8000-001D4B474349", + "iid": 40, + "perms": ["pw", "hd"], + "format": "string", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-001D4B474349", + "iid": 41, + "perms": ["pw", "hd"], + "format": "string", + "maxLen": 64 + }, + { + "type": "00000015-0000-1000-8000-001D4B474349", + "iid": 42, + "perms": ["pw", "hd"], + "format": "string", + "maxLen": 64 + } + ] + }, + { + "iid": 43, + "type": "000000A2-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000037-0000-1000-8000-0026BB765291", + "iid": 44, + "perms": ["pr"], + "format": "string", + "value": "1.1.0", + "description": "Version", + "maxLen": 64 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 6a0fead65d3e25..8304d567916a7b 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -17758,7 +17758,7 @@ }), ]) # --- -# name: test_snapshots[velux_active_netatmo_co2] +# name: test_snapshots[u_by_moen_ts3304] list([ dict({ 'device': dict({ @@ -17781,15 +17781,15 @@ 'is_new': False, 'labels': list([ ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Sensor', + 'manufacturer': 'Moen Incorporated', + 'model': 'TS3304', 'model_id': None, - 'name': 'VELUX Sensor', + 'name': 'U by Moen-015F44', 'name_by_user': None, 'primary_config_entry': 'TestData', 'serial_number': '**REDACTED**', 'suggested_area': None, - 'sw_version': '16.0.0', + 'sw_version': '3.3.0', }), 'entities': list([ dict({ @@ -17805,7 +17805,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': , - 'entity_id': 'button.velux_sensor_identify', + 'entity_id': 'button.u_by_moen_015f44_identify', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -17816,20 +17816,20 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'VELUX Sensor Identify', + 'original_name': 'U by Moen-015F44 Identify', 'platform': 'homekit_controller', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unique_id': '00:00:00:00:00:00_1_1_6', 'unit_of_measurement': None, }), 'state': dict({ 'attributes': dict({ 'device_class': 'identify', - 'friendly_name': 'VELUX Sensor Identify', + 'friendly_name': 'U by Moen-015F44 Identify', }), - 'entity_id': 'button.velux_sensor_identify', + 'entity_id': 'button.u_by_moen_015f44_identify', 'state': 'unknown', }), }), @@ -17839,61 +17839,24 @@ ]), 'area_id': None, 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Carbon Dioxide sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 
'unique_id': '00:00:00:00:00:00_1_14', - 'unit_of_measurement': 'ppm', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', - 'state_class': , - 'unit_of_measurement': 'ppm', - }), - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', - 'state': '1124.0', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'target_temp_step': 1.0, }), 'categories': dict({ }), 'config_entry_id': 'TestData', 'device_class': None, 'disabled_by': None, - 'domain': 'sensor', + 'domain': 'climate', 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'entity_id': 'climate.u_by_moen_015f44', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -17902,25 +17865,35 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'VELUX Sensor Humidity sensor', + 'original_name': 'U by Moen-015F44', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': 0, + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_11', - 'unit_of_measurement': '%', + 'unit_of_measurement': None, }), 'state': dict({ 'attributes': dict({ - 'device_class': 'humidity', - 'friendly_name': 'VELUX Sensor Humidity sensor', - 'state_class': , - 'unit_of_measurement': '%', + 'current_temperature': 21.7, + 'friendly_name': 'U by Moen-015F44', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'target_temp_step': 1.0, + 'temperature': None, }), - 'entity_id': 'sensor.velux_sensor_humidity_sensor', - 'state': '69.0', + 'entity_id': 'climate.u_by_moen_015f44', + 'state': 'off', }), }), dict({ @@ -17938,7 +17911,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'entity_id': 'sensor.u_by_moen_015f44_current_temperature', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -17949,63 +17922,25 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'VELUX Sensor Temperature sensor', + 'original_name': 'U by Moen-015F44 Current Temperature', 'platform': 'homekit_controller', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', + 'unique_id': '00:00:00:00:00:00_1_11_13', 'unit_of_measurement': , }), 'state': dict({ 'attributes': dict({ 'device_class': 'temperature', - 'friendly_name': 'VELUX Sensor Temperature sensor', + 'friendly_name': 'U by Moen-015F44 Current Temperature', 'state_class': , 'unit_of_measurement': , }), - 'entity_id': 'sensor.velux_sensor_temperature_sensor', - 'state': '23.9', + 'entity_id': 'sensor.u_by_moen_015f44_current_temperature', + 'state': '21.66666', }), }), - ]), - }), - ]) -# --- -# name: test_snapshots[velux_gateway] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'VELUX', - 'model': 'VELUX Gateway', - 'model_id': 
None, - 'name': 'VELUX Gateway', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': 'a1a11a1', - 'suggested_area': None, - 'sw_version': '70', - }), - 'entities': list([ dict({ 'entry': dict({ 'aliases': list([ @@ -18017,9 +17952,9 @@ 'config_entry_id': 'TestData', 'device_class': None, 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_gateway_identify', + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -18028,59 +17963,24 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'VELUX Gateway Identify', + 'original_name': 'U by Moen-015F44', 'platform': 'homekit_controller', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_6', + 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': None, }), 'state': dict({ 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Gateway Identify', + 'friendly_name': 'U by Moen-015F44', }), - 'entity_id': 'button.velux_gateway_identify', - 'state': 'unknown', + 'entity_id': 'switch.u_by_moen_015f44', + 'state': 'off', }), }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:2', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'VELUX', - 'model': 'VELUX Sensor', - 'model_id': None, - 'name': 'VELUX Sensor', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': 'a11b111', - 'suggested_area': None, - 'sw_version': '16', - }), - 'entities': list([ dict({ 'entry': dict({ 'aliases': list([ @@ -18092,9 +17992,9 @@ 'config_entry_id': 'TestData', 'device_class': None, 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_sensor_identify', + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44_outlet_1', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -18103,23 +18003,23 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'VELUX Sensor Identify', + 'original_name': 'U by Moen-015F44 Outlet 1', 'platform': 'homekit_controller', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_1_7', + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_17', 'unit_of_measurement': None, }), 'state': dict({ 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Sensor Identify', + 'friendly_name': 'U by Moen-015F44 Outlet 1', + 'in_use': False, }), - 'entity_id': 'button.velux_sensor_identify', - 'state': 'unknown', + 'entity_id': 'switch.u_by_moen_015f44_outlet_1', + 'state': 'off', }), }), dict({ @@ -18127,17 +18027,15 @@ 'aliases': list([ ]), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'categories': dict({ }), 'config_entry_id': 'TestData', 'device_class': None, 'disabled_by': None, - 'domain': 'sensor', + 'domain': 'switch', 'entity_category': None, - 'entity_id': 
'sensor.velux_sensor_carbon_dioxide_sensor', + 'entity_id': 'switch.u_by_moen_015f44_outlet_2', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -18146,25 +18044,23 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'VELUX Sensor Carbon Dioxide sensor', + 'original_name': 'U by Moen-015F44 Outlet 2', 'platform': 'homekit_controller', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_14', - 'unit_of_measurement': 'ppm', + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_22', + 'unit_of_measurement': None, }), 'state': dict({ 'attributes': dict({ - 'device_class': 'carbon_dioxide', - 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', - 'state_class': , - 'unit_of_measurement': 'ppm', + 'friendly_name': 'U by Moen-015F44 Outlet 2', + 'in_use': False, }), - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', - 'state': '400', + 'entity_id': 'switch.u_by_moen_015f44_outlet_2', + 'state': 'off', }), }), dict({ @@ -18172,17 +18068,15 @@ 'aliases': list([ ]), 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), + 'capabilities': None, 'categories': dict({ }), 'config_entry_id': 'TestData', 'device_class': None, 'disabled_by': None, - 'domain': 'sensor', + 'domain': 'switch', 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'entity_id': 'switch.u_by_moen_015f44_outlet_3', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -18191,1125 +18085,25 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'VELUX Sensor Humidity sensor', + 'original_name': 'U by Moen-015F44 Outlet 3', 'platform': 'homekit_controller', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_11', - 'unit_of_measurement': '%', + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_27', + 'unit_of_measurement': None, }), 'state': dict({ 'attributes': dict({ - 'device_class': 'humidity', - 'friendly_name': 'VELUX Sensor Humidity sensor', - 'state_class': , - 'unit_of_measurement': '%', + 'friendly_name': 'U by Moen-015F44 Outlet 3', + 'in_use': False, }), - 'entity_id': 'sensor.velux_sensor_humidity_sensor', - 'state': '58', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_temperature_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Sensor Temperature sensor', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_8', - 'unit_of_measurement': , - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'temperature', - 'friendly_name': 'VELUX Sensor Temperature sensor', - 'state_class': , - 'unit_of_measurement': , - }), - 'entity_id': 'sensor.velux_sensor_temperature_sensor', - 'state': '18.9', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 
'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:3', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'VELUX', - 'model': 'VELUX Window', - 'model_id': None, - 'name': 'VELUX Window', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '1111111a114a111a', - 'suggested_area': None, - 'sw_version': '48', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_window_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Window Identify', - }), - 'entity_id': 'button.velux_window_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_window_roof_window', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Roof Window', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'device_class': 'window', - 'friendly_name': 'VELUX Window Roof Window', - 'supported_features': , - }), - 'entity_id': 'cover.velux_window_roof_window', - 'state': 'closed', - }), - }), - ]), - }), - ]) -# --- -# name: test_snapshots[velux_somfy_venetian_blinds] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:5', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '15.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 
'button.velux_external_cover_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_5_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_5_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:8', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_8_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 
'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_8_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 45, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds_2', - 'state': 'open', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:11', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '15.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_11_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify_3', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_11_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds_3', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - 
]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:12', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX External Cover', - 'model_id': None, - 'name': 'VELUX External Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '15.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_external_cover_identify_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX External Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_12_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX External Cover Identify', - }), - 'entity_id': 'button.velux_external_cover_identify_4', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_external_cover_awning_blinds_4', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX External Cover Awning Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_12_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'friendly_name': 'VELUX External Cover Awning Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_external_cover_awning_blinds_4', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Gateway', - 'model_id': None, - 'name': 'VELUX Gateway', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '132.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_gateway_identify', - 'has_entity_name': False, - 
'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Gateway Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_6', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Gateway Identify', - }), - 'entity_id': 'button.velux_gateway_identify', - 'state': 'unknown', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:9', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_9_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_internal_cover_venetian_blinds', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Venetian Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_9_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'current_tilt_position': 100, - 'friendly_name': 'VELUX Internal Cover Venetian Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_internal_cover_venetian_blinds', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 
'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:13', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_13_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Venetian Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_13_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 100, - 'current_tilt_position': 0, - 'friendly_name': 'VELUX Internal Cover Venetian Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', - 'state': 'open', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:14', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_14_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', - }), - 'entity_id': 'button.velux_internal_cover_identify_3', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'VELUX Internal Cover Venetian Blinds', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_14_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'current_tilt_position': 100, - 'friendly_name': 'VELUX Internal Cover Venetian Blinds', - 'supported_features': , - }), - 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', - 'state': 'closed', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:15', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Internal Cover', - 'model_id': None, - 'name': 'VELUX Internal Cover', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ + 'entity_id': 'switch.u_by_moen_015f44_outlet_3', + 'state': 'off', + }), + }), dict({ 'entry': dict({ 'aliases': list([ @@ -19321,9 +18115,9 @@ 'config_entry_id': 'TestData', 'device_class': None, 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_internal_cover_identify_4', + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.u_by_moen_015f44_outlet_4', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -19332,27 +18126,31 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'VELUX Internal Cover Identify', + 'original_name': 'U by Moen-015F44 Outlet 4', 'platform': 'homekit_controller', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_15_1_7', + 'translation_key': 'valve', + 'unique_id': '00:00:00:00:00:00_1_32', 'unit_of_measurement': None, }), 'state': dict({ 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Internal Cover Identify', + 'friendly_name': 'U by Moen-015F44 Outlet 4', + 'in_use': False, }), - 'entity_id': 'button.velux_internal_cover_identify_4', - 'state': 'unknown', + 'entity_id': 
'switch.u_by_moen_015f44_outlet_4', + 'state': 'off', }), }), ]), }), + ]) +# --- +# name: test_snapshots[velux_active_netatmo_co2] + list([ dict({ 'device': dict({ 'area_id': None, @@ -19368,7 +18166,7 @@ 'identifiers': list([ list([ 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:2', + '00:00:00:00:00:00:aid:1', ]), ]), 'is_new': False, @@ -19414,7 +18212,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_1_7', + 'unique_id': '00:00:00:00:00:00_1_1_7', 'unit_of_measurement': None, }), 'state': dict({ @@ -19457,7 +18255,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_14', + 'unique_id': '00:00:00:00:00:00_1_14', 'unit_of_measurement': 'ppm', }), 'state': dict({ @@ -19502,7 +18300,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_11', + 'unique_id': '00:00:00:00:00:00_1_11', 'unit_of_measurement': '%', }), 'state': dict({ @@ -19547,7 +18345,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_8', + 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': , }), 'state': dict({ @@ -19563,6 +18361,10 @@ }), ]), }), + ]) +# --- +# name: test_snapshots[velux_gateway] + list([ dict({ 'device': dict({ 'area_id': None, @@ -19578,21 +18380,96 @@ 'identifiers': list([ list([ 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:3', + '00:00:00:00:00:00:aid:1', ]), ]), 'is_new': False, 'labels': list([ ]), - 'manufacturer': 'Netatmo', + 'manufacturer': 'VELUX', + 'model': 'VELUX Gateway', + 'model_id': None, + 'name': 'VELUX Gateway', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': 'a1a11a1', + 'suggested_area': None, + 'sw_version': '70', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_gateway_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Gateway Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_6', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Gateway Identify', + }), + 'entity_id': 'button.velux_gateway_identify', + 'state': 'unknown', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:2', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'VELUX', 'model': 'VELUX Sensor', 'model_id': None, 'name': 'VELUX Sensor', 'name_by_user': None, 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', + 'serial_number': 'a11b111', 'suggested_area': None, - 'sw_version': '16.0.0', + 'sw_version': '16', }), 'entities': list([ dict({ @@ 
-19608,7 +18485,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': , - 'entity_id': 'button.velux_sensor_identify_2', + 'entity_id': 'button.velux_sensor_identify', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -19624,7 +18501,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_1_7', + 'unique_id': '00:00:00:00:00:00_2_1_7', 'unit_of_measurement': None, }), 'state': dict({ @@ -19632,7 +18509,7 @@ 'device_class': 'identify', 'friendly_name': 'VELUX Sensor Identify', }), - 'entity_id': 'button.velux_sensor_identify_2', + 'entity_id': 'button.velux_sensor_identify', 'state': 'unknown', }), }), @@ -19651,7 +18528,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -19667,7 +18544,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_14', + 'unique_id': '00:00:00:00:00:00_2_14', 'unit_of_measurement': 'ppm', }), 'state': dict({ @@ -19677,8 +18554,8 @@ 'state_class': , 'unit_of_measurement': 'ppm', }), - 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', - 'state': '1074.0', + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'state': '400', }), }), dict({ @@ -19696,7 +18573,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', + 'entity_id': 'sensor.velux_sensor_humidity_sensor', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -19712,7 +18589,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_11', + 'unique_id': '00:00:00:00:00:00_2_11', 'unit_of_measurement': '%', }), 'state': dict({ @@ -19722,8 +18599,8 @@ 'state_class': , 'unit_of_measurement': '%', }), - 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', - 'state': '64.0', + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'state': '58', }), }), dict({ @@ -19741,7 +18618,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', + 'entity_id': 'sensor.velux_sensor_temperature_sensor', 'has_entity_name': False, 'hidden_by': None, 'icon': None, @@ -19757,7 +18634,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_3_8', + 'unique_id': '00:00:00:00:00:00_2_8', 'unit_of_measurement': , }), 'state': dict({ @@ -19767,8 +18644,8 @@ 'state_class': , 'unit_of_measurement': , }), - 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', - 'state': '24.5', + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'state': '18.9', }), }), ]), @@ -19788,21 +18665,21 @@ 'identifiers': list([ list([ 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:4', + '00:00:00:00:00:00:aid:3', ]), ]), 'is_new': False, 'labels': list([ ]), - 'manufacturer': 'Netatmo', + 'manufacturer': 'VELUX', 'model': 'VELUX Window', 'model_id': None, 'name': 'VELUX Window', 'name_by_user': None, 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', + 'serial_number': '1111111a114a111a', 'suggested_area': None, - 'sw_version': '0.0.0', + 'sw_version': '48', }), 'entities': list([ dict({ @@ -19834,7 +18711,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
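The test_snapshots[...] blocks in this part of the diff are syrupy-generated dumps of each device, its entity registry entries, and the current entity states. As a rough sketch of the kind of test that produces them (the function name is illustrative and the real test's serializer details, such as serial-number redaction, are omitted):

# Sketch only: an assumed shape for a snapshot test that yields the
# device/entities/state structure seen in the blocks above and below.
from syrupy.assertion import SnapshotAssertion

from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er


async def snapshot_all_devices(
    hass: HomeAssistant, config_entry_id: str, snapshot: SnapshotAssertion
) -> None:
    """Dump every device with its registry entries and current states."""
    device_registry = dr.async_get(hass)
    entity_registry = er.async_get(hass)

    dump = []
    for device in dr.async_entries_for_config_entry(device_registry, config_entry_id):
        entities = []
        for entry in er.async_entries_for_device(
            entity_registry, device.id, include_disabled_entities=True
        ):
            # Each snapshot item pairs the registry entry with the live state,
            # matching the 'entry'/'state' keys in the dumps above.
            entities.append({"entry": entry, "state": hass.states.get(entry.entity_id)})
        dump.append({"device": device, "entities": entities})

    assert dump == snapshot

Regenerating these files after an intentional behaviour change is typically done by rerunning the affected tests with syrupy's --snapshot-update flag.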
None, - 'unique_id': '00:00:00:00:00:00_4_1_7', + 'unique_id': '00:00:00:00:00:00_3_1_7', 'unit_of_measurement': None, }), 'state': dict({ @@ -19875,7 +18752,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_4_8', + 'unique_id': '00:00:00:00:00:00_3_8', 'unit_of_measurement': None, }), 'state': dict({ @@ -19891,124 +18768,6 @@ }), ]), }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:7', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'Netatmo', - 'model': 'VELUX Window', - 'model_id': None, - 'name': 'VELUX Window', - 'name_by_user': None, - 'primary_config_entry': 'TestData', - 'serial_number': '**REDACTED**', - 'suggested_area': None, - 'sw_version': '0.0.0', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.velux_window_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_7_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'VELUX Window Identify', - }), - 'entity_id': 'button.velux_window_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.velux_window_roof_window_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VELUX Window Roof Window', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_7_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'current_position': 0, - 'device_class': 'window', - 'friendly_name': 'VELUX Window Roof Window', - 'supported_features': , - }), - 'entity_id': 'cover.velux_window_roof_window_2', - 'state': 'closed', - }), - }), - ]), - }), ]) # --- # name: test_snapshots[velux_window] diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py index 76935d314a520e..62c73af9977486 100644 --- a/tests/components/homekit_controller/test_climate.py +++ b/tests/components/homekit_controller/test_climate.py @@ -6,6 +6,7 @@ from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, + CurrentFanStateValues, CurrentHeaterCoolerStateValues, SwingModeValues, TargetHeaterCoolerStateValues, @@ -66,6 +67,9 @@ def 
create_thermostat_service(accessory: Accessory) -> None: char = service.add_char(CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT) char.value = 0 + char = service.add_char(CharacteristicsTypes.FAN_STATE_CURRENT) + char.value = 0 + def create_thermostat_service_min_max(accessory: Accessory) -> None: """Define thermostat characteristics.""" @@ -648,6 +652,18 @@ async def test_hvac_mode_vs_hvac_action( assert state.state == "heat" assert state.attributes["hvac_action"] == "idle" + # Simulate the fan running while the heat/cool is idle + await helper.async_update( + ServicesTypes.THERMOSTAT, + { + CharacteristicsTypes.FAN_STATE_CURRENT: CurrentFanStateValues.ACTIVE, + }, + ) + + state = await helper.poll_and_get_state() + assert state.state == "heat" + assert state.attributes["hvac_action"] == "fan" + # Simulate that current temperature is below target temp # Heating might be on and hvac_action currently 'heat' await helper.async_update( diff --git a/tests/components/homekit_controller/test_switch.py b/tests/components/homekit_controller/test_switch.py index a2586f7355e140..d841323bd59a0c 100644 --- a/tests/components/homekit_controller/test_switch.py +++ b/tests/components/homekit_controller/test_switch.py @@ -27,6 +27,14 @@ def create_switch_service(accessory: Accessory) -> None: outlet_in_use.value = False +def create_faucet_service(accessory: Accessory) -> None: + """Define faucet characteristics.""" + service = accessory.add_service(ServicesTypes.FAUCET) + + active_char = service.add_char(CharacteristicsTypes.ACTIVE) + active_char.value = False + + def create_valve_service(accessory: Accessory) -> None: """Define valve characteristics.""" service = accessory.add_service(ServicesTypes.VALVE) @@ -115,6 +123,58 @@ async def test_switch_read_outlet_state( assert switch_1.attributes["outlet_in_use"] is True +async def test_faucet_change_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that we can turn a HomeKit outlet on and off again.""" + helper = await setup_test_component(hass, get_next_aid(), create_faucet_service) + + await hass.services.async_call( + "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True + ) + helper.async_assert_service_values( + ServicesTypes.FAUCET, + { + CharacteristicsTypes.ACTIVE: 1, + }, + ) + + await hass.services.async_call( + "switch", "turn_off", {"entity_id": "switch.testdevice"}, blocking=True + ) + helper.async_assert_service_values( + ServicesTypes.FAUCET, + { + CharacteristicsTypes.ACTIVE: 0, + }, + ) + + +async def test_faucet_read_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that we can read the state of a HomeKit outlet accessory.""" + helper = await setup_test_component(hass, get_next_aid(), create_faucet_service) + + # Initial state is that the switch is off and the outlet isn't in use + switch_1 = await helper.poll_and_get_state() + assert switch_1.state == "off" + + # Simulate that someone switched on the device in the real world not via HA + switch_1 = await helper.async_update( + ServicesTypes.FAUCET, + {CharacteristicsTypes.ACTIVE: True}, + ) + assert switch_1.state == "on" + + # Simulate that device switched off in the real world not via HA + switch_1 = await helper.async_update( + ServicesTypes.FAUCET, + {CharacteristicsTypes.ACTIVE: False}, + ) + assert switch_1.state == "off" + + async def test_valve_change_active_state( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: diff --git 
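The test_climate.py hunk above adds a FAN_STATE_CURRENT characteristic to the thermostat fixture and asserts that a running fan on an otherwise idle unit is surfaced as hvac_action "fan". A minimal sketch of that rule, written as a standalone helper rather than the integration's actual implementation, assuming only the symbols already imported in the test:

# Illustrative only: the mapping the new assertion in
# test_hvac_mode_vs_hvac_action pins down.
from aiohomekit.model.characteristics import CurrentFanStateValues

from homeassistant.components.climate import HVACAction


def apply_fan_override(action: HVACAction, fan_state: int | None) -> HVACAction:
    """Report 'fan' when the unit is idle but its fan characteristic is ACTIVE."""
    if action == HVACAction.IDLE and fan_state == CurrentFanStateValues.ACTIVE:
        return HVACAction.FAN
    return action


# Heat mode, idle heater, fan blowing -> hvac_action becomes "fan".
assert apply_fan_override(HVACAction.IDLE, CurrentFanStateValues.ACTIVE) == HVACAction.FAN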
a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json index e67ffd78467153..7a3d3f06b0956b 100644 --- a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json +++ b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json @@ -1805,93 +1805,164 @@ "updateState": "UP_TO_DATE" }, "3014F7110000000000000049": { - "availableFirmwareVersion": "1.0.8", + "availableFirmwareVersion": "1.4.8", "connectionType": "HMIP_RF", - "firmwareVersion": "1.0.8", - "firmwareVersionInteger": 65544, + "deviceArchetype": "HMIP", + "firmwareVersion": "1.4.8", + "firmwareVersionInteger": 66568, "functionalChannels": { "0": { + "busConfigMismatch": null, "coProFaulty": false, "coProRestartNeeded": false, "coProUpdateFailure": false, - "configPending": false, + "configPending": true, + "controlsMountingOrientation": null, "coolingEmergencyValue": 0.0, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + "deviceDriveError": null, + "deviceDriveModeError": null, "deviceId": "3014F7110000000000000049", + "deviceOperationMode": null, "deviceOverheated": false, "deviceOverloaded": false, + "devicePowerFailureDetected": false, "deviceUndervoltage": false, + "displayContrast": null, "dutyCycle": false, "frostProtectionTemperature": 8.0, "functionalChannelType": "DEVICE_BASE_FLOOR_HEATING", "groupIndex": 0, - "groups": [], - "heatingEmergencyValue": 0.25, + "groups": ["00000000-0000-0000-0000-000000000005"], + "heatingEmergencyValue": 0.05, "index": 0, "label": "", + "lockJammed": null, "lowBat": null, "minimumFloorHeatingValvePosition": 0.0, - "pulseWidthModulationAtLowFloorHeatingValvePositionEnabled": true, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "pulseWidthModulationAtLowFloorHeatingValvePositionEnabled": false, "routerModuleEnabled": false, "routerModuleSupported": false, - "rssiDeviceValue": -55, + "rssiDeviceValue": -83, "rssiPeerValue": null, + "sensorCommunicationError": null, + "sensorError": null, + "shortCircuitDataLine": null, "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, "IFeatureDeviceCoProError": false, "IFeatureDeviceCoProRestart": false, "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": false, "IFeatureDeviceOverheated": false, "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": false, + "IFeatureDeviceSensorError": false, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, "IFeatureDeviceTemperatureOutOfRange": false, "IFeatureDeviceUndervoltage": false, "IFeatureMinimumFloorHeatingValvePosition": true, - "IFeaturePulseWidthModulationAtLowFloorHeatingValvePosition": true + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": false, + "IFeaturePulseWidthModulationAtLowFloorHeatingValvePosition": true, + "IFeatureRssiValue": true, + "IFeatureShortCircuitDataLine": false, + 
"IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": false, + "IOptionalFeatureMountingOrientation": false }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, "temperatureOutOfRange": false, "unreach": false, "valveProtectionDuration": 5, "valveProtectionSwitchingInterval": 14 }, "1": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 1, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000022", + "00000000-0000-0000-0000-000000000023" + ], "index": 1, - "label": "", + "label": "Heizkreislauf (1) OG Bad r", + "valvePosition": 0.475, "valveState": "ADAPTION_DONE" }, "10": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 10, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000030", + "00000000-0000-0000-0000-000000000031" + ], "index": 10, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (10) OG AZ rechts", + "valvePosition": 0.385, + "valveState": "ADAPTION_DONE" }, "11": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 11, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000030", + "00000000-0000-0000-0000-000000000031" + ], "index": 11, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (11) OG AZ links", + "valvePosition": 0.385, + "valveState": "ADAPTION_DONE" }, "12": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 12, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000022", + "00000000-0000-0000-0000-000000000023" + ], "index": 12, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (12) OG Bad Heizk\u00f6rper", + "valvePosition": 0.385, + "valveState": "ADAPTION_DONE" }, "13": { "deviceId": "3014F7110000000000000049", "functionalChannelType": "HEAT_DEMAND_CHANNEL", "groupIndex": 0, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000058", + "00000000-0000-0000-0000-000000000059" + ], "index": 13, "label": "" }, @@ -1899,7 +1970,7 @@ "deviceId": "3014F7110000000000000049", "functionalChannelType": "DEHUMIDIFIER_DEMAND_CHANNEL", "groupIndex": 0, - "groups": [], + "groups": ["00000000-0000-0000-0000-000000000060"], "index": 14, "label": "" }, @@ -1907,89 +1978,136 @@ "deviceId": "3014F7110000000000000049", "functionalChannelType": "CHANGE_OVER_CHANNEL", "groupIndex": 0, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000061", + "00000000-0000-0000-0000-000000000062", + "00000000-0000-0000-0000-000000000063", + "00000000-0000-0000-0000-000000000064" + ], "index": 15, "label": "" }, "2": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 2, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000022", + "00000000-0000-0000-0000-000000000023" + ], 
"index": 2, - "label": "", + "label": "Heizkreislauf (2) OG Bad l", + "valvePosition": 0.385, "valveState": "ADAPTION_DONE" }, "3": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 3, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 3, - "label": "", + "label": "Heizkreislauf (3) OG WZ rechts", + "valvePosition": 0.0, "valveState": "ADAPTION_DONE" }, "4": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 4, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 4, - "label": "", + "label": "Heizkreislauf (4) OG WZ Mitte rechts", + "valvePosition": 0.0, "valveState": "ADAPTION_DONE" }, "5": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 5, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 5, - "label": "", + "label": "Heizkreislauf (5) OG WZ Mitte links", + "valvePosition": 0.0, "valveState": "ADAPTION_DONE" }, "6": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 6, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 6, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (6) OG WZ links", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" }, "7": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 7, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000017", + "00000000-0000-0000-0000-000000000018" + ], "index": 7, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (7) OG K\u00fcche", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" }, "8": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 8, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000026", + "00000000-0000-0000-0000-000000000027" + ], "index": 8, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (8) OG SZ rechts", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" }, "9": { + "channelRole": "FLOOR_HEATING_COOLING_CONTROLLER", "deviceId": "3014F7110000000000000049", "functionalChannelType": "FLOOR_TERMINAL_BLOCK_MECHANIC_CHANNEL", "groupIndex": 9, - "groups": [], + "groups": [ + "00000000-0000-0000-0000-000000000026", + "00000000-0000-0000-0000-000000000027" + ], "index": 9, - "label": "", - "valveState": "ADJUSTMENT_TOO_SMALL" + "label": "Heizkreislauf (9) OG SZ links", + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" } }, "homeId": "00000000-0000-0000-0000-000000000001", "id": "3014F7110000000000000049", - "label": "Fu\u00dfbodenheizungsaktor OG motorisch", - "lastStatusUpdate": 1577486092047, + "label": "Fu\u00dfbodenheizungsaktor", + "lastStatusUpdate": 
1704379652281, "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, "manufacturerCode": 1, + "measuredAttributes": {}, "modelId": 365, "modelType": "HmIP-FALMOT-C12", "oem": "eQ-3", @@ -3237,6 +3355,173 @@ "type": "BRAND_SWITCH_NOTIFICATION_LIGHT", "updateState": "UP_TO_DATE" }, + "3014F711000000000000BSL2": { + "availableFirmwareVersion": "2.0.2", + "connectionType": "HMIP_RF", + "deviceArchetype": "HMIP", + "firmwareVersion": "2.0.2", + "firmwareVersionInteger": 131074, + "functionalChannels": { + "0": { + "busConfigMismatch": null, + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "controlsMountingOrientation": null, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + "deviceDriveError": null, + "deviceDriveModeError": null, + "deviceId": "3014F711000000000000BSL2", + "deviceOperationMode": null, + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "displayContrast": null, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000007"], + "index": 0, + "label": "", + "lockJammed": null, + "lowBat": null, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -74, + "rssiPeerValue": -75, + "sensorCommunicationError": null, + "sensorError": null, + "shortCircuitDataLine": null, + "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": true, + "IFeatureDeviceOverheated": true, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": false, + "IFeatureDeviceSensorError": false, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false, + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": true, + "IFeatureRssiValue": true, + "IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": false, + "IOptionalFeatureMountingOrientation": false + }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "channelRole": null, + "deviceId": "3014F711000000000000BSL2", + "functionalChannelType": "SWITCH_CHANNEL", + "groupIndex": 1, + "groups": [], + "index": 1, + "internalLinkConfiguration": { + "firstInputAction": "OFF", + 
"internalLinkConfigurationType": "DOUBLE_INPUT_SWITCH", + "longPressOnTimeEnabled": false, + "onTime": 111600.0, + "secondInputAction": "ON" + }, + "label": "", + "on": false, + "powerUpSwitchState": "PERMANENT_OFF", + "profileMode": "AUTOMATIC", + "supportedOptionalFeatures": { + "IFeatureAccessAuthorizationActuatorChannel": false, + "IFeatureGarageGroupActuatorChannel": false, + "IFeatureLightGroupActuatorChannel": false, + "IFeatureLightProfileActuatorChannel": false, + "IOptionalFeatureInternalLinkConfiguration": true, + "IOptionalFeaturePowerUpSwitchState": true + }, + "userDesiredProfileMode": "AUTOMATIC" + }, + "2": { + "channelRole": "NOTIFICATION_LIGHT_DIMMING_ACTUATOR", + "deviceId": "3014F711000000000000BSL2", + "dimLevel": 0.0, + "functionalChannelType": "NOTIFICATION_LIGHT_CHANNEL", + "groupIndex": 2, + "groups": ["00000000-0000-0000-0000-000000000021"], + "index": 2, + "label": "Led Unten", + "on": false, + "opticalSignalBehaviour": "BLINKING_MIDDLE", + "profileMode": "AUTOMATIC", + "simpleRGBColorState": "TURQUOISE", + "supportedOptionalFeatures": { + "IFeatureOpticalSignalBehaviourState": true + }, + "userDesiredProfileMode": "AUTOMATIC" + }, + "3": { + "channelRole": "NOTIFICATION_LIGHT_DIMMING_ACTUATOR", + "deviceId": "3014F711000000000000BSL2", + "dimLevel": 0.25, + "functionalChannelType": "NOTIFICATION_LIGHT_CHANNEL", + "groupIndex": 3, + "groups": ["00000000-0000-0000-0000-000000000021"], + "index": 3, + "label": "Led Oben", + "on": true, + "opticalSignalBehaviour": "BLINKING_MIDDLE", + "profileMode": "AUTOMATIC", + "simpleRGBColorState": "GREEN", + "supportedOptionalFeatures": { + "IFeatureOpticalSignalBehaviourState": true + }, + "userDesiredProfileMode": "AUTOMATIC" + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F711000000000000BSL2", + "label": "BSL2", + "lastStatusUpdate": 1714910246419, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, + "manufacturerCode": 1, + "measuredAttributes": {}, + "modelId": 360, + "modelType": "HmIP-BSL", + "oem": "eQ-3", + "permanentlyReachable": true, + "serializedGlobalTradeItemNumber": "3014F711000000000000BSL2", + "type": "BRAND_SWITCH_NOTIFICATION_LIGHT", + "updateState": "UP_TO_DATE" + }, "3014F711SLO0000000000026": { "availableFirmwareVersion": "0.0.0", "connectionType": "HMIP_RF", diff --git a/tests/components/homematicip_cloud/helper.py b/tests/components/homematicip_cloud/helper.py index d42b9602d38482..800811235195bc 100644 --- a/tests/components/homematicip_cloud/helper.py +++ b/tests/components/homematicip_cloud/helper.py @@ -186,6 +186,10 @@ def update_home(self, json_state, clearConfig: bool = False): def _generate_mocks(self): """Generate mocks for groups and devices.""" self.devices = [_get_mock(device) for device in self.devices] + for device in self.devices: + device.functionalChannels = [ + _get_mock(ch) for ch in device.functionalChannels + ] self.groups = [_get_mock(group) for group in self.groups] diff --git a/tests/components/homematicip_cloud/test_alarm_control_panel.py b/tests/components/homematicip_cloud/test_alarm_control_panel.py index cf27aed7a848bb..094308862f63b2 100644 --- a/tests/components/homematicip_cloud/test_alarm_control_panel.py +++ b/tests/components/homematicip_cloud/test_alarm_control_panel.py @@ -4,14 +4,9 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + AlarmControlPanelState, ) from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from 
homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, -) from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -83,7 +78,7 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, internal_active=True, external_active=True ) - assert hass.states.get(entity_id).state is STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await hass.services.async_call( "alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True @@ -91,7 +86,7 @@ async def test_hmip_alarm_control_panel( assert home.mock_calls[-1][0] == "set_security_zones_activation" assert home.mock_calls[-1][1] == (False, True) await _async_manipulate_security_zones(hass, home, external_active=True) - assert hass.states.get(entity_id).state is STATE_ALARM_ARMED_HOME + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME await hass.services.async_call( "alarm_control_panel", "alarm_disarm", {"entity_id": entity_id}, blocking=True @@ -99,7 +94,7 @@ async def test_hmip_alarm_control_panel( assert home.mock_calls[-1][0] == "set_security_zones_activation" assert home.mock_calls[-1][1] == (False, False) await _async_manipulate_security_zones(hass, home) - assert hass.states.get(entity_id).state is STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( "alarm_control_panel", "alarm_arm_away", {"entity_id": entity_id}, blocking=True @@ -109,7 +104,7 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, internal_active=True, external_active=True, alarm_triggered=True ) - assert hass.states.get(entity_id).state is STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED await hass.services.async_call( "alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True @@ -119,4 +114,4 @@ async def test_hmip_alarm_control_panel( await _async_manipulate_security_zones( hass, home, external_active=True, alarm_triggered=True ) - assert hass.states.get(entity_id).state is STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED diff --git a/tests/components/homematicip_cloud/test_cover.py b/tests/components/homematicip_cloud/test_cover.py index 4d32ae547ef100..bcafa68917219e 100644 --- a/tests/components/homematicip_cloud/test_cover.py +++ b/tests/components/homematicip_cloud/test_cover.py @@ -6,9 +6,10 @@ ATTR_CURRENT_POSITION, ATTR_CURRENT_TILT_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN -from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNKNOWN +from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -50,7 +51,7 @@ async def test_hmip_cover_shutter( assert hmip_device.mock_calls[-1][1] == (0, 1) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -64,7 +65,7 @@ async def test_hmip_cover_shutter( assert hmip_device.mock_calls[-1][1] == (0.5, 1) await 
async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -75,7 +76,7 @@ async def test_hmip_cover_shutter( assert hmip_device.mock_calls[-1][1] == (1, 1) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -105,7 +106,7 @@ async def test_hmip_cover_slats( hass, mock_hap, entity_id, entity_name, device_model ) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -119,7 +120,7 @@ async def test_hmip_cover_slats( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -134,7 +135,7 @@ async def test_hmip_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 1, "slatsLevel": 0.5} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -146,7 +147,7 @@ async def test_hmip_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 1, "slatsLevel": 1} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -185,7 +186,7 @@ async def test_hmip_multi_cover_slats( await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -199,7 +200,7 @@ async def test_hmip_multi_cover_slats( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0, channel=4) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -214,7 +215,7 @@ async def test_hmip_multi_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 4, "slatsLevel": 0.5} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert 
ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -226,7 +227,7 @@ async def test_hmip_multi_cover_slats( assert hmip_device.mock_calls[-1][2] == {"channelIndex": 4, "slatsLevel": 1} await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1, channel=4) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -261,7 +262,7 @@ async def test_hmip_blind_module( hass, mock_hap, entity_id, entity_name, device_model ) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 5 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 service_call_counter = len(hmip_device.mock_calls) @@ -287,7 +288,7 @@ async def test_hmip_blind_module( assert hmip_device.mock_calls[-1][2] == {"primaryShadingLevel": 0} ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -310,7 +311,7 @@ async def test_hmip_blind_module( assert hmip_device.mock_calls[-1][0] == "set_primary_shading_level" assert hmip_device.mock_calls[-1][2] == {"primaryShadingLevel": 0.5} ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -331,7 +332,7 @@ async def test_hmip_blind_module( } ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 @@ -385,7 +386,7 @@ async def test_hmip_garage_door_tormatic( assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -396,7 +397,7 @@ async def test_hmip_garage_door_tormatic( assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -434,7 +435,7 @@ async def test_hmip_garage_door_hoermann( assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -445,7 +446,7 @@ async def test_hmip_garage_door_hoermann( assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,) await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == 
CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -481,7 +482,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (0,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 await hass.services.async_call( @@ -495,7 +496,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (0.5,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -506,7 +507,7 @@ async def test_hmip_cover_shutter_group( assert hmip_device.mock_calls[-1][1] == (1,) await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 await hass.services.async_call( @@ -536,7 +537,7 @@ async def test_hmip_cover_slats_group( await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_CLOSED + assert ha_state.state == CoverState.CLOSED assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 service_call_counter = len(hmip_device.mock_calls) @@ -557,7 +558,7 @@ async def test_hmip_cover_slats_group( await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 @@ -572,7 +573,7 @@ async def test_hmip_cover_slats_group( assert hmip_device.mock_calls[-1][1] == (0.5,) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -584,7 +585,7 @@ async def test_hmip_cover_slats_group( assert hmip_device.mock_calls[-1][1] == (1,) await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1) ha_state = hass.states.get(entity_id) - assert ha_state.state == STATE_OPEN + assert ha_state.state == CoverState.OPEN assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50 assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 25fb31c3c62024..5b4993f7314ac7 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -28,7 +28,7 @@ async def test_hmip_load_all_supported_devices( test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 293 + assert len(mock_hap.hmip_device_by_entity_id) == 308 async def test_hmip_remove_device( diff --git a/tests/components/homematicip_cloud/test_light.py 
b/tests/components/homematicip_cloud/test_light.py index 18d490c37869a6..c0717e81e0df93 100644 --- a/tests/components/homematicip_cloud/test_light.py +++ b/tests/components/homematicip_cloud/test_light.py @@ -1,12 +1,14 @@ """Tests for HomematicIP Cloud light.""" -from homematicip.base.enums import RGBColorState +from homematicip.base.enums import OpticalSignalBehaviour, RGBColorState from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_NAME, + ATTR_EFFECT, + ATTR_HS_COLOR, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -173,6 +175,101 @@ async def test_hmip_notification_light( assert not ha_state.attributes.get(ATTR_BRIGHTNESS) +async def test_hmip_notification_light_2( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: + """Test HomematicipNotificationLight.""" + entity_id = "light.led_oben" + entity_name = "Led Oben" + device_model = "HmIP-BSL" + mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == STATE_ON + assert ha_state.attributes[ATTR_EFFECT] == "BLINKING_MIDDLE" + + functional_channel = hmip_device.functionalChannels[3] + service_call_counter = len(functional_channel.mock_calls) + + # Send all color via service call. + await hass.services.async_call( + "light", + "turn_on", + {"entity_id": entity_id, ATTR_HS_COLOR: [240.0, 100.0], ATTR_BRIGHTNESS: 128}, + blocking=True, + ) + assert functional_channel.mock_calls[-1][0] == "async_set_optical_signal" + assert functional_channel.mock_calls[-1][2] == { + "opticalSignalBehaviour": OpticalSignalBehaviour.BLINKING_MIDDLE, + "rgb": RGBColorState.BLUE, + "dimLevel": 0.5, + } + assert service_call_counter + 1 == len(functional_channel.mock_calls) + + +async def test_hmip_notification_light_2_without_brightness_and_light( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: + """Test HomematicipNotificationLight.""" + entity_id = "light.led_oben" + entity_name = "Led Oben" + device_model = "HmIP-BSL" + mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + color_before = ha_state.attributes["color_name"] + + functional_channel = hmip_device.functionalChannels[3] + service_call_counter = len(functional_channel.mock_calls) + + # Send all color via service call. 
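+ # Only ATTR_EFFECT is passed in this call (no color or brightness); the assertion below expects the previously reported color and full brightness (dimLevel 1) to be reused.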
+ await hass.services.async_call( + "light", + "turn_on", + {"entity_id": entity_id, ATTR_EFFECT: OpticalSignalBehaviour.FLASH_MIDDLE}, + blocking=True, + ) + assert functional_channel.mock_calls[-1][0] == "async_set_optical_signal" + assert functional_channel.mock_calls[-1][2] == { + "opticalSignalBehaviour": OpticalSignalBehaviour.FLASH_MIDDLE, + "rgb": color_before, + "dimLevel": 1, + } + assert service_call_counter + 1 == len(functional_channel.mock_calls) + + +async def test_hmip_notification_light_2_turn_off( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: + """Test HomematicipNotificationLight.""" + entity_id = "light.led_oben" + entity_name = "Led Oben" + device_model = "HmIP-BSL" + mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_devices=["BSL2"]) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + functional_channel = hmip_device.functionalChannels[3] + service_call_counter = len(functional_channel.mock_calls) + + # Send all color via service call. + await hass.services.async_call( + "light", + "turn_off", + {"entity_id": entity_id}, + blocking=True, + ) + assert functional_channel.mock_calls[-1][0] == "async_turn_off" + assert service_call_counter + 1 == len(functional_channel.mock_calls) + + async def test_hmip_dimmer( hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index 07cf5ea0ae5b7c..2dda3116032ce5 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -12,6 +12,7 @@ ATTR_RSSI_DEVICE, ATTR_RSSI_PEER, ) +from homeassistant.components.homematicip_cloud.hap import HomematicipHAP from homeassistant.components.homematicip_cloud.sensor import ( ATTR_CURRENT_ILLUMINATION, ATTR_HIGHEST_ILLUMINATION, @@ -22,7 +23,11 @@ ATTR_WIND_DIRECTION, ATTR_WIND_DIRECTION_VARIATION, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, LIGHT_LUX, @@ -361,6 +366,7 @@ async def test_hmip_windspeed_sensor( assert ( ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfSpeed.KILOMETERS_PER_HOUR ) + assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "windSpeed", 9.4) ha_state = hass.states.get(entity_id) assert ha_state.state == "9.4" @@ -410,6 +416,7 @@ async def test_hmip_today_rain_sensor( assert ha_state.state == "3.9" assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfLength.MILLIMETERS + assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "todayRainCounter", 14.2) ha_state = hass.states.get(entity_id) assert ha_state.state == "14.2" @@ -515,6 +522,47 @@ async def test_hmip_passage_detector_delta_counter( assert ha_state.state == "190" +async def test_hmip_floor_terminal_block_mechanic_channel_1_valve_position( + hass: HomeAssistant, default_mock_hap_factory: HomematicipHAP +) -> None: + """Test HomematicipFloorTerminalBlockMechanicChannelValve Channel 1 HmIP-FALMOT-C12.""" + entity_id = "sensor.heizkreislauf_1_og_bad_r" + entity_name = "Heizkreislauf (1) OG Bad r" + device_model = "HmIP-FALMOT-C12" + + mock_hap = await 
default_mock_hap_factory.async_get_mock_hap( + test_devices=["Fu\u00dfbodenheizungsaktor"] + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + hmip_device = mock_hap.hmip_device_by_entity_id.get(entity_id) + + assert ha_state.state == "48" + assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + await async_manipulate_test_data(hass, hmip_device, "valvePosition", 0.36) + ha_state = hass.states.get(entity_id) + assert ha_state.state == "36" + + await async_manipulate_test_data(hass, hmip_device, "configPending", True) + ha_state = hass.states.get(entity_id) + assert ha_state.attributes["icon"] == "mdi:alert-circle" + + await async_manipulate_test_data(hass, hmip_device, "configPending", False) + await async_manipulate_test_data( + hass, hmip_device, "valveState", ValveState.ADAPTION_IN_PROGRESS + ) + ha_state = hass.states.get(entity_id) + assert ha_state.attributes["icon"] == "mdi:alert" + + await async_manipulate_test_data( + hass, hmip_device, "valveState", ValveState.ADAPTION_DONE + ) + ha_state = hass.states.get(entity_id) + assert ha_state.attributes["icon"] == "mdi:heating-coil" + + async def test_hmip_esi_iec_current_power_consumption( hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: diff --git a/tests/components/homewizard/test_init.py b/tests/components/homewizard/test_init.py index 334129006776a0..a01f075ee61c69 100644 --- a/tests/components/homewizard/test_init.py +++ b/tests/components/homewizard/test_init.py @@ -1,17 +1,17 @@ """Tests for the homewizard component.""" +from datetime import timedelta from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory from homewizard_energy.errors import DisabledError import pytest from homeassistant.components.homewizard.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_load_unload( @@ -97,60 +97,36 @@ async def test_load_removes_reauth_flow( assert len(flows) == 0 -@pytest.mark.parametrize( - ("device_fixture", "old_unique_id", "new_unique_id"), - [ - ( - "HWE-P1", - "homewizard_G001", - "homewizard_gas_meter_G001", - ), - ( - "HWE-P1", - "homewizard_W001", - "homewizard_water_meter_W001", - ), - ( - "HWE-P1", - "homewizard_WW001", - "homewizard_warm_water_meter_WW001", - ), - ( - "HWE-P1", - "homewizard_H001", - "homewizard_heat_meter_H001", - ), - ( - "HWE-P1", - "homewizard_IH001", - "homewizard_inlet_heat_meter_IH001", - ), - ], -) @pytest.mark.usefixtures("mock_homewizardenergy") -async def test_external_sensor_migration( +async def test_disablederror_reloads_integration( hass: HomeAssistant, - entity_registry: er.EntityRegistry, mock_config_entry: MockConfigEntry, - old_unique_id: str, - new_unique_id: str, + mock_homewizardenergy: MagicMock, + freezer: FrozenDateTimeFactory, ) -> None: - """Test unique ID or External sensors are migrated.""" + """Test DisabledError reloads integration.""" mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() - entity: er.RegistryEntry = entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=old_unique_id, - config_entry=mock_config_entry, 
- ) + # Make sure current state is loaded and not reauth flow is active + assert mock_config_entry.state is ConfigEntryState.LOADED + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert len(flows) == 0 - assert entity.unique_id == old_unique_id + # Simulate DisabledError and wait for next update + mock_homewizardenergy.device.side_effect = DisabledError() - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) await hass.async_block_till_done() - entity_migrated = entity_registry.async_get(entity.entity_id) - assert entity_migrated - assert entity_migrated.unique_id == new_unique_id - assert entity_migrated.previous_unique_id == old_unique_id + # State should be setup retry and reauth flow should be active + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index 503b936dc15437..e8c4ab15b3def5 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -243,7 +243,7 @@ async def test_reconfigure_flow( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -308,7 +308,7 @@ async def test_reconfigure_flow_flow_duplicate( result = await entry1.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -318,7 +318,7 @@ async def test_reconfigure_flow_flow_duplicate( }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "duplicated_host_port"} @@ -330,7 +330,7 @@ async def test_reconfigure_flow_flow_no_change( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -375,7 +375,7 @@ async def test_reconfigure_flow_credentials_password_only( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -386,7 +386,7 @@ async def test_reconfigure_flow_credentials_password_only( }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "need_username_with_password"} diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index 9485f2f4302da9..73c5ff33dbcafb 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -5,6 +5,7 @@ 
from aiohttp import ClientConnectionError import aiosomecomfort +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from syrupy.filters import props @@ -29,6 +30,8 @@ ) from homeassistant.components.honeywell.climate import ( DOMAIN, + MODE_PERMANENT_HOLD, + MODE_TEMPORARY_HOLD, PRESET_HOLD, RETRY, SCAN_INTERVAL, @@ -1207,3 +1210,59 @@ async def test_unique_id( await init_integration(hass, config_entry) entity_entry = entity_registry.async_get(f"climate.{device.name}") assert entity_entry.unique_id == str(device.deviceid) + + +async def test_preset_mode( + hass: HomeAssistant, + device: MagicMock, + config_entry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test mode settings properly reflected.""" + await init_integration(hass, config_entry) + entity_id = f"climate.{device.name}" + + device.raw_ui_data["StatusHeat"] = 3 + device.raw_ui_data["StatusCool"] = 3 + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE + + device.raw_ui_data["StatusHeat"] = MODE_TEMPORARY_HOLD + device.raw_ui_data["StatusCool"] = MODE_TEMPORARY_HOLD + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOLD + + device.raw_ui_data["StatusHeat"] = MODE_PERMANENT_HOLD + device.raw_ui_data["StatusCool"] = MODE_PERMANENT_HOLD + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_HOLD + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, + blocking=True, + ) + + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY + + device.raw_ui_data["StatusHeat"] = 3 + device.raw_ui_data["StatusCool"] = 3 + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE diff --git a/tests/components/http/test_ban.py b/tests/components/http/test_ban.py index 41f36dad2df452..59011de0cfd8ea 100644 --- a/tests/components/http/test_ban.py +++ b/tests/components/http/test_ban.py @@ -3,7 +3,7 @@ from http import HTTPStatus from ipaddress import ip_address import os -from unittest.mock import Mock, mock_open, patch +from unittest.mock import AsyncMock, Mock, mock_open, patch from aiohttp import web from aiohttp.web_exceptions import HTTPUnauthorized @@ -34,14 +34,10 @@ @pytest.fixture(name="hassio_env") -def hassio_env_fixture(): +def hassio_env_fixture(supervisor_is_connected: AsyncMock): """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value={"result": "ok", "data": {}}, - ), patch.dict(os.environ, {"SUPERVISOR_TOKEN": "123456"}), ): yield @@ -201,6 +197,7 @@ async def test_access_from_supervisor_ip( hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_env, + resolution_info: AsyncMock, ) -> None: """Test accessing to server from supervisor IP.""" app = web.Application() @@ -222,17 +219,7 @@ async def unauth_handler(request): manager = app[KEY_BAN_MANAGER] 
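+ # Resolution info is now injected via the resolution_info fixture (see the test signature), so no local patch of get_resolution_info is needed here.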
- with patch( - "homeassistant.components.hassio.HassIO.get_resolution_info", - return_value={ - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - ): - assert await async_setup_component(hass, "hassio", {"hassio": {}}) + assert await async_setup_component(hass, "hassio", {"hassio": {}}) m_open = mock_open() diff --git a/tests/components/hunterdouglas_powerview/conftest.py b/tests/components/hunterdouglas_powerview/conftest.py index d4433f93dcbf26..ea40ba4ecc61d0 100644 --- a/tests/components/hunterdouglas_powerview/conftest.py +++ b/tests/components/hunterdouglas_powerview/conftest.py @@ -33,15 +33,15 @@ def mock_hunterdouglas_hub( """Return a mocked Powerview Hub with all data populated.""" with ( patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", return_value=load_json_object_fixture(device_json, DOMAIN), ), patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_home_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_home_data", return_value=load_json_object_fixture(home_json, DOMAIN), ), patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_firmware", return_value=load_json_object_fixture(firmware_json, DOMAIN), ), patch( @@ -111,7 +111,7 @@ def firmware_json(api_version: int) -> str: def rooms_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen2/rooms.json" + return "gen1/rooms.json" if api_version == 2: return "gen2/rooms.json" if api_version == 3: @@ -124,7 +124,7 @@ def rooms_json(api_version: int) -> str: def scenes_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen2/scenes.json" + return "gen1/scenes.json" if api_version == 2: return "gen2/scenes.json" if api_version == 3: @@ -137,7 +137,7 @@ def scenes_json(api_version: int) -> str: def shades_json(api_version: int) -> str: """Return the get_resources fixture for a specific device.""" if api_version == 1: - return "gen2/shades.json" + return "gen1/shades.json" if api_version == 2: return "gen2/shades.json" if api_version == 3: diff --git a/tests/components/hunterdouglas_powerview/const.py b/tests/components/hunterdouglas_powerview/const.py index 5a912a63a17939..65b03fd5ec20cd 100644 --- a/tests/components/hunterdouglas_powerview/const.py +++ b/tests/components/hunterdouglas_powerview/const.py @@ -6,6 +6,7 @@ from homeassistant.components import dhcp, zeroconf MOCK_MAC = "AA::BB::CC::DD::EE::FF" +MOCK_SERIAL = "A1B2C3D4E5G6H7" HOMEKIT_DISCOVERY_GEN2 = zeroconf.ZeroconfServiceInfo( ip_address="1.2.3.4", @@ -41,7 +42,7 @@ ip_address="1.2.3.4", ip_addresses=[IPv4Address("1.2.3.4")], hostname="mock_hostname", - name="Powerview Generation 3._powerview-g3._tcp.local.", + name="Powerview Generation 3._PowerView-G3._tcp.local.", port=None, properties={}, type="mock_type", diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json new file mode 100644 index 00000000000000..4ddcccd466ea44 --- /dev/null +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/rooms.json @@ -0,0 +1,13 @@ +{ + "roomIds": [4896], + "roomData": [ + { + "id": 4896, + "name": "U3BpbmRsZQ==", + "order": 0, + "colorId": 11, + 
"iconId": 77, + "name_unicode": "Spindle" + } + ] +} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json new file mode 100644 index 00000000000000..4b6b7fb9cc368c --- /dev/null +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/scenes.json @@ -0,0 +1,188 @@ +{ + "sceneIds": [ + 19831, 4068, 55363, 43508, 59372, 48243, 54636, 20625, 4034, 59103, 61648, + 24626, 64679, 22498, 28856, 25458, 51159, 959 + ], + "sceneData": [ + { + "id": 19831, + "networkNumber": 0, + "name": "Q2xvc2UgTG91bmdlIFJvb20=", + "roomId": 4896, + "order": 0, + "colorId": 7, + "iconId": 171, + "name_unicode": "Close Lounge Room" + }, + { + "id": 4068, + "networkNumber": 1, + "name": "Q2xvc2UgQmVkIDQ=", + "roomId": 4896, + "order": 1, + "colorId": 7, + "iconId": 10, + "name_unicode": "Close Bed 4" + }, + { + "id": 55363, + "networkNumber": 2, + "name": "Q2xvc2UgQmVkIDI=", + "roomId": 4896, + "order": 2, + "colorId": 11, + "iconId": 171, + "name_unicode": "Close Bed 2" + }, + { + "id": 43508, + "networkNumber": 3, + "name": "Q2xvc2UgTWFzdGVyIEJlZA==", + "roomId": 4896, + "order": 3, + "colorId": 11, + "iconId": 10, + "name_unicode": "Close Master Bed" + }, + { + "id": 59372, + "networkNumber": 4, + "name": "Q2xvc2UgRmFtaWx5", + "roomId": 4896, + "order": 4, + "colorId": 0, + "iconId": 171, + "name_unicode": "Close Family" + }, + { + "id": 48243, + "networkNumber": 5, + "name": "T3BlbiBCZWQgNA==", + "roomId": 4896, + "order": 5, + "colorId": 0, + "iconId": 10, + "name_unicode": "Open Bed 4" + }, + { + "id": 54636, + "networkNumber": 6, + "name": "T3BlbiBNYXN0ZXIgQmVk", + "roomId": 4896, + "order": 6, + "colorId": 0, + "iconId": 26, + "name_unicode": "Open Master Bed" + }, + { + "id": 20625, + "networkNumber": 7, + "name": "T3BlbiBCZWQgMw==", + "roomId": 4896, + "order": 7, + "colorId": 7, + "iconId": 26, + "name_unicode": "Open Bed 3" + }, + { + "id": 4034, + "networkNumber": 8, + "name": "T3BlbiBGYW1pbHk=", + "roomId": 4896, + "order": 8, + "colorId": 11, + "iconId": 26, + "name_unicode": "Open Family" + }, + { + "id": 59103, + "networkNumber": 9, + "name": "Q2xvc2UgU3R1ZHk=", + "roomId": 4896, + "order": 9, + "colorId": 0, + "iconId": 171, + "name_unicode": "Close Study" + }, + { + "id": 61648, + "networkNumber": 10, + "name": "T3BlbiBBbGw=", + "roomId": 4896, + "order": 10, + "colorId": 11, + "iconId": 26, + "name_unicode": "Open All" + }, + { + "id": 24626, + "networkNumber": 11, + "name": "Q2xvc2UgQWxs", + "roomId": 4896, + "order": 11, + "colorId": 0, + "iconId": 171, + "name_unicode": "Close All" + }, + { + "id": 64679, + "networkNumber": 12, + "name": "T3BlbiBLaXRjaGVu", + "roomId": 4896, + "order": 12, + "colorId": 7, + "iconId": 26, + "name_unicode": "Open Kitchen" + }, + { + "id": 22498, + "networkNumber": 13, + "name": "T3BlbiBMb3VuZ2UgUm9vbQ==", + "roomId": 4896, + "order": 13, + "colorId": 7, + "iconId": 26, + "name_unicode": "Open Lounge Room" + }, + { + "id": 25458, + "networkNumber": 14, + "name": "T3BlbiBCZWQgMg==", + "roomId": 4896, + "order": 14, + "colorId": 0, + "iconId": 26, + "name_unicode": "Open Bed 2" + }, + { + "id": 46225, + "networkNumber": 15, + "name": "Q2xvc2UgQmVkIDM=", + "roomId": 4896, + "order": 15, + "colorId": 0, + "iconId": 26, + "name_unicode": "Close Bed 3" + }, + { + "id": 51159, + "networkNumber": 16, + "name": "Q2xvc2UgS2l0Y2hlbg==", + "roomId": 4896, + "order": 16, + "colorId": 0, + "iconId": 26, + "name_unicode": "Close Kitchen" + }, + { + "id": 959, + "networkNumber": 
17, + "name": "T3BlbiBTdHVkeQ==", + "roomId": 4896, + "order": 17, + "colorId": 0, + "iconId": 26, + "name_unicode": "Open Study" + } + ] +} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json new file mode 100644 index 00000000000000..6e43c1d788d170 --- /dev/null +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/shades.json @@ -0,0 +1,53 @@ +{ + "shadeIds": [36492, 65111, 7003, 53627], + "shadeData": [ + { + "id": 36492, + "name": "S2l0Y2hlbiBOb3J0aA==", + "roomId": 4896, + "groupId": 35661, + "order": 0, + "type": 40, + "batteryStrength": 116, + "batteryStatus": 3, + "positions": { "position1": 65535, "posKind1": 1 }, + "name_unicode": "Kitchen North" + }, + { + "id": 65111, + "name": "S2l0Y2hlbiBXZXN0", + "roomId": 4896, + "groupId": 35661, + "order": 1, + "type": 40, + "batteryStrength": 124, + "batteryStatus": 3, + "positions": { "position1": 65535, "posKind1": 3 }, + "name_unicode": "Kitchen West" + }, + { + "id": 7003, + "name": "QmF0aCBFYXN0", + "roomId": 4896, + "groupId": 35661, + "order": 2, + "type": 40, + "batteryStrength": 94, + "batteryStatus": 1, + "positions": { "position1": 65535, "posKind1": 1 }, + "name_unicode": "Bath East" + }, + { + "id": 53627, + "name": "QmF0aCBTb3V0aA==", + "roomId": 4896, + "groupId": 35661, + "order": 3, + "type": 40, + "batteryStrength": 127, + "batteryStatus": 3, + "positions": { "position1": 65535, "posKind1": 3 }, + "name_unicode": "Bath South" + } + ] +} diff --git a/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json b/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json index 132e2721b05b23..90b64ee4686667 100644 --- a/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json +++ b/tests/components/hunterdouglas_powerview/fixtures/gen1/userdata.json @@ -1,34 +1,34 @@ { "userData": { - "enableScheduledEvents": true, - "staticIp": false, + "serialNumber": "A1B2C3D4E5G6H7", + "rfID": "0x8B2A", + "rfIDInt": 35626, + "rfStatus": 0, + "hubName": "UG93ZXJ2aWV3IEdlbmVyYXRpb24gMQ==", + "macAddress": "AA:BB:CC:DD:EE:FF", + "roomCount": 1, + "shadeCount": 4, + "groupCount": 5, + "sceneCount": 9, + "sceneMemberCount": 24, + "multiSceneCount": 0, + "multiSceneMemberCount": 0, + "scheduledEventCount": 4, "sceneControllerCount": 0, + "sceneControllerMemberCount": 0, "accessPointCount": 0, - "shadeCount": 5, - "ip": "192.168.0.20", - "groupCount": 9, - "scheduledEventCount": 0, + "localTimeDataSet": true, + "enableScheduledEvents": true, + "remoteConnectEnabled": true, "editingEnabled": true, - "roomCount": 5, "setupCompleted": false, - "sceneCount": 18, - "sceneControllerMemberCount": 0, - "mask": "255.255.255.0", - "hubName": "UG93ZXJ2aWV3IEdlbmVyYXRpb24gMQ==", - "rfID": "0x8B2A", - "remoteConnectEnabled": false, - "multiSceneMemberCount": 0, - "rfStatus": 0, - "serialNumber": "A1B2C3D4E5G6H7", - "undefinedShadeCount": 0, - "sceneMemberCount": 18, - "unassignedShadeCount": 0, - "multiSceneCount": 0, - "addressKind": "newPrimary", "gateway": "192.168.0.1", - "localTimeDataSet": true, + "mask": "255.255.255.0", + "ip": "192.168.0.20", "dns": "192.168.0.1", - "macAddress": "AA:BB:CC:DD:EE:FF", - "rfIDInt": 35626 + "staticIp": false, + "addressKind": "newPrimary", + "unassignedShadeCount": 0, + "undefinedShadeCount": 0 } } diff --git a/tests/components/hunterdouglas_powerview/test_config_flow.py b/tests/components/hunterdouglas_powerview/test_config_flow.py index b9721f4adb1f1e..42589bb10e05ad 100644 --- 
a/tests/components/hunterdouglas_powerview/test_config_flow.py +++ b/tests/components/hunterdouglas_powerview/test_config_flow.py @@ -10,8 +10,9 @@ from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +import homeassistant.helpers.entity_registry as er -from .const import DHCP_DATA, DISCOVERY_DATA, HOMEKIT_DATA +from .const import DHCP_DATA, DISCOVERY_DATA, HOMEKIT_DATA, MOCK_SERIAL from tests.common import MockConfigEntry, load_json_object_fixture @@ -40,7 +41,7 @@ async def test_user_form( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == "A1B2C3D4E5G6H7" + assert result2["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -75,7 +76,7 @@ async def test_form_homekit_and_dhcp_cannot_connect( ignored_config_entry.add_to_hass(hass) with patch( - "homeassistant.components.hunterdouglas_powerview.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", side_effect=TimeoutError, ): result = await hass.config_entries.flow.async_init( @@ -100,7 +101,7 @@ async def test_form_homekit_and_dhcp_cannot_connect( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -142,7 +143,7 @@ async def test_form_homekit_and_dhcp( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == "A1B2C3D4E5G6H7" + assert result2["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -205,7 +206,7 @@ async def test_form_cannot_connect( # Simulate a timeout error with patch( - "homeassistant.components.hunterdouglas_powerview.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", side_effect=TimeoutError, ): result2 = await hass.config_entries.flow.async_configure( @@ -225,7 +226,7 @@ async def test_form_cannot_connect( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -244,11 +245,11 @@ async def test_form_no_data( with ( patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", return_value={}, ), patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_home_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_home_data", return_value={}, ), ): @@ -269,7 +270,7 @@ async def test_form_no_data( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", 
CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -288,7 +289,7 @@ async def test_form_unknown_exception( # Simulate a transient error with patch( - "homeassistant.components.hunterdouglas_powerview.config_flow.Hub.query_firmware", + "homeassistant.components.hunterdouglas_powerview.util.Hub.query_firmware", side_effect=SyntaxError, ): result2 = await hass.config_entries.flow.async_configure( @@ -308,7 +309,7 @@ async def test_form_unknown_exception( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == f"Powerview Generation {api_version}" assert result2["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result2["result"].unique_id == "A1B2C3D4E5G6H7" + assert result2["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 @@ -327,7 +328,7 @@ async def test_form_unsupported_device( # Simulate a gen 3 secondary hub with patch( - "homeassistant.components.hunterdouglas_powerview.Hub.request_raw_data", + "homeassistant.components.hunterdouglas_powerview.util.Hub.request_raw_data", return_value=load_json_object_fixture("gen3/gateway/secondary.json", DOMAIN), ): result2 = await hass.config_entries.flow.async_configure( @@ -347,6 +348,57 @@ async def test_form_unsupported_device( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == f"Powerview Generation {api_version}" assert result3["data"] == {CONF_HOST: "1.2.3.4", CONF_API_VERSION: api_version} - assert result3["result"].unique_id == "A1B2C3D4E5G6H7" + assert result3["result"].unique_id == MOCK_SERIAL assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_hunterdouglas_hub") +@pytest.mark.parametrize("api_version", [1, 2, 3]) +async def test_migrate_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + api_version: int, +) -> None: + """Test migrate to newest version.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={"host": "1.2.3.4"}, + unique_id=MOCK_SERIAL, + version=1, + minor_version=1, + ) + + # Add entries with int unique_id + entity_registry.async_get_or_create( + domain="cover", + platform="hunterdouglas_powerview", + unique_id=123, + config_entry=entry, + ) + # Add entries with a str unique_id not starting with entry.unique_id + entity_registry.async_get_or_create( + domain="cover", + platform="hunterdouglas_powerview", + unique_id="old_unique_id", + config_entry=entry, + ) + + assert entry.version == 1 + assert entry.minor_version == 1 + + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.version == 1 + assert entry.minor_version == 2 + + # Reload the registry entries + registry_entries = er.async_entries_for_config_entry( + entity_registry, entry.entry_id + ) + + # Ensure the IDs have been migrated + for reg_entry in registry_entries: + assert reg_entry.unique_id.startswith(f"{entry.unique_id}_") diff --git a/tests/components/husqvarna_automower/conftest.py b/tests/components/husqvarna_automower/conftest.py index dbb8f3b4c724b3..2814e1558d1374 100644 --- a/tests/components/husqvarna_automower/conftest.py +++ b/tests/components/husqvarna_automower/conftest.py @@ -4,6 +4,7 @@ import time from unittest.mock import AsyncMock, patch +from aioautomower.model import MowerAttributes from aioautomower.session import AutomowerSession, _MowerCommands from aioautomower.utils 
import mower_list_to_dictionary_dataclass from aiohttp import ClientWebSocketResponse @@ -16,6 +17,7 @@ from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from .const import CLIENT_ID, CLIENT_SECRET, USER_ID @@ -40,6 +42,21 @@ def mock_scope() -> str: return "iam:read amc:api" + +@pytest.fixture(name="mower_time_zone") +async def mock_time_zone(hass: HomeAssistant) -> dict[str, MowerAttributes]: + """Fixture for the time zone of the test mowers.""" + return await dt_util.async_get_time_zone("Europe/Berlin") + + +@pytest.fixture(name="values") +def mock_values(mower_time_zone) -> dict[str, MowerAttributes]: + """Fixture for the mower data loaded from mower.json.""" + return mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN), + mower_time_zone, + ) + + @pytest.fixture def mock_config_entry(jwt: str, expires_at: int, scope: str) -> MockConfigEntry: """Return the default mocked config entry.""" @@ -81,17 +98,13 @@ async def setup_credentials(hass: HomeAssistant) -> None: @pytest.fixture -def mock_automower_client() -> Generator[AsyncMock]: +def mock_automower_client(values) -> Generator[AsyncMock]: """Mock a Husqvarna Automower client.""" - mower_dict = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) - mock = AsyncMock(spec=AutomowerSession) mock.auth = AsyncMock(side_effect=ClientWebSocketResponse) mock.commands = AsyncMock(spec_set=_MowerCommands) - mock.get_status.return_value = mower_dict + mock.get_status.return_value = values with patch( "homeassistant.components.husqvarna_automower.AutomowerSession", diff --git a/tests/components/husqvarna_automower/snapshots/test_calendar.ambr b/tests/components/husqvarna_automower/snapshots/test_calendar.ambr index 1924b9ad42e393..7cd8c68b624d6c 100644 --- a/tests/components/husqvarna_automower/snapshots/test_calendar.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_calendar.ambr @@ -68,6 +68,11 @@ 'start': '2023-06-10T01:00:00+02:00', 'summary': 'Back lawn schedule 2', }), + dict({ + 'end': '2023-06-12T09:00:00+02:00', + 'start': '2023-06-12T01:00:00+02:00', + 'summary': 'Back lawn schedule 2', + }), ]), }), 'calendar.test_mower_2': dict({ diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index ab9e81985c9c11..ee9b75107702e0 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -68,31 +68,33 @@ 'status_dateteime': '2023-06-05T00:00:00+00:00', }), 'mower': dict({ - 'activity': 'PARKED_IN_CS', + 'activity': 'parked_in_cs', 'error_code': 0, + 'error_datetime': None, 'error_datetime_naive': None, 'error_key': None, 'error_timestamp': 0, - 'inactive_reason': 'NONE', + 'inactive_reason': 'none', 'is_error_confirmable': False, - 'mode': 'MAIN_AREA', - 'state': 'RESTRICTED', + 'mode': 'main_area', + 'state': 'restricted', 'work_area_id': 123456, 'work_area_name': 'Front lawn', }), 'planner': dict({ 'next_start': 1685991600000, + 'next_start_datetime': '2023-06-05T19:00:00+02:00', 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ - 'action': 'NOT_ACTIVE', + 'action': 'not_active', }), - 'restricted_reason': 'WEEK_SCHEDULE', + 'restricted_reason': 'week_schedule', }), 'positions': '**REDACTED**',
'settings': dict({ 'cutting_height': 4, 'headlight': dict({ - 'mode': 'EVENING_ONLY', + 'mode': 'evening_only', }), }), 'statistics': dict({ @@ -138,6 +140,7 @@ '0': dict({ 'cutting_height': 50, 'enabled': False, + 'last_time_completed': '2024-08-12T05:07:49+02:00', 'last_time_completed_naive': '2024-08-12T05:07:49', 'name': 'my_lawn', 'progress': 20, @@ -145,6 +148,7 @@ '123456': dict({ 'cutting_height': 50, 'enabled': True, + 'last_time_completed': '2024-08-12T07:54:29+02:00', 'last_time_completed_naive': '2024-08-12T07:54:29', 'name': 'Front lawn', 'progress': 40, @@ -152,6 +156,7 @@ '654321': dict({ 'cutting_height': 25, 'enabled': True, + 'last_time_completed': None, 'last_time_completed_naive': None, 'name': 'Back lawn', 'progress': None, @@ -165,7 +170,7 @@ 'auth_implementation': 'husqvarna_automower', 'token': dict({ 'access_token': '**REDACTED**', - 'expires_at': 1685926800.0, + 'expires_at': 1685919600.0, 'expires_in': 86399, 'provider': 'husqvarna', 'refresh_token': '**REDACTED**', diff --git a/tests/components/husqvarna_automower/snapshots/test_init.ambr b/tests/components/husqvarna_automower/snapshots/test_init.ambr index adf70fb0aab8ce..e79bd1f8145d22 100644 --- a/tests/components/husqvarna_automower/snapshots/test_init.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_init.ambr @@ -20,7 +20,7 @@ 'labels': set({ }), 'manufacturer': 'Husqvarna', - 'model': 'HUSQVARNA AUTOMOWER® 450XH', + 'model': 'AUTOMOWER® 450XH', 'model_id': None, 'name': 'Test Mower 1', 'name_by_user': None, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index dfc1d41775fa17..d57a829a997007 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -552,11 +552,11 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'main_area', - 'demo', - 'secondary_area', - 'home', - 'unknown', + , + , + , + , + , ]), }), 'config_entry_id': , @@ -592,11 +592,11 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 1 Mode', 'options': list([ - 'main_area', - 'demo', - 'secondary_area', - 'home', - 'unknown', + , + , + , + , + , ]), }), 'context': , @@ -856,16 +856,16 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'all_work_areas_completed', - 'daily_limit', - 'external', - 'fota', - 'frost', - 'none', - 'not_applicable', - 'park_override', - 'sensor', - 'week_schedule', + , + , + , + , + , + , + , + , + , + , ]), }), 'config_entry_id': , @@ -901,16 +901,16 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 1 Restricted reason', 'options': list([ - 'all_work_areas_completed', - 'daily_limit', - 'external', - 'fota', - 'frost', - 'none', - 'not_applicable', - 'park_override', - 'sensor', - 'week_schedule', + , + , + , + , + , + , + , + , + , + , ]), }), 'context': , @@ -1658,11 +1658,11 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'main_area', - 'demo', - 'secondary_area', - 'home', - 'unknown', + , + , + , + , + , ]), }), 'config_entry_id': , @@ -1698,11 +1698,11 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 2 Mode', 'options': list([ - 'main_area', - 'demo', - 'secondary_area', - 'home', - 'unknown', + , + , + , + , + , ]), }), 'context': , @@ -1767,16 +1767,16 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'all_work_areas_completed', - 'daily_limit', - 'external', - 'fota', - 'frost', - 'none', - 'not_applicable', - 'park_override', - 'sensor', - 
'week_schedule', + , + , + , + , + , + , + , + , + , + , ]), }), 'config_entry_id': , @@ -1812,16 +1812,16 @@ 'device_class': 'enum', 'friendly_name': 'Test Mower 2 Restricted reason', 'options': list([ - 'all_work_areas_completed', - 'daily_limit', - 'external', - 'fota', - 'frost', - 'none', - 'not_applicable', - 'park_override', - 'sensor', - 'week_schedule', + , + , + , + , + , + , + , + , + , + , ]), }), 'context': , diff --git a/tests/components/husqvarna_automower/test_binary_sensor.py b/tests/components/husqvarna_automower/test_binary_sensor.py index fceaeee2321044..858dc03b93f944 100644 --- a/tests/components/husqvarna_automower/test_binary_sensor.py +++ b/tests/components/husqvarna_automower/test_binary_sensor.py @@ -2,12 +2,10 @@ from unittest.mock import AsyncMock, patch -from aioautomower.model import MowerActivities -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerActivities, MowerAttributes from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -16,12 +14,7 @@ from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform async def test_binary_sensor_states( @@ -29,11 +22,9 @@ async def test_binary_sensor_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test binary sensor states.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("binary_sensor.test_mower_1_charging") assert state is not None diff --git a/tests/components/husqvarna_automower/test_button.py b/tests/components/husqvarna_automower/test_button.py index bf76fcbb5988b2..25fa64b531fce3 100644 --- a/tests/components/husqvarna_automower/test_button.py +++ b/tests/components/husqvarna_automower/test_button.py @@ -2,16 +2,14 @@ import datetime from unittest.mock import AsyncMock, patch -import zoneinfo from aioautomower.exceptions import ApiException -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, @@ -26,12 +24,7 @@ from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, tzinfo=datetime.UTC)) @@ -40,6 +33,7 @@ async def test_button_states_and_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test error confirm button command.""" entity_id = "button.test_mower_1_confirm_error" @@ -48,9 +42,6 @@ async def test_button_states_and_commands( assert state.name == "Test Mower 1 Confirm error" assert state.state == STATE_UNAVAILABLE - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values[TEST_MOWER_ID].mower.is_error_confirmable = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) @@ -99,6 +90,7 @@ async def test_sync_clock( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test sync clock button command.""" entity_id = "button.test_mower_1_sync_clock" @@ -106,9 +98,6 @@ async def test_sync_clock( state = hass.states.get(entity_id) assert state.name == "Test Mower 1 Sync clock" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) mock_automower_client.get_status.return_value = values await hass.services.async_call( @@ -118,12 +107,7 @@ async def test_sync_clock( blocking=True, ) mocked_method = mock_automower_client.commands.set_datetime - # datetime(2024, 2, 29, 11, tzinfo=datetime.UTC) is in local time of the tests - # datetime(2024, 2, 29, 12, tzinfo=zoneinfo.ZoneInfo(key='Europe/Berlin')) - mocked_method.assert_called_once_with( - TEST_MOWER_ID, - datetime.datetime(2024, 2, 29, 12, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")), - ) + mocked_method.assert_called_once_with(TEST_MOWER_ID) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == "2024-02-29T11:00:00+00:00" diff --git a/tests/components/husqvarna_automower/test_calendar.py b/tests/components/husqvarna_automower/test_calendar.py index 0e914e272fb200..8138b8c139b6b9 100644 --- a/tests/components/husqvarna_automower/test_calendar.py +++ b/tests/components/husqvarna_automower/test_calendar.py @@ -6,6 +6,7 @@ from typing import Any from unittest.mock import AsyncMock import urllib +import zoneinfo from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory @@ -93,12 +94,16 @@ async def test_empty_calendar( mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, get_events: GetEventsFn, + mower_time_zone: zoneinfo.ZoneInfo, ) -> None: """State if there is no schedule set.""" await setup_integration(hass, mock_config_entry) json_values = load_json_value_fixture("mower.json", DOMAIN) json_values["data"][0]["attributes"]["calendar"]["tasks"] = [] - values = mower_list_to_dictionary_dataclass(json_values) + values = mower_list_to_dictionary_dataclass( + json_values, + mower_time_zone, + ) mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) diff --git a/tests/components/husqvarna_automower/test_diagnostics.py b/tests/components/husqvarna_automower/test_diagnostics.py index 
f8dc89af6f0d15..2b47bff25a43eb 100644 --- a/tests/components/husqvarna_automower/test_diagnostics.py +++ b/tests/components/husqvarna_automower/test_diagnostics.py @@ -2,6 +2,7 @@ import datetime from unittest.mock import AsyncMock +import zoneinfo import pytest from syrupy.assertion import SnapshotAssertion @@ -21,7 +22,9 @@ from tests.typing import ClientSessionGenerator -@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, tzinfo=datetime.UTC)) +@pytest.mark.freeze_time( + datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) +) async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -40,7 +43,9 @@ async def test_entry_diagnostics( assert result == snapshot(exclude=props("created_at", "modified_at")) -@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5, tzinfo=datetime.UTC)) +@pytest.mark.freeze_time( + datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) +) async def test_device_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/husqvarna_automower/test_init.py b/tests/components/husqvarna_automower/test_init.py index b7cc6f883f4075..ca0c2a04af1a70 100644 --- a/tests/components/husqvarna_automower/test_init.py +++ b/tests/components/husqvarna_automower/test_init.py @@ -1,6 +1,6 @@ """Tests for init module.""" -from datetime import timedelta +from datetime import datetime, timedelta import http import time from unittest.mock import AsyncMock @@ -10,26 +10,28 @@ AuthException, HusqvarnaWSServerHandshakeError, ) -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerAttributes, WorkArea from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.husqvarna_automower.const import DOMAIN, OAUTH2_TOKEN +from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util import dt as dt_util from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, -) +from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker +ADDITIONAL_NUMBER_ENTITIES = 1 +ADDITIONAL_SENSOR_ENTITIES = 2 +ADDITIONAL_SWITCH_ENTITIES = 1 + async def test_load_unload_entry( hass: HomeAssistant, @@ -167,37 +169,13 @@ async def test_device_info( assert reg_device == snapshot -async def test_workarea_deleted( - hass: HomeAssistant, - mock_automower_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test if work area is deleted after removed.""" - - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) - await setup_integration(hass, mock_config_entry) - current_entries = len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) - ) - - del values[TEST_MOWER_ID].work_areas[123456] - mock_automower_client.get_status.return_value = values - await hass.config_entries.async_reload(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) - ) == (current_entries - 2) - - async def test_coordinator_automatic_registry_cleanup( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + values: dict[str, MowerAttributes], ) -> None: """Test automatic registry cleanup.""" await setup_integration(hass, mock_config_entry) @@ -211,9 +189,6 @@ async def test_coordinator_automatic_registry_cleanup( dr.async_entries_for_config_entry(device_registry, entry.entry_id) ) - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values.pop(TEST_MOWER_ID) mock_automower_client.get_status.return_value = values await hass.config_entries.async_reload(mock_config_entry.entry_id) @@ -227,3 +202,71 @@ async def test_coordinator_automatic_registry_cleanup( len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == current_devices - 1 ) + + +async def test_add_and_remove_work_area( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + values: dict[str, MowerAttributes], +) -> None: + """Test adding a work area in runtime.""" + await setup_integration(hass, mock_config_entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + current_entites_start = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + values[TEST_MOWER_ID].work_area_names.append("new work area") + values[TEST_MOWER_ID].work_area_dict.update({1: "new work area"}) + values[TEST_MOWER_ID].work_areas.update( + { + 1: WorkArea( + name="new work area", + cutting_height=12, + enabled=True, + progress=12, + last_time_completed=datetime( + 2024, 10, 1, 11, 11, 0, tzinfo=dt_util.get_default_time_zone() + ), + ) + } + ) + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + current_entites_after_addition = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert ( + current_entites_after_addition + == current_entites_start + + ADDITIONAL_NUMBER_ENTITIES + + 
ADDITIONAL_SENSOR_ENTITIES + + ADDITIONAL_SWITCH_ENTITIES + ) + + values[TEST_MOWER_ID].work_area_names.remove("new work area") + del values[TEST_MOWER_ID].work_area_dict[1] + del values[TEST_MOWER_ID].work_areas[1] + values[TEST_MOWER_ID].work_area_names.remove("Front lawn") + del values[TEST_MOWER_ID].work_area_dict[123456] + del values[TEST_MOWER_ID].work_areas[123456] + del values[TEST_MOWER_ID].calendar.tasks[:2] + values[TEST_MOWER_ID].mower.work_area_id = 654321 + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + current_entites_after_deletion = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert ( + current_entites_after_deletion + == current_entites_start + - ADDITIONAL_SWITCH_ENTITIES + - ADDITIONAL_NUMBER_ENTITIES + - ADDITIONAL_SENSOR_ENTITIES + ) diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 552a3a6a9cf176..3aca509e865048 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock from aioautomower.exceptions import ApiException -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerActivities, MowerAttributes, MowerStates from freezegun.api import FrozenDateTimeFactory import pytest from voluptuous.error import MultipleInvalid @@ -18,11 +18,7 @@ from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, -) +from tests.common import MockConfigEntry, async_fire_time_changed async def test_lawn_mower_states( @@ -30,21 +26,23 @@ async def test_lawn_mower_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test lawn_mower state.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("lawn_mower.test_mower_1") assert state is not None assert state.state == LawnMowerActivity.DOCKED for activity, state, expected_state in ( - ("UNKNOWN", "PAUSED", LawnMowerActivity.PAUSED), - ("MOWING", "NOT_APPLICABLE", LawnMowerActivity.MOWING), - ("NOT_APPLICABLE", "ERROR", LawnMowerActivity.ERROR), - ("GOING_HOME", "IN_OPERATION", LawnMowerActivity.RETURNING), + (MowerActivities.UNKNOWN, MowerStates.PAUSED, LawnMowerActivity.PAUSED), + (MowerActivities.MOWING, MowerStates.NOT_APPLICABLE, LawnMowerActivity.MOWING), + (MowerActivities.NOT_APPLICABLE, MowerStates.ERROR, LawnMowerActivity.ERROR), + ( + MowerActivities.GOING_HOME, + MowerStates.IN_OPERATION, + LawnMowerActivity.RETURNING, + ), ): values[TEST_MOWER_ID].mower.activity = activity values[TEST_MOWER_ID].mower.state = state @@ -253,12 +251,10 @@ async def test_lawn_mower_wrong_service_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test lawn_mower commands.""" await setup_integration(hass, mock_config_entry) - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values[TEST_MOWER_ID].capabilities.work_areas = mower_support_wa 
mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index b7ff84e14e6dac..e1f232e7b5ce53 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -4,15 +4,12 @@ from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import ( - DOMAIN, - EXECUTION_TIME_DELAY, -) +from homeassistant.components.husqvarna_automower.const import EXECUTION_TIME_DELAY from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -21,12 +18,7 @@ from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -68,13 +60,11 @@ async def test_number_workarea_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test number commands.""" entity_id = "number.test_mower_1_front_lawn_cutting_height" await setup_integration(hass, mock_config_entry) - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values[TEST_MOWER_ID].work_areas[123456].cutting_height = 75 mock_automower_client.get_status.return_value = values mocked_method = AsyncMock() diff --git a/tests/components/husqvarna_automower/test_select.py b/tests/components/husqvarna_automower/test_select.py index e885a4d3487927..18d1b0ed21ff61 100644 --- a/tests/components/husqvarna_automower/test_select.py +++ b/tests/components/husqvarna_automower/test_select.py @@ -3,12 +3,10 @@ from unittest.mock import AsyncMock from aioautomower.exceptions import ApiException -from aioautomower.model import HeadlightModes -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import HeadlightModes, MowerAttributes from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -16,11 +14,7 @@ from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, -) +from tests.common import MockConfigEntry, async_fire_time_changed async def test_select_states( @@ -28,11 +22,9 @@ async def test_select_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test states of headlight mode select.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("select.test_mower_1_headlight_mode") assert state is not None diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 39bff398da6730..06fcc30e40c2bd 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -1,14 +1,14 @@ """Tests for sensor platform.""" +import datetime from unittest.mock import AsyncMock, patch +import zoneinfo -from aioautomower.model import MowerModes, MowerStates -from aioautomower.utils import mower_list_to_dictionary_dataclass +from aioautomower.model import MowerAttributes, MowerModes, MowerStates from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant @@ -17,12 +17,7 @@ from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_value_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform async def test_sensor_unknown_states( @@ -30,11 +25,9 @@ async def test_sensor_unknown_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test a sensor which returns unknown.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) state = hass.states.get("sensor.test_mower_1_mode") assert state is not None @@ -63,11 +56,15 @@ async def test_cutting_blade_usage_time_sensor( assert state.state == "0.034" +@pytest.mark.freeze_time( + datetime.datetime(2023, 6, 5, tzinfo=zoneinfo.ZoneInfo("Europe/Berlin")) +) async def test_next_start_sensor( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test if this sensor is only added, if data is available.""" await setup_integration(hass, mock_config_entry) @@ -75,10 +72,7 @@ async def test_next_start_sensor( assert state is not None assert state.state == "2023-06-05T17:00:00+00:00" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) - values[TEST_MOWER_ID].planner.next_start_datetime_naive = None + values[TEST_MOWER_ID].planner.next_start_datetime = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -92,6 +86,7 @@ async def test_work_area_sensor( 
mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test the work area sensor.""" await setup_integration(hass, mock_config_entry) @@ -99,9 +94,6 @@ async def test_work_area_sensor( assert state is not None assert state.state == "Front lawn" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) values[TEST_MOWER_ID].mower.work_area_id = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) @@ -137,13 +129,10 @@ async def test_statistics_not_available( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, sensor_to_test: str, + values: dict[str, MowerAttributes], ) -> None: """Test if this sensor is only added, if data is available.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) - delattr(values[TEST_MOWER_ID].statistics, sensor_to_test) mock_automower_client.get_status.return_value = values await setup_integration(hass, mock_config_entry) @@ -156,11 +145,9 @@ async def test_error_sensor( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test error sensor.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) for state, error_key, expected_state in ( diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 8c62ff89154230..100fd9fe3a4e7e 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -2,9 +2,10 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch +import zoneinfo from aioautomower.exceptions import ApiException -from aioautomower.model import MowerModes +from aioautomower.model import MowerAttributes, MowerModes, Zone from aioautomower.utils import mower_list_to_dictionary_dataclass from freezegun.api import FrozenDateTimeFactory import pytest @@ -37,8 +38,9 @@ snapshot_platform, ) -TEST_ZONE_ID = "AAAAAAAA-BBBB-CCCC-DDDD-123456789101" TEST_AREA_ID = 0 +TEST_VARIABLE_ZONE_ID = "203F6359-AB56-4D57-A6DC-703095BB695D" +TEST_ZONE_ID = "AAAAAAAA-BBBB-CCCC-DDDD-123456789101" async def test_switch_states( @@ -46,11 +48,9 @@ async def test_switch_states( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + values: dict[str, MowerAttributes], ) -> None: """Test switch state.""" - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) await setup_integration(hass, mock_config_entry) for mode, expected_state in ( @@ -122,12 +122,14 @@ async def test_stay_out_zone_switch_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + mower_time_zone: zoneinfo.ZoneInfo, ) -> None: """Test switch commands.""" entity_id = "switch.test_mower_1_avoid_danger_zone" await setup_integration(hass, mock_config_entry) values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) + load_json_value_fixture("mower.json", DOMAIN), + mower_time_zone, ) values[TEST_MOWER_ID].stay_out_zones.zones[TEST_ZONE_ID].enabled = boolean mock_automower_client.get_status.return_value = values @@ -177,12 +179,15 @@ async def 
test_work_area_switch_commands( mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + mower_time_zone: zoneinfo.ZoneInfo, + values: dict[str, MowerAttributes], ) -> None: """Test switch commands.""" entity_id = "switch.test_mower_1_my_lawn" await setup_integration(hass, mock_config_entry) values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) + load_json_value_fixture("mower.json", DOMAIN), + mower_time_zone, ) values[TEST_MOWER_ID].work_areas[TEST_AREA_ID].enabled = boolean mock_automower_client.get_status.return_value = values @@ -216,29 +221,46 @@ async def test_work_area_switch_commands( assert len(mocked_method.mock_calls) == 2 -async def test_zones_deleted( +async def test_add_stay_out_zone( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, entity_registry: er.EntityRegistry, + values: dict[str, MowerAttributes], ) -> None: - """Test if stay-out-zone is deleted after removed.""" - - values = mower_list_to_dictionary_dataclass( - load_json_value_fixture("mower.json", DOMAIN) - ) + """Test adding a stay out zone in runtime.""" await setup_integration(hass, mock_config_entry) - current_entries = len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) + entry = hass.config_entries.async_entries(DOMAIN)[0] + current_entites = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) ) - - del values[TEST_MOWER_ID].stay_out_zones.zones[TEST_ZONE_ID] + values[TEST_MOWER_ID].stay_out_zones.zones.update( + { + TEST_VARIABLE_ZONE_ID: Zone( + name="future_zone", + enabled=True, + ) + } + ) + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + current_entites_after_addition = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert current_entites_after_addition == current_entites + 1 + values[TEST_MOWER_ID].stay_out_zones.zones.pop(TEST_VARIABLE_ZONE_ID) + values[TEST_MOWER_ID].stay_out_zones.zones.pop(TEST_ZONE_ID) mock_automower_client.get_status.return_value = values - await hass.config_entries.async_reload(mock_config_entry.entry_id) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert len( - er.async_entries_for_config_entry(entity_registry, mock_config_entry.entry_id) - ) == (current_entries - 1) + current_entites_after_deletion = len( + er.async_entries_for_config_entry(entity_registry, entry.entry_id) + ) + assert current_entites_after_deletion == current_entites - 1 async def test_switch_snapshot( diff --git a/tests/components/husqvarna_automower_ble/__init__.py b/tests/components/husqvarna_automower_ble/__init__.py new file mode 100644 index 00000000000000..7ca5aea121d98f --- /dev/null +++ b/tests/components/husqvarna_automower_ble/__init__.py @@ -0,0 +1,74 @@ +"""Tests for the Husqvarna Automower Bluetooth integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + +AUTOMOWER_SERVICE_INFO = BluetoothServiceInfo( + name="305", + address="00000000-0000-0000-0000-000000000003", + rssi=-63, + service_data={}, + 
+    manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"},
+    service_uuids=[
+        "98bd0001-0b0e-421a-84e5-ddbf75dc6de4",
+        "00001800-0000-1000-8000-00805f9b34fb",
+    ],
+    source="local",
+)
+
+AUTOMOWER_UNNAMED_SERVICE_INFO = BluetoothServiceInfo(
+    name=None,
+    address="00000000-0000-0000-0000-000000000004",
+    rssi=-63,
+    service_data={},
+    manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"},
+    service_uuids=[
+        "98bd0001-0b0e-421a-84e5-ddbf75dc6de4",
+        "00001800-0000-1000-8000-00805f9b34fb",
+    ],
+    source="local",
+)
+
+AUTOMOWER_MISSING_MANUFACTURER_DATA_SERVICE_INFO = BluetoothServiceInfo(
+    name="Missing Manufacturer Data",
+    address="00000000-0000-0000-0002-000000000001",
+    rssi=-63,
+    service_data={},
+    manufacturer_data={},
+    service_uuids=[
+        "98bd0001-0b0e-421a-84e5-ddbf75dc6de4",
+        "00001800-0000-1000-8000-00805f9b34fb",
+    ],
+    source="local",
+)
+
+AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO = BluetoothServiceInfo(
+    name="Unsupported Group",
+    address="00000000-0000-0000-0002-000000000002",
+    rssi=-63,
+    service_data={},
+    manufacturer_data={1062: b"\x05\x04\xbf\xcf\xbb\r"},
+    service_uuids=[
+        "98bd0001-0b0e-421a-84e5-ddbf75dc6de4",
+    ],
+    source="local",
+)
+
+
+async def setup_entry(
+    hass: HomeAssistant, mock_entry: MockConfigEntry, platforms: list[Platform]
+) -> None:
+    """Make sure the device is available."""
+
+    inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO)
+
+    with patch("homeassistant.components.husqvarna_automower_ble.PLATFORMS", platforms):
+        mock_entry.add_to_hass(hass)
+        await hass.config_entries.async_setup(mock_entry.entry_id)
+        await hass.async_block_till_done()
diff --git a/tests/components/husqvarna_automower_ble/conftest.py b/tests/components/husqvarna_automower_ble/conftest.py
new file mode 100644
index 00000000000000..3a8e881aba0b27
--- /dev/null
+++ b/tests/components/husqvarna_automower_ble/conftest.py
@@ -0,0 +1,62 @@
+"""Common fixtures for the Husqvarna Automower Bluetooth tests."""
+
+from collections.abc import Generator
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+from homeassistant.components.husqvarna_automower_ble.const import DOMAIN
+from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID
+
+from . import AUTOMOWER_SERVICE_INFO
+
+from tests.common import MockConfigEntry
+
+
+@pytest.fixture
+def mock_setup_entry() -> Generator[AsyncMock]:
+    """Override async_setup_entry."""
+    with patch(
+        "homeassistant.components.husqvarna_automower_ble.async_setup_entry",
+        return_value=True,
+    ) as mock_setup_entry:
+        yield mock_setup_entry
+
+
+@pytest.fixture(autouse=True)
+def mock_automower_client(enable_bluetooth: None) -> Generator[AsyncMock]:
+    """Mock a BleakClient client."""
+    with (
+        patch(
+            "homeassistant.components.husqvarna_automower_ble.Mower",
+            autospec=True,
+        ) as mock_client,
+        patch(
+            "homeassistant.components.husqvarna_automower_ble.config_flow.Mower",
+            new=mock_client,
+        ),
+    ):
+        client = mock_client.return_value
+        client.connect.return_value = True
+        client.is_connected.return_value = True
+        client.get_model.return_value = "305"
+        client.battery_level.return_value = 100
+        client.mower_state.return_value = "pendingStart"
+        client.mower_activity.return_value = "charging"
+        client.probe_gatts.return_value = ("Husqvarna", "Automower", "305")
+
+        yield client
+
+
+@pytest.fixture
+def mock_config_entry() -> MockConfigEntry:
+    """Mock a config entry."""
+    return MockConfigEntry(
+        domain=DOMAIN,
+        title="Husqvarna AutoMower",
+        data={
+            CONF_ADDRESS: AUTOMOWER_SERVICE_INFO.address,
+            CONF_CLIENT_ID: 1197489078,
+        },
+        unique_id=AUTOMOWER_SERVICE_INFO.address,
+    )
diff --git a/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr b/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr
new file mode 100644
index 00000000000000..1cc5402019501f
--- /dev/null
+++ b/tests/components/husqvarna_automower_ble/snapshots/test_init.ambr
@@ -0,0 +1,33 @@
+# serializer version: 1
+# name: test_setup
+  DeviceRegistryEntrySnapshot({
+    'area_id': None,
+    'config_entries': ,
+    'configuration_url': None,
+    'connections': set({
+    }),
+    'disabled_by': None,
+    'entry_type': None,
+    'hw_version': None,
+    'id': ,
+    'identifiers': set({
+      tuple(
+        'husqvarna_automower_ble',
+        '00000000-0000-0000-0000-000000000003_1197489078',
+      ),
+    }),
+    'is_new': False,
+    'labels': set({
+    }),
+    'manufacturer': 'Husqvarna',
+    'model': None,
+    'model_id': '305',
+    'name': 'Husqvarna AutoMower',
+    'name_by_user': None,
+    'primary_config_entry': ,
+    'serial_number': None,
+    'suggested_area': None,
+    'sw_version': None,
+    'via_device_id': None,
+  })
+# ---
diff --git a/tests/components/husqvarna_automower_ble/test_config_flow.py b/tests/components/husqvarna_automower_ble/test_config_flow.py
new file mode 100644
index 00000000000000..e053a28b7dd473
--- /dev/null
+++ b/tests/components/husqvarna_automower_ble/test_config_flow.py
@@ -0,0 +1,198 @@
+"""Test the Husqvarna Bluetooth config flow."""
+
+from unittest.mock import Mock, patch
+
+from bleak import BleakError
+import pytest
+
+from homeassistant.components.husqvarna_automower_ble.const import DOMAIN
+from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER
+from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID
+from homeassistant.core import HomeAssistant
+from homeassistant.data_entry_flow import FlowResultType
+
+from .
import ( + AUTOMOWER_SERVICE_INFO, + AUTOMOWER_UNNAMED_SERVICE_INFO, + AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO, +) + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + + +@pytest.fixture(autouse=True) +def mock_random() -> Mock: + """Mock random to generate predictable client id.""" + with patch( + "homeassistant.components.husqvarna_automower_ble.config_flow.random" + ) as mock_random: + mock_random.randint.return_value = 1197489078 + yield mock_random + + +async def test_user_selection(hass: HomeAssistant) -> None: + """Test we can select a device.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000001"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Husqvarna Automower" + assert result["result"].unique_id == "00000000-0000-0000-0000-000000000001" + + assert result["data"] == { + CONF_ADDRESS: "00000000-0000-0000-0000-000000000001", + CONF_CLIENT_ID: 1197489078, + } + + +async def test_bluetooth(hass: HomeAssistant) -> None: + """Test bluetooth device discovery.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] + assert result["step_id"] == "confirm" + assert result["context"]["unique_id"] == "00000000-0000-0000-0000-000000000003" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Husqvarna Automower" + assert result["result"].unique_id == "00000000-0000-0000-0000-000000000003" + + assert result["data"] == { + CONF_ADDRESS: "00000000-0000-0000-0000-000000000003", + CONF_CLIENT_ID: 1197489078, + } + + +async def test_bluetooth_invalid(hass: HomeAssistant) -> None: + """Test bluetooth device discovery with invalid data.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=AUTOMOWER_UNSUPPORTED_GROUP_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" + + +async def test_failed_connect( + hass: HomeAssistant, + mock_automower_client: Mock, +) -> None: + """Test we can select a device.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + mock_automower_client.connect.side_effect = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": 
SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000001"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Husqvarna Automower" + assert result["result"].unique_id == "00000000-0000-0000-0000-000000000001" + + assert result["data"] == { + CONF_ADDRESS: "00000000-0000-0000-0000-000000000001", + CONF_CLIENT_ID: 1197489078, + } + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we can select a device.""" + + mock_config_entry.add_to_hass(hass) + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + + await hass.async_block_till_done(wait_background_tasks=True) + + # Test we should not discover the already configured device + assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 0 + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ADDRESS: "00000000-0000-0000-0000-000000000003"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_exception_connect( + hass: HomeAssistant, + mock_automower_client: Mock, +) -> None: + """Test we can select a device.""" + + inject_bluetooth_service_info(hass, AUTOMOWER_SERVICE_INFO) + inject_bluetooth_service_info(hass, AUTOMOWER_UNNAMED_SERVICE_INFO) + await hass.async_block_till_done(wait_background_tasks=True) + + mock_automower_client.probe_gatts.side_effect = BleakError + + result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/husqvarna_automower_ble/test_init.py b/tests/components/husqvarna_automower_ble/test_init.py new file mode 100644 index 00000000000000..3cb4338eca4095 --- /dev/null +++ b/tests/components/husqvarna_automower_ble/test_init.py @@ -0,0 +1,71 @@ +"""Test the Husqvarna Automower Bluetooth setup.""" + +from unittest.mock import Mock + +from bleak import BleakError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.husqvarna_automower_ble.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import AUTOMOWER_SERVICE_INFO + +from tests.common import MockConfigEntry + +pytestmark = pytest.mark.usefixtures("mock_automower_client") + + +async def test_setup( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup creates expected devices.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, f"{AUTOMOWER_SERVICE_INFO.address}_1197489078")} + ) + + assert device_entry == snapshot + + +async def test_setup_retry_connect( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setup creates expected devices.""" + + mock_automower_client.connect.return_value = False + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_failed_connect( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setup creates expected devices.""" + + mock_automower_client.connect.side_effect = BleakError + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/husqvarna_automower_ble/test_lawn_mower.py b/tests/components/husqvarna_automower_ble/test_lawn_mower.py new file mode 100644 index 00000000000000..3f00d3dbff0060 --- /dev/null +++ b/tests/components/husqvarna_automower_ble/test_lawn_mower.py @@ -0,0 +1,126 @@ +"""Test the Husqvarna Automower Bluetooth setup.""" + +from datetime import timedelta +from unittest.mock import Mock + +from bleak import BleakError +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = pytest.mark.usefixtures("mock_automower_client") + + +@pytest.mark.parametrize( + ( + "is_connected_side_effect", + "is_connected_return_value", + "connect_side_effect", + "connect_return_value", + ), + [ + (None, False, None, False), + (None, False, BleakError, False), + (None, False, None, True), + (BleakError, False, None, True), + ], +) +async def test_setup_disconnect( + hass: HomeAssistant, + mock_automower_client: Mock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + is_connected_side_effect: Exception, + is_connected_return_value: bool, + connect_side_effect: Exception, + connect_return_value: bool, +) -> None: + """Test disconnected device.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + assert hass.states.get("lawn_mower.husqvarna_automower").state != STATE_UNAVAILABLE + + mock_automower_client.is_connected.side_effect = is_connected_side_effect + mock_automower_client.is_connected.return_value = is_connected_return_value + 
+    mock_automower_client.connect.side_effect = connect_side_effect
+    mock_automower_client.connect.return_value = connect_return_value
+
+    freezer.tick(timedelta(seconds=60))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE
+
+
+@pytest.mark.parametrize(
+    ("attribute"),
+    [
+        "mower_activity",
+        "mower_state",
+        "battery_level",
+    ],
+)
+async def test_invalid_data_received(
+    hass: HomeAssistant,
+    mock_automower_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    freezer: FrozenDateTimeFactory,
+    attribute: str,
+) -> None:
+    """Test invalid data received."""
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert mock_config_entry.state is ConfigEntryState.LOADED
+
+    getattr(mock_automower_client, attribute).return_value = None
+
+    freezer.tick(timedelta(seconds=60))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE
+
+
+@pytest.mark.parametrize(
+    ("attribute"),
+    [
+        "mower_activity",
+        "mower_state",
+        "battery_level",
+    ],
+)
+async def test_bleak_error_data_update(
+    hass: HomeAssistant,
+    mock_automower_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    freezer: FrozenDateTimeFactory,
+    attribute: str,
+) -> None:
+    """Test BleakError during data update."""
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert mock_config_entry.state is ConfigEntryState.LOADED
+
+    getattr(mock_automower_client, attribute).side_effect = BleakError
+
+    freezer.tick(timedelta(seconds=60))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get("lawn_mower.husqvarna_automower").state == STATE_UNAVAILABLE
diff --git a/tests/components/huum/conftest.py b/tests/components/huum/conftest.py
new file mode 100644
index 00000000000000..da66cc54b72ea2
--- /dev/null
+++ b/tests/components/huum/conftest.py
@@ -0,0 +1,6 @@
+"""Skip test collection for Python 3.13."""
+
+import sys
+
+if sys.version_info >= (3, 13):
+    collect_ignore_glob = ["test_*.py"]
diff --git a/tests/components/hvv_departures/test_config_flow.py b/tests/components/hvv_departures/test_config_flow.py
index c85bfb7f6ee98f..8d82382d9a29e9 100644
--- a/tests/components/hvv_departures/test_config_flow.py
+++ b/tests/components/hvv_departures/test_config_flow.py
@@ -4,6 +4,7 @@
 from unittest.mock import patch
 
 from pygti.exceptions import CannotConnect, InvalidAuth
+import pytest
 
 from homeassistant.components.hvv_departures.const import (
     CONF_FILTER,
@@ -312,6 +313,10 @@
     }
 
 
+@pytest.mark.parametrize(  # Remove when translations fixed
+    "ignore_translations",
+    ["component.hvv_departures.options.error.invalid_auth"],
+)
 async def test_options_flow_invalid_auth(hass: HomeAssistant) -> None:
     """Test that options flow works."""
 
@@ -355,6 +360,10 @@
     assert result["errors"] == {"base": "invalid_auth"}
 
 
+@pytest.mark.parametrize(  # Remove when translations fixed
+    "ignore_translations",
+    ["component.hvv_departures.options.error.cannot_connect"],
+)
 async def test_options_flow_cannot_connect(hass: HomeAssistant) -> None:
     """Test that options flow works."""
 
diff --git
a/tests/components/iaqualink/test_config_flow.py b/tests/components/iaqualink/test_config_flow.py index 4aaa66416f6344..26540eb73086ba 100644 --- a/tests/components/iaqualink/test_config_flow.py +++ b/tests/components/iaqualink/test_config_flow.py @@ -7,7 +7,8 @@ AqualinkServiceUnauthorizedException, ) -from homeassistant.components.iaqualink import config_flow +from homeassistant.components.iaqualink import DOMAIN, config_flow +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -18,13 +19,12 @@ async def test_already_configured( """Test config flow when iaqualink component is already setup.""" config_entry.add_to_hass(hass) - flow = config_flow.AqualinkFlowHandler() - flow.hass = hass - flow.context = {} - - result = await flow.async_step_user(config_data) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" async def test_without_config(hass: HomeAssistant) -> None: diff --git a/tests/components/idasen_desk/test_cover.py b/tests/components/idasen_desk/test_cover.py index 0110fe7d8208b4..83312c04e725c4 100644 --- a/tests/components/idasen_desk/test_cover.py +++ b/tests/components/idasen_desk/test_cover.py @@ -10,14 +10,13 @@ ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -36,7 +35,7 @@ async def test_cover_available( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 mock_desk_api.connect = AsyncMock() @@ -51,11 +50,11 @@ async def test_cover_available( @pytest.mark.parametrize( ("service", "service_data", "expected_state", "expected_position"), [ - (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 100}, STATE_OPEN, 100), - (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 0}, STATE_CLOSED, 0), - (SERVICE_OPEN_COVER, {}, STATE_OPEN, 100), - (SERVICE_CLOSE_COVER, {}, STATE_CLOSED, 0), - (SERVICE_STOP_COVER, {}, STATE_OPEN, 60), + (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 100}, CoverState.OPEN, 100), + (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 0}, CoverState.CLOSED, 0), + (SERVICE_OPEN_COVER, {}, CoverState.OPEN, 100), + (SERVICE_CLOSE_COVER, {}, CoverState.CLOSED, 0), + (SERVICE_STOP_COVER, {}, CoverState.OPEN, 60), ], ) async def test_cover_services( @@ -71,7 +70,7 @@ async def test_cover_services( await init_integration(hass) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 await hass.services.async_call( COVER_DOMAIN, diff --git a/tests/components/ifttt/test_init.py b/tests/components/ifttt/test_init.py index 44896dc0f2c1c0..c6d24421a8ab60 100644 --- a/tests/components/ifttt/test_init.py +++ b/tests/components/ifttt/test_init.py @@ -2,8 +2,8 @@ from homeassistant import config_entries from homeassistant.components import ifttt -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow 
import FlowResultType from tests.typing import ClientSessionGenerator diff --git a/tests/components/image/conftest.py b/tests/components/image/conftest.py index e5e7649bee8e9e..06ef7db9f491ab 100644 --- a/tests/components/image/conftest.py +++ b/tests/components/image/conftest.py @@ -88,6 +88,16 @@ async def async_image(self) -> bytes | None: return b"Test" +class MockImageNoDataEntity(image.ImageEntity): + """Mock image entity.""" + + _attr_name = "Test" + + async def async_image(self) -> bytes | None: + """Return bytes of image.""" + return None + + class MockImageSyncEntity(image.ImageEntity): """Mock image entity.""" diff --git a/tests/components/image/test_init.py b/tests/components/image/test_init.py index 90b750976ce470..3bcf0df52e3a59 100644 --- a/tests/components/image/test_init.py +++ b/tests/components/image/test_init.py @@ -3,7 +3,7 @@ from datetime import datetime from http import HTTPStatus import ssl -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, mock_open, patch from aiohttp import hdrs from freezegun.api import FrozenDateTimeFactory @@ -13,13 +13,16 @@ from homeassistant.components import image from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from .conftest import ( MockImageEntity, MockImageEntityCapitalContentType, MockImageEntityInvalidContentType, + MockImageNoDataEntity, MockImageNoStateEntity, MockImagePlatform, MockImageSyncEntity, @@ -381,3 +384,112 @@ async def _wrap_async_get_still_stream(*args, **kwargs): await hass.async_block_till_done() await close_future + + +async def test_snapshot_service(hass: HomeAssistant) -> None: + """Test snapshot service.""" + mopen = mock_open() + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockImageSyncEntity(hass)])) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.image.open", mopen, create=True), + patch("homeassistant.components.image.os.makedirs"), + patch.object(hass.config, "is_allowed_path", return_value=True), + ): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + mock_write = mopen().write + + assert len(mock_write.mock_calls) == 1 + assert mock_write.mock_calls[0][1][0] == b"Test" + + +async def test_snapshot_service_no_image(hass: HomeAssistant) -> None: + """Test snapshot service with no image.""" + mopen = mock_open() + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockImageNoDataEntity(hass)])) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.image.open", mopen, create=True), + patch( + "homeassistant.components.image.os.makedirs", + ), + patch.object(hass.config, "is_allowed_path", return_value=True), + ): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + mock_write = mopen().write + + assert len(mock_write.mock_calls) == 0 + + +async 
def test_snapshot_service_not_allowed_path(hass: HomeAssistant) -> None: + """Test snapshot service with a not allowed path.""" + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockURLImageEntity(hass)])) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with pytest.raises(HomeAssistantError, match="/test/snapshot.jpg"): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) + + +async def test_snapshot_service_os_error(hass: HomeAssistant) -> None: + """Test snapshot service with os error.""" + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.image", MockImagePlatform([MockImageSyncEntity(hass)])) + assert await async_setup_component( + hass, image.DOMAIN, {"image": {"platform": "test"}} + ) + await hass.async_block_till_done() + + with ( + patch.object(hass.config, "is_allowed_path", return_value=True), + patch("os.makedirs", side_effect=OSError), + pytest.raises(HomeAssistantError), + ): + await hass.services.async_call( + image.DOMAIN, + image.SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: "image.test", + image.ATTR_FILENAME: "/test/snapshot.jpg", + }, + blocking=True, + ) diff --git a/tests/components/imap/test_config_flow.py b/tests/components/imap/test_config_flow.py index fb97bf0505d5d3..2270030ad4ffde 100644 --- a/tests/components/imap/test_config_flow.py +++ b/tests/components/imap/test_config_flow.py @@ -15,7 +15,7 @@ DOMAIN, ) from homeassistant.components.imap.errors import InvalidAuth, InvalidFolder -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -218,7 +218,10 @@ async def test_reauth_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {CONF_USERNAME: "email@email.com"} + assert result["description_placeholders"] == { + CONF_USERNAME: "email@email.com", + CONF_NAME: "Mock Title", + } with patch( "homeassistant.components.imap.config_flow.connect_to_server" diff --git a/tests/components/improv_ble/__init__.py b/tests/components/improv_ble/__init__.py index 41ea98cda7bf58..521d088144316b 100644 --- a/tests/components/improv_ble/__init__.py +++ b/tests/components/improv_ble/__init__.py @@ -25,6 +25,25 @@ ) +BAD_IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( + name="00123456", + address="AA:BB:CC:DD:EE:F0", + rssi=-60, + manufacturer_data={}, + service_uuids=[SERVICE_UUID], + service_data={SERVICE_DATA_UUID: b"\x00\x00\x00\x00\x00\x00"}, + source="local", + device=generate_ble_device(address="AA:BB:CC:DD:EE:F0", name="00123456"), + advertisement=generate_advertisement_data( + service_uuids=[SERVICE_UUID], + service_data={SERVICE_DATA_UUID: b"\x00\x00\x00\x00\x00\x00"}, + ), + time=0, + connectable=True, + tx_power=-127, +) + + PROVISIONED_IMPROV_BLE_DISCOVERY_INFO = BluetoothServiceInfoBleak( name="00123456", address="AA:BB:CC:DD:EE:F0", diff --git a/tests/components/improv_ble/test_config_flow.py b/tests/components/improv_ble/test_config_flow.py index 640a931bee5464..2df4be2ba7d2d5 100644 --- 
a/tests/components/improv_ble/test_config_flow.py +++ b/tests/components/improv_ble/test_config_flow.py @@ -15,6 +15,7 @@ from homeassistant.data_entry_flow import FlowResult, FlowResultType from . import ( + BAD_IMPROV_BLE_DISCOVERY_INFO, IMPROV_BLE_DISCOVERY_INFO, NOT_IMPROV_BLE_DISCOVERY_INFO, PROVISIONED_IMPROV_BLE_DISCOVERY_INFO, @@ -649,3 +650,20 @@ async def test_provision_retry(hass: HomeAssistant, exc, error) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "provision" assert result["errors"] == {"base": error} + + +async def test_provision_fails_invalid_data( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test bluetooth flow with error due to invalid data.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=BAD_IMPROV_BLE_DISCOVERY_INFO, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "invalid_improv_data" + assert ( + "Aborting improv flow, device AA:BB:CC:DD:EE:F0 sent invalid improv data: '000000000000'" + in caplog.text + ) diff --git a/tests/components/incomfort/snapshots/test_binary_sensor.ambr b/tests/components/incomfort/snapshots/test_binary_sensor.ambr index 565abcaa26f8d1..2f2319b6a4421d 100644 --- a/tests/components/incomfort/snapshots/test_binary_sensor.ambr +++ b/tests/components/incomfort/snapshots/test_binary_sensor.ambr @@ -188,7 +188,7 @@ 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running-entry] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -200,7 +200,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', + 'entity_id': 'binary_sensor.boiler_burner', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -212,30 +212,30 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Burner', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running-state] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_burner-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 'friendly_name': 'Boiler Burner', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running', + 'entity_id': 'binary_sensor.boiler_burner', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_2-entry] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_fault-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -247,7 +247,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', + 'entity_id': 'binary_sensor.boiler_fault', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -257,32 +257,33 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Fault', 'platform': 'incomfort', 
'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', + 'translation_key': 'fault', + 'unique_id': 'c0ffeec0ffee_failed', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_2-state] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_fault-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 'device_class': 'problem', + 'fault_code': , + 'friendly_name': 'Boiler Fault', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', + 'entity_id': 'binary_sensor.boiler_fault', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_3-entry] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_hot_water_tap-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -294,7 +295,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', + 'entity_id': 'binary_sensor.boiler_hot_water_tap', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -306,7 +307,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Hot water tap', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, @@ -315,116 +316,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_burning][binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_burner-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_burner', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Burner', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_burner-state] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_hot_water_tap-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Burner', + 'friendly_name': 'Boiler Hot water tap', }), 'context': , - 'entity_id': 'binary_sensor.boiler_burner', + 'entity_id': 'binary_sensor.boiler_hot_water_tap', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_fault-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 
'entity_category': None, - 'entity_id': 'binary_sensor.boiler_fault', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fault', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fault', - 'unique_id': 'c0ffeec0ffee_failed', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_fault-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'fault_code': , - 'friendly_name': 'Boiler Fault', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_fault', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_hot_water_tap-entry] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_pump-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -436,7 +342,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_hot_water_tap', + 'entity_id': 'binary_sensor.boiler_pump', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -448,30 +354,30 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Hot water tap', + 'original_name': 'Pump', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', + 'translation_key': 'is_pumping', + 'unique_id': 'c0ffeec0ffee_is_pumping', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_hot_water_tap-state] +# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_pump-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Hot water tap', + 'friendly_name': 'Boiler Pump', }), 'context': , - 'entity_id': 'binary_sensor.boiler_hot_water_tap', + 'entity_id': 'binary_sensor.boiler_pump', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_pump-entry] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -483,7 +389,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_pump', + 'entity_id': 'binary_sensor.boiler_burner', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -495,30 +401,30 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Pump', + 'original_name': 'Burner', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_pump-state] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_burner-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Pump', + 'friendly_name': 'Boiler Burner', }), 'context': , - 'entity_id': 'binary_sensor.boiler_pump', + 'entity_id': 'binary_sensor.boiler_burner', 
'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running-entry] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_fault-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -530,7 +436,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', + 'entity_id': 'binary_sensor.boiler_fault', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -540,32 +446,33 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Fault', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', + 'translation_key': 'fault', + 'unique_id': 'c0ffeec0ffee_failed', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running-state] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_fault-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 'device_class': 'problem', + 'fault_code': 'none', + 'friendly_name': 'Boiler Fault', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running', + 'entity_id': 'binary_sensor.boiler_fault', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_2-entry] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_hot_water_tap-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -577,7 +484,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', + 'entity_id': 'binary_sensor.boiler_hot_water_tap', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -589,30 +496,30 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Hot water tap', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', + 'translation_key': 'is_tapping', + 'unique_id': 'c0ffeec0ffee_is_tapping', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_2-state] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_hot_water_tap-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 'friendly_name': 'Boiler Hot water tap', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', + 'entity_id': 'binary_sensor.boiler_hot_water_tap', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_3-entry] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_pump-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -624,7 +531,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', + 'entity_id': 'binary_sensor.boiler_pump', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -636,30 +543,30 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 
'Running', + 'original_name': 'Pump', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', + 'translation_key': 'is_pumping', + 'unique_id': 'c0ffeec0ffee_is_pumping', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_failed][binary_sensor.boiler_running_3-state] +# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_pump-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 'friendly_name': 'Boiler Pump', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', + 'entity_id': 'binary_sensor.boiler_pump', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'off', + 'state': 'on', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_burner-entry] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -692,7 +599,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_burner-state] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_burner-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', @@ -706,7 +613,7 @@ 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_fault-entry] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_fault-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -739,7 +646,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_fault-state] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_fault-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', @@ -754,7 +661,7 @@ 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_hot_water_tap-entry] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_hot_water_tap-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -787,7 +694,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_hot_water_tap-state] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_hot_water_tap-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', @@ -798,10 +705,10 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'off', + 'state': 'on', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_pump-entry] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_pump-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -834,7 +741,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_pump-state] +# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_pump-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', @@ -845,10 +752,10 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'on', + 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running-entry] +# name: test_setup_platform[binary_sensor.boiler_burner-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -860,7 +767,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 
'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', + 'entity_id': 'binary_sensor.boiler_burner', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -872,30 +779,30 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Burner', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', + 'translation_key': 'is_burning', + 'unique_id': 'c0ffeec0ffee_is_burning', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running-state] +# name: test_setup_platform[binary_sensor.boiler_burner-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 'friendly_name': 'Boiler Burner', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running', + 'entity_id': 'binary_sensor.boiler_burner', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'on', + 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_2-entry] +# name: test_setup_platform[binary_sensor.boiler_fault-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -907,7 +814,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', + 'entity_id': 'binary_sensor.boiler_fault', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -917,32 +824,33 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Fault', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', + 'translation_key': 'fault', + 'unique_id': 'c0ffeec0ffee_failed', 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_2-state] +# name: test_setup_platform[binary_sensor.boiler_fault-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 'device_class': 'problem', + 'fault_code': 'none', + 'friendly_name': 'Boiler Fault', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', + 'entity_id': 'binary_sensor.boiler_fault', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_3-entry] +# name: test_setup_platform[binary_sensor.boiler_hot_water_tap-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -954,7 +862,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', + 'entity_id': 'binary_sensor.boiler_hot_water_tap', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -966,7 +874,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Running', + 'original_name': 'Hot water tap', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, @@ -975,21 +883,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_setup_binary_sensors_alt[is_pumping][binary_sensor.boiler_running_3-state] +# name: test_setup_platform[binary_sensor.boiler_hot_water_tap-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'Boiler Running', + 
'friendly_name': 'Boiler Hot water tap', }), 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', + 'entity_id': 'binary_sensor.boiler_hot_water_tap', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_burner-entry] +# name: test_setup_platform[binary_sensor.boiler_pump-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1001,7 +909,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_burner', + 'entity_id': 'binary_sensor.boiler_pump', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1013,479 +921,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Burner', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_burner-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Burner', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_burner', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_fault-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_fault', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fault', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fault', - 'unique_id': 'c0ffeec0ffee_failed', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_fault-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'fault_code': 'none', - 'friendly_name': 'Boiler Fault', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_fault', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_hot_water_tap-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_hot_water_tap', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Hot water tap', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_hot_water_tap-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Hot water tap', 
- }), - 'context': , - 'entity_id': 'binary_sensor.boiler_hot_water_tap', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_pump-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_pump', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pump', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_pump-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Pump', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_pump', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 
'binary_sensor.boiler_running_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_binary_sensors_alt[is_tapping][binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_burner-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_burner', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Burner', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_burner-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Burner', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_burner', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_fault-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_fault', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fault', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'fault', - 'unique_id': 'c0ffeec0ffee_failed', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_fault-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'fault_code': 'none', - 'friendly_name': 'Boiler Fault', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_fault', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: 
test_setup_platform[binary_sensor.boiler_hot_water_tap-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_hot_water_tap', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Hot water tap', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_hot_water_tap-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Hot water tap', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_hot_water_tap', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_pump-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_pump', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Pump', + 'original_name': 'Pump', 'platform': 'incomfort', 'previous_unique_id': None, 'supported_features': 0, @@ -1508,144 +944,3 @@ 'state': 'off', }) # --- -# name: test_setup_platform[binary_sensor.boiler_running-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_pumping', - 'unique_id': 'c0ffeec0ffee_is_pumping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 
'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_burning', - 'unique_id': 'c0ffeec0ffee_is_burning', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.boiler_running_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Running', - 'platform': 'incomfort', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'is_tapping', - 'unique_id': 'c0ffeec0ffee_is_tapping', - 'unit_of_measurement': None, - }) -# --- -# name: test_setup_platform[binary_sensor.boiler_running_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'Boiler Running', - }), - 'context': , - 'entity_id': 'binary_sensor.boiler_running_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/incomfort/test_water_heater.py b/tests/components/incomfort/test_water_heater.py index 5b7aebc50a8019..082aecf6d497cc 100644 --- a/tests/components/incomfort/test_water_heater.py +++ b/tests/components/incomfort/test_water_heater.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch +import pytest from syrupy import SnapshotAssertion from homeassistant.config_entries import ConfigEntry @@ -9,6 +10,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from .conftest import MOCK_HEATER_STATUS + from tests.common import snapshot_platform @@ -23,3 +26,44 @@ async def test_setup_platform( """Test the incomfort entities are set up correctly.""" await hass.config_entries.async_setup(mock_config_entry.entry_id) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_heater_status", "current_temperature"), + [ + (MOCK_HEATER_STATUS, 35.3), + (MOCK_HEATER_STATUS | {"is_tapping": True}, 30.2), + (MOCK_HEATER_STATUS | {"is_pumping": True}, 35.3), + (MOCK_HEATER_STATUS | {"heater_temp": None}, 30.2), + (MOCK_HEATER_STATUS | {"tap_temp": None}, 35.3), + (MOCK_HEATER_STATUS | {"heater_temp": None, "tap_temp": None}, None), + ], + ids=[ + "both_temps_available_choose_highest", + "is_tapping_choose_tapping_temp", + "is_pumping_choose_heater_temp", + "heater_temp_not_available_choose_tapping_temp", + "tapping_temp_not_available_choose_heater_temp", + "tapping_and_heater_temp_not_available_unknown", + ], +) +@patch("homeassistant.components.incomfort.PLATFORMS", [Platform.WATER_HEATER]) +async def test_current_temperature_cases( + hass: HomeAssistant, + mock_incomfort: MagicMock, + entity_registry: er.EntityRegistry, + mock_config_entry: ConfigEntry, + current_temperature: float 
| None,
+) -> None:
+    """Test incomfort entities with alternate current temperature calculation.
+
+    The boiler's current temperature is calculated from the test data:
+    heater_temp: 35.34
+    tap_temp: 30.21
+
+    It is based on the operating mode, as the boiler can heat either tap water
+    or the house.
+    """
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    assert (state := hass.states.get("water_heater.boiler")) is not None
+    assert state.attributes.get("current_temperature") == current_temperature
diff --git a/tests/components/intellifire/snapshots/test_binary_sensor.ambr b/tests/components/intellifire/snapshots/test_binary_sensor.ambr
index 34d5836a02523b..1b85db51d68722 100644
--- a/tests/components/intellifire/snapshots/test_binary_sensor.ambr
+++ b/tests/components/intellifire/snapshots/test_binary_sensor.ambr
@@ -47,6 +47,54 @@
     'state': 'off',
   })
 # ---
+# name: test_all_binary_sensor_entities[binary_sensor.intellifire_cloud_connectivity-entry]
+  EntityRegistryEntrySnapshot({
+    'aliases': set({
+    }),
+    'area_id': None,
+    'capabilities': None,
+    'config_entry_id': ,
+    'device_class': None,
+    'device_id': ,
+    'disabled_by': None,
+    'domain': 'binary_sensor',
+    'entity_category': ,
+    'entity_id': 'binary_sensor.intellifire_cloud_connectivity',
+    'has_entity_name': True,
+    'hidden_by': None,
+    'icon': None,
+    'id': ,
+    'labels': set({
+    }),
+    'name': None,
+    'options': dict({
+    }),
+    'original_device_class': ,
+    'original_icon': None,
+    'original_name': 'Cloud connectivity',
+    'platform': 'intellifire',
+    'previous_unique_id': None,
+    'supported_features': 0,
+    'translation_key': 'cloud_connectivity',
+    'unique_id': 'cloud_connectivity_mock_serial',
+    'unit_of_measurement': None,
+  })
+# ---
+# name: test_all_binary_sensor_entities[binary_sensor.intellifire_cloud_connectivity-state]
+  StateSnapshot({
+    'attributes': ReadOnlyDict({
+      'attribution': 'Data provided by unpublished Intellifire API',
+      'device_class': 'connectivity',
+      'friendly_name': 'IntelliFire Cloud connectivity',
+    }),
+    'context': ,
+    'entity_id': 'binary_sensor.intellifire_cloud_connectivity',
+    'last_changed': ,
+    'last_reported': ,
+    'last_updated': ,
+    'state': 'off',
+  })
+# ---
 # name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-entry]
   EntityRegistryEntrySnapshot({
     'aliases': set({
@@ -382,6 +430,54 @@
     'state': 'off',
   })
 # ---
+# name: test_all_binary_sensor_entities[binary_sensor.intellifire_local_connectivity-entry]
+  EntityRegistryEntrySnapshot({
+    'aliases': set({
+    }),
+    'area_id': None,
+    'capabilities': None,
+    'config_entry_id': ,
+    'device_class': None,
+    'device_id': ,
+    'disabled_by': None,
+    'domain': 'binary_sensor',
+    'entity_category': ,
+    'entity_id': 'binary_sensor.intellifire_local_connectivity',
+    'has_entity_name': True,
+    'hidden_by': None,
+    'icon': None,
+    'id': ,
+    'labels': set({
+    }),
+    'name': None,
+    'options': dict({
+    }),
+    'original_device_class': ,
+    'original_icon': None,
+    'original_name': 'Local connectivity',
+    'platform': 'intellifire',
+    'previous_unique_id': None,
+    'supported_features': 0,
+    'translation_key': 'local_connectivity',
+    'unique_id': 'local_connectivity_mock_serial',
+    'unit_of_measurement': None,
+  })
+# ---
+# name: test_all_binary_sensor_entities[binary_sensor.intellifire_local_connectivity-state]
+  StateSnapshot({
+    'attributes': ReadOnlyDict({
+      'attribution': 'Data provided by unpublished Intellifire API',
+      'device_class': 'connectivity',
+      'friendly_name': 'IntelliFire Local connectivity',
+    }),
+    'context': ,
'entity_id': 'binary_sensor.intellifire_local_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/intellifire/snapshots/test_sensor.ambr b/tests/components/intellifire/snapshots/test_sensor.ambr index d5e59e3f00fa22..d749da216ac3ce 100644 --- a/tests/components/intellifire/snapshots/test_sensor.ambr +++ b/tests/components/intellifire/snapshots/test_sensor.ambr @@ -288,100 +288,6 @@ 'state': '192.168.2.108', }) # --- -# name: test_all_sensor_entities[sensor.intellifire_local_connectivity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.intellifire_local_connectivity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Local connectivity', - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'local_connectivity', - 'unique_id': 'local_connectivity_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_local_connectivity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire Local connectivity', - }), - 'context': , - 'entity_id': 'sensor.intellifire_local_connectivity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'True', - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.intellifire_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'intellifire', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'local_connectivity', - 'unique_id': 'local_connectivity_mock_serial', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_sensor_entities[sensor.intellifire_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by unpublished Intellifire API', - 'friendly_name': 'IntelliFire None', - }), - 'context': , - 'entity_id': 'sensor.intellifire_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'True', - }) -# --- # name: test_all_sensor_entities[sensor.intellifire_target_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/intent/test_init.py b/tests/components/intent/test_init.py index 659ca16c0bbc7e..20c0f9d8d4440c 100644 --- a/tests/components/intent/test_init.py +++ b/tests/components/intent/test_init.py @@ -455,3 +455,14 @@ async def test_set_position_intent_unsupported_domain(hass: HomeAssistant) -> No "HassSetPosition", {"name": {"value": "test light"}, "position": 
{"value": 100}}, ) + + +async def test_intents_with_no_responses(hass: HomeAssistant) -> None: + """Test intents that should not return a response during handling.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + + # The "respond" intent gets its response text from home-assistant-intents + for intent_name in (intent.INTENT_NEVERMIND, intent.INTENT_RESPOND): + response = await intent.async_handle(hass, "test", intent_name, {}) + assert not response.speech diff --git a/tests/components/iotty/test_cover.py b/tests/components/iotty/test_cover.py index fd30fe1b574cc4..c9e1edaa24bbe3 100644 --- a/tests/components/iotty/test_cover.py +++ b/tests/components/iotty/test_cover.py @@ -18,10 +18,7 @@ SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.components.iotty.const import DOMAIN from homeassistant.components.iotty.coordinator import UPDATE_INTERVAL @@ -55,7 +52,7 @@ async def test_open_ok( await hass.config_entries.async_setup(mock_config_entry.entry_id) assert (state := hass.states.get(entity_id)) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED mock_get_status_filled_stationary_0.return_value = { RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 10} @@ -72,7 +69,7 @@ async def test_open_ok( mock_command_fn.assert_called_once() assert (state := hass.states.get(entity_id)) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_close_ok( @@ -96,7 +93,7 @@ async def test_close_ok( await hass.config_entries.async_setup(mock_config_entry.entry_id) assert (state := hass.states.get(entity_id)) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN mock_get_status_filled_stationary_100.return_value = { RESULT: {STATUS: STATUS_CLOSING, OPEN_PERCENTAGE: 90} @@ -113,7 +110,7 @@ async def test_close_ok( mock_command_fn.assert_called_once() assert (state := hass.states.get(entity_id)) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_stop_ok( @@ -137,7 +134,7 @@ async def test_stop_ok( await hass.config_entries.async_setup(mock_config_entry.entry_id) assert (state := hass.states.get(entity_id)) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING mock_get_status_filled_opening_50.return_value = { RESULT: {STATUS: STATUS_STATIONATRY, OPEN_PERCENTAGE: 60} @@ -154,7 +151,7 @@ async def test_stop_ok( mock_command_fn.assert_called_once() assert (state := hass.states.get(entity_id)) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async def test_set_position_ok( @@ -178,7 +175,7 @@ async def test_set_position_ok( await hass.config_entries.async_setup(mock_config_entry.entry_id) assert (state := hass.states.get(entity_id)) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED mock_get_status_filled_stationary_0.return_value = { RESULT: {STATUS: STATUS_OPENING, OPEN_PERCENTAGE: 50} @@ -195,7 +192,7 @@ async def test_set_position_ok( mock_command_fn.assert_called_once() assert (state := hass.states.get(entity_id)) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_devices_insertion_ok( diff --git a/tests/components/ipp/snapshots/test_diagnostics.ambr b/tests/components/ipp/snapshots/test_diagnostics.ambr index 98d0055c98209c..bd2564c5a4098d 100644 --- 
a/tests/components/ipp/snapshots/test_diagnostics.ambr +++ b/tests/components/ipp/snapshots/test_diagnostics.ambr @@ -2,6 +2,7 @@ # name: test_diagnostics dict({ 'data': dict({ + 'booted_at': '2019-11-11T09:10:02+00:00', 'info': dict({ 'command_set': 'ESCPL2,BDC,D4,D4PX,ESCPR7,END4,GENEP,URF', 'location': None, diff --git a/tests/components/ipp/snapshots/test_sensor.ambr b/tests/components/ipp/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..3f910399ad8ff7 --- /dev/null +++ b/tests/components/ipp/snapshots/test_sensor.ambr @@ -0,0 +1,378 @@ +# serializer version: 1 +# name: test_sensors[sensor.test_ha_1000_series-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'idle', + 'printing', + 'stopped', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'printer', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_printer', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'command_set': 'ESCPL2,BDC,D4,D4PX,ESCPR7,END4,GENEP,URF', + 'device_class': 'enum', + 'friendly_name': 'Test HA-1000 Series', + 'info': 'Test HA-1000 Series', + 'location': None, + 'options': list([ + 'idle', + 'printing', + 'stopped', + ]), + 'serial': '555534593035345555', + 'state_message': None, + 'state_reason': None, + 'uri_supported': 'ipps://192.168.1.31:631/ipp/print,ipp://192.168.1.31:631/ipp/print', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_black_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_black_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_black_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Black ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_black_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '58', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_cyan_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, 
+ 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_cyan_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_1', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_cyan_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Cyan ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_cyan_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '91', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_magenta_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_magenta_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_2', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_magenta_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Magenta ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_magenta_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '73', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_photo_black_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_photo_black_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Photo black ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_3', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_photo_black_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Photo black ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 
'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_photo_black_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '98', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_ha_1000_series_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test HA-1000 Series Uptime', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2019-11-11T09:10:02+00:00', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_yellow_ink-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ha_1000_series_yellow_ink', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow ink', + 'platform': 'ipp', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'marker', + 'unique_id': 'cfe92100-67c4-11d4-a45f-f8d027761251_marker_4', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_ha_1000_series_yellow_ink-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test HA-1000 Series Yellow ink', + 'marker_high_level': 100, + 'marker_low_level': 10, + 'marker_type': 'ink-cartridge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_ha_1000_series_yellow_ink', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95', + }) +# --- diff --git a/tests/components/ipp/test_diagnostics.py b/tests/components/ipp/test_diagnostics.py index 08446601e69c51..d78f066d788aee 100644 --- a/tests/components/ipp/test_diagnostics.py +++ b/tests/components/ipp/test_diagnostics.py @@ -1,5 +1,6 @@ """Tests for the diagnostics data provided by the Internet Printing Protocol (IPP) integration.""" +import pytest from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant @@ -9,6 +10,7 @@ from tests.typing import ClientSessionGenerator +@pytest.mark.freeze_time("2019-11-11 09:10:32+00:00") async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/ipp/test_sensor.py b/tests/components/ipp/test_sensor.py index 9f0079a4e40b4f..bdbb9a88d3559e 100644 --- a/tests/components/ipp/test_sensor.py +++ 
b/tests/components/ipp/test_sensor.py @@ -3,13 +3,12 @@ from unittest.mock import AsyncMock import pytest +from syrupy.assertion import SnapshotAssertion -from homeassistant.components.sensor import ATTR_OPTIONS -from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.freeze_time("2019-11-11 09:10:32+00:00") @@ -17,53 +16,11 @@ async def test_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, init_integration: MockConfigEntry, ) -> None: """Test the creation and values of the IPP sensors.""" - state = hass.states.get("sensor.test_ha_1000_series") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == ["idle", "printing", "stopped"] - - entry = entity_registry.async_get("sensor.test_ha_1000_series") - assert entry - assert entry.translation_key == "printer" - - state = hass.states.get("sensor.test_ha_1000_series_black_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "58" - - state = hass.states.get("sensor.test_ha_1000_series_photo_black_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "98" - - state = hass.states.get("sensor.test_ha_1000_series_cyan_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "91" - - state = hass.states.get("sensor.test_ha_1000_series_yellow_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "95" - - state = hass.states.get("sensor.test_ha_1000_series_magenta_ink") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE - assert state.state == "73" - - state = hass.states.get("sensor.test_ha_1000_series_uptime") - assert state - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.state == "2019-11-11T09:10:02+00:00" - - entry = entity_registry.async_get("sensor.test_ha_1000_series_uptime") - - assert entry - assert entry.unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251_uptime" - assert entry.entity_category == EntityCategory.DIAGNOSTIC + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) async def test_disabled_by_default_sensors( diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index f489d7b7bb53cd..a7c3592ae7343a 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -107,6 +107,29 @@ def mock_ble_device() -> Generator[MagicMock]: yield ble_device +@pytest.fixture(autouse=True) +def mock_githubapi() -> Generator[AsyncMock]: + """Mock aiogithubapi.""" + + with patch( + "homeassistant.components.iron_os.GitHubAPI", + autospec=True, + ) as mock_client: + client = mock_client.return_value + client.repos.releases.latest = AsyncMock() + + client.repos.releases.latest.return_value.data.html_url = ( + "https://github.com/Ralim/IronOS/releases/tag/v2.22" + ) + client.repos.releases.latest.return_value.data.name = ( + "V2.22 | TS101 & S60 Added | PinecilV2 improved" + ) + client.repos.releases.latest.return_value.data.tag_name = "v2.22" + client.repos.releases.latest.return_value.data.body = 
"**RELEASE_NOTES**" + + yield client + + @pytest.fixture def mock_pynecil() -> Generator[AsyncMock]: """Mock Pynecil library.""" diff --git a/tests/components/iron_os/snapshots/test_update.ambr b/tests/components/iron_os/snapshots/test_update.ambr new file mode 100644 index 00000000000000..e0872d032ec25f --- /dev/null +++ b/tests/components/iron_os/snapshots/test_update.ambr @@ -0,0 +1,63 @@ +# serializer version: 1 +# name: test_update.2 + '**RELEASE_NOTES**' +# --- +# name: test_update[update.pinecil_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.pinecil_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c0:ff:ee:c0:ff:ee_firmware', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[update.pinecil_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/iron_os/icon.png', + 'friendly_name': 'Pinecil Firmware', + 'in_progress': False, + 'installed_version': 'v2.22', + 'latest_version': 'v2.22', + 'release_summary': None, + 'release_url': 'https://github.com/Ralim/IronOS/releases/tag/v2.22', + 'skipped_version': None, + 'supported_features': , + 'title': 'IronOS V2.22 | TS101 & S60 Added | PinecilV2 improved', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.pinecil_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/iron_os/test_update.py b/tests/components/iron_os/test_update.py new file mode 100644 index 00000000000000..7a2650ba7a3c1b --- /dev/null +++ b/tests/components/iron_os/test_update.py @@ -0,0 +1,77 @@ +"""Tests for IronOS update platform.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +from aiogithubapi import GitHubException +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform +from tests.typing import WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def update_only() -> AsyncGenerator[None]: + """Enable only the update platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.UPDATE], + ): + yield + + +@pytest.mark.usefixtures("mock_pynecil", "ble_device", "mock_githubapi") +async def test_update( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the IronOS update platform.""" + ws_client = await hass_ws_client(hass) + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is 
ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + await ws_client.send_json( + { + "id": 1, + "type": "update/release_notes", + "entity_id": "update.pinecil_firmware", + } + ) + result = await ws_client.receive_json() + assert result["result"] == snapshot + + +@pytest.mark.usefixtures("ble_device", "mock_pynecil") +async def test_update_unavailable( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_githubapi: AsyncMock, +) -> None: + """Test update entity unavailable on error.""" + + mock_githubapi.repos.releases.latest.side_effect = GitHubException + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + state = hass.states.get("update.pinecil_firmware") + assert state is not None + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/isy994/test_config_flow.py b/tests/components/isy994/test_config_flow.py index 34e267fe904ce5..2bc1fff222ffc5 100644 --- a/tests/components/isy994/test_config_flow.py +++ b/tests/components/isy994/test_config_flow.py @@ -698,3 +698,16 @@ async def test_reauth(hass: HomeAssistant) -> None: assert mock_setup_entry.called assert result4["type"] is FlowResultType.ABORT assert result4["reason"] == "reauth_successful" + + +async def test_options_flow(hass: HomeAssistant) -> None: + """Test option flow.""" + entry = MockConfigEntry(domain=DOMAIN) + entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # This should be improved at a later stage to increase test coverage + hass.config_entries.options.async_abort(result["flow_id"]) diff --git a/tests/components/jellyfin/test_remote.py b/tests/components/jellyfin/test_remote.py new file mode 100644 index 00000000000000..38390eabdccbcb --- /dev/null +++ b/tests/components/jellyfin/test_remote.py @@ -0,0 +1,93 @@ +"""Tests for the Jellyfin remote platform.""" + +from unittest.mock import MagicMock + +from homeassistant.components.remote import ( + ATTR_COMMAND, + ATTR_DELAY_SECS, + ATTR_HOLD_SECS, + ATTR_NUM_REPEATS, + DOMAIN as R_DOMAIN, + SERVICE_SEND_COMMAND, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_remote( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, + mock_jellyfin: MagicMock, + mock_api: MagicMock, +) -> None: + """Test the Jellyfin remote.""" + state = hass.states.get("remote.jellyfin_device") + state2 = hass.states.get("remote.jellyfin_device_two") + state3 = hass.states.get("remote.jellyfin_device_three") + state4 = hass.states.get("remote.jellyfin_device_four") + + assert state + assert state2 + # Doesn't support remote control; remote not created + assert state3 is None + assert state4 + + assert state.state == STATE_ON + + +async def test_services( + hass: HomeAssistant, + init_integration: MockConfigEntry, + mock_jellyfin: MagicMock, + mock_api: MagicMock, +) -> None: + """Test Jellyfin remote services.""" + state = hass.states.get("remote.jellyfin_device") + assert state + + command = "Select" + await hass.services.async_call( + R_DOMAIN, + 
SERVICE_SEND_COMMAND, + { + ATTR_ENTITY_ID: state.entity_id, + ATTR_COMMAND: command, + ATTR_NUM_REPEATS: 1, + ATTR_DELAY_SECS: 0, + ATTR_HOLD_SECS: 0, + }, + blocking=True, + ) + assert len(mock_api.command.mock_calls) == 1 + assert mock_api.command.mock_calls[0].args == ( + "SESSION-UUID", + command, + ) + + command = "MoveLeft" + await hass.services.async_call( + R_DOMAIN, + SERVICE_SEND_COMMAND, + { + ATTR_ENTITY_ID: state.entity_id, + ATTR_COMMAND: command, + ATTR_NUM_REPEATS: 2, + ATTR_DELAY_SECS: 0, + ATTR_HOLD_SECS: 0, + }, + blocking=True, + ) + assert len(mock_api.command.mock_calls) == 3 + assert mock_api.command.mock_calls[1].args == ( + "SESSION-UUID", + command, + ) + assert mock_api.command.mock_calls[2].args == ( + "SESSION-UUID", + command, + ) diff --git a/tests/components/jewish_calendar/test_config_flow.py b/tests/components/jewish_calendar/test_config_flow.py index 1468a66efbb7e9..e00fe41749ff7a 100644 --- a/tests/components/jewish_calendar/test_config_flow.py +++ b/tests/components/jewish_calendar/test_config_flow.py @@ -2,8 +2,6 @@ from unittest.mock import AsyncMock -import pytest - from homeassistant import config_entries, setup from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -20,12 +18,10 @@ CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, - CONF_NAME, CONF_TIME_ZONE, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -59,49 +55,6 @@ async def test_step_user(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No assert entries[0].data[CONF_TIME_ZONE] == hass.config.time_zone -@pytest.mark.parametrize("diaspora", [True, False]) -@pytest.mark.parametrize("language", ["hebrew", "english"]) -async def test_import_no_options(hass: HomeAssistant, language, diaspora) -> None: - """Test that the import step works.""" - conf = { - DOMAIN: {CONF_NAME: "test", CONF_LANGUAGE: language, CONF_DIASPORA: diaspora} - } - - assert await async_setup_component(hass, DOMAIN, conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == conf[DOMAIN][entry_key] - - -async def test_import_with_options(hass: HomeAssistant) -> None: - """Test that the import step works.""" - conf = { - DOMAIN: { - CONF_NAME: "test", - CONF_DIASPORA: DEFAULT_DIASPORA, - CONF_LANGUAGE: DEFAULT_LANGUAGE, - CONF_CANDLE_LIGHT_MINUTES: 20, - CONF_HAVDALAH_OFFSET_MINUTES: 50, - CONF_LATITUDE: 31.76, - CONF_LONGITUDE: 35.235, - } - } - - # Simulate HomeAssistant setting up the component - assert await async_setup_component(hass, DOMAIN, conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == conf[DOMAIN][entry_key] - for entry_key, entry_val in entries[0].options.items(): - assert entry_val == conf[DOMAIN][entry_key] - - async def test_single_instance_allowed( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -177,7 +130,7 @@ async def test_reconfigure( # init user flow result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # success result = await 
hass.config_entries.flow.async_configure( diff --git a/tests/components/jewish_calendar/test_init.py b/tests/components/jewish_calendar/test_init.py index b8454b41a603f7..cb982afec0f0d1 100644 --- a/tests/components/jewish_calendar/test_init.py +++ b/tests/components/jewish_calendar/test_init.py @@ -1,76 +1 @@ """Tests for the Jewish Calendar component's init.""" - -from hdate import Location - -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSORS -from homeassistant.components.jewish_calendar import get_unique_prefix -from homeassistant.components.jewish_calendar.const import ( - CONF_CANDLE_LIGHT_MINUTES, - CONF_DIASPORA, - CONF_HAVDALAH_OFFSET_MINUTES, - DEFAULT_DIASPORA, - DEFAULT_LANGUAGE, - DOMAIN, -) -from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME -from homeassistant.core import HomeAssistant -import homeassistant.helpers.entity_registry as er -from homeassistant.setup import async_setup_component - - -async def test_import_unique_id_migration(hass: HomeAssistant) -> None: - """Test unique_id migration.""" - yaml_conf = { - DOMAIN: { - CONF_NAME: "test", - CONF_DIASPORA: DEFAULT_DIASPORA, - CONF_LANGUAGE: DEFAULT_LANGUAGE, - CONF_CANDLE_LIGHT_MINUTES: 20, - CONF_HAVDALAH_OFFSET_MINUTES: 50, - CONF_LATITUDE: 31.76, - CONF_LONGITUDE: 35.235, - } - } - - # Create an entry in the entity registry with the data from conf - ent_reg = er.async_get(hass) - location = Location( - latitude=yaml_conf[DOMAIN][CONF_LATITUDE], - longitude=yaml_conf[DOMAIN][CONF_LONGITUDE], - timezone=hass.config.time_zone, - diaspora=DEFAULT_DIASPORA, - ) - old_prefix = get_unique_prefix(location, DEFAULT_LANGUAGE, 20, 50) - sample_entity = ent_reg.async_get_or_create( - BINARY_SENSORS, - DOMAIN, - unique_id=f"{old_prefix}_erev_shabbat_hag", - suggested_object_id=f"{DOMAIN}_erev_shabbat_hag", - ) - # Save the existing unique_id, DEFAULT_LANGUAGE should be part of it - old_unique_id = sample_entity.unique_id - assert DEFAULT_LANGUAGE in old_unique_id - - # Simulate HomeAssistant setting up the component - assert await async_setup_component(hass, DOMAIN, yaml_conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == yaml_conf[DOMAIN][entry_key] - for entry_key, entry_val in entries[0].options.items(): - assert entry_val == yaml_conf[DOMAIN][entry_key] - - # Assert that the unique_id was updated - new_unique_id = ent_reg.async_get(sample_entity.entity_id).unique_id - assert new_unique_id != old_unique_id - assert DEFAULT_LANGUAGE not in new_unique_id - - # Confirm that when the component is reloaded, the unique_id is not changed - assert ent_reg.async_get(sample_entity.entity_id).unique_id == new_unique_id - - # Confirm that all the unique_ids are prefixed correctly - await hass.config_entries.async_reload(entries[0].entry_id) - er_entries = er.async_entries_for_config_entry(ent_reg, entries[0].entry_id) - assert all(entry.unique_id.startswith(entries[0].entry_id) for entry in er_entries) diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 487fab5d7232c4..8fb348f1724c86 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -819,3 +819,34 @@ async def test_fan_speed_zero_mode_auto(hass: HomeAssistant, knx: KNXTestKit) -> ) await knx.assert_write("1/2/6", (0x0,)) knx.assert_state("climate.test", HVACMode.HEAT, 
fan_mode="auto") + + +async def test_climate_humidity(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX climate humidity.""" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_HUMIDITY_STATE_ADDRESS: "1/2/16", + } + } + ) + + # read states state updater + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + + # StateUpdater initialize state + await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) + + # Query status + await knx.assert_read("1/2/16") + await knx.receive_response("1/2/16", (0x14, 0x74)) + knx.assert_state( + "climate.test", + HVACMode.HEAT, + current_humidity=45.6, + ) diff --git a/tests/components/knx/test_cover.py b/tests/components/knx/test_cover.py index 2d2b72e90153d6..0604b575c5b395 100644 --- a/tests/components/knx/test_cover.py +++ b/tests/components/knx/test_cover.py @@ -1,7 +1,8 @@ """Test KNX cover.""" +from homeassistant.components.cover import CoverState from homeassistant.components.knx.schema import CoverSchema -from homeassistant.const import CONF_NAME, STATE_CLOSING +from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from .conftest import KNXTestKit @@ -72,7 +73,7 @@ async def test_cover_basic(hass: HomeAssistant, knx: KNXTestKit) -> None: knx.assert_state( "cover.test", - STATE_CLOSING, + CoverState.CLOSING, ) assert len(events) == 1 diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index b481675140bd5d..c7e33dd5fe48c6 100644 --- a/tests/components/knx/test_notify.py +++ b/tests/components/knx/test_notify.py @@ -9,74 +9,6 @@ from .conftest import KNXTestKit -async def test_legacy_notify_service_simple( - hass: HomeAssistant, knx: KNXTestKit -) -> None: - """Test KNX notify can send to one device.""" - await knx.setup_integration( - { - NotifySchema.PLATFORM: { - CONF_NAME: "test", - KNX_ADDRESS: "1/0/0", - } - } - ) - await hass.services.async_call( - "notify", "notify", {"target": "test", "message": "I love KNX"}, blocking=True - ) - await knx.assert_write( - "1/0/0", - (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), - ) - await hass.services.async_call( - "notify", - "notify", - { - "target": "test", - "message": "I love KNX, but this text is too long for KNX, poor KNX", - }, - blocking=True, - ) - await knx.assert_write( - "1/0/0", - (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), - ) - - -async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodings( - hass: HomeAssistant, knx: KNXTestKit -) -> None: - """Test KNX notify `type` configuration.""" - await knx.setup_integration( - { - NotifySchema.PLATFORM: [ - { - CONF_NAME: "ASCII", - KNX_ADDRESS: "1/0/0", - CONF_TYPE: "string", - }, - { - CONF_NAME: "Latin-1", - KNX_ADDRESS: "1/0/1", - CONF_TYPE: "latin_1", - }, - ] - } - ) - await hass.services.async_call( - "notify", "notify", {"message": "Gänsefüßchen"}, blocking=True - ) - await knx.assert_write( - "1/0/0", - # "G?nsef??chen" - (71, 63, 110, 115, 101, 102, 63, 63, 99, 104, 101, 110, 0, 0), - ) - await knx.assert_write( - "1/0/1", - (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0), - ) - - async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: """Test KNX notify can send to one device.""" await knx.setup_integration( diff --git 
a/tests/components/knx/test_repairs.py b/tests/components/knx/test_repairs.py deleted file mode 100644 index b801f70324fe07..00000000000000 --- a/tests/components/knx/test_repairs.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Test repairs for KNX integration.""" - -from homeassistant.components.knx.const import DOMAIN, KNX_ADDRESS -from homeassistant.components.knx.schema import NotifySchema -from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN -from homeassistant.const import CONF_NAME -from homeassistant.core import HomeAssistant -import homeassistant.helpers.issue_registry as ir - -from .conftest import KNXTestKit - -from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow -from tests.typing import ClientSessionGenerator - - -async def test_knx_notify_service_issue( - hass: HomeAssistant, - knx: KNXTestKit, - hass_client: ClientSessionGenerator, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the legacy notify service still works before migration and repair flow is triggered.""" - await knx.setup_integration( - { - NotifySchema.PLATFORM: { - CONF_NAME: "test", - KNX_ADDRESS: "1/0/0", - } - } - ) - http_client = await hass_client() - - # Assert no issue is present - assert len(issue_registry.issues) == 0 - - # Simulate legacy service being used - assert hass.services.has_service(NOTIFY_DOMAIN, NOTIFY_DOMAIN) - await hass.services.async_call( - NOTIFY_DOMAIN, - NOTIFY_DOMAIN, - service_data={"message": "It is too cold!", "target": "test"}, - blocking=True, - ) - await knx.assert_write( - "1/0/0", - (73, 116, 32, 105, 115, 32, 116, 111, 111, 32, 99, 111, 108, 100), - ) - - # Assert the issue is present - assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_notify", - ) - - # Test confirm step in repair flow - data = await start_repair_fix_flow( - http_client, "notify", f"migrate_notify_{DOMAIN}_notify" - ) - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - data = await process_repair_fix_flow(http_client, flow_id) - assert data["type"] == "create_entry" - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_{DOMAIN}_notify", - ) - assert len(issue_registry.issues) == 0 diff --git a/tests/components/konnected/test_init.py b/tests/components/konnected/test_init.py index 1a2da88624db98..6fc6b10ff20e17 100644 --- a/tests/components/konnected/test_init.py +++ b/tests/components/konnected/test_init.py @@ -7,8 +7,8 @@ from homeassistant.components import konnected from homeassistant.components.konnected import config_flow -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/lamarzocco/__init__.py b/tests/components/lamarzocco/__init__.py index 4d274d10baaa82..f6ca0fe40df646 100644 --- a/tests/components/lamarzocco/__init__.py +++ b/tests/components/lamarzocco/__init__.py @@ -1,6 +1,6 @@ """Mock inputs for tests.""" -from lmcloud.const import MachineModel +from pylamarzocco.const import MachineModel from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -19,10 +19,10 @@ USER_INPUT = PASSWORD_SELECTION | {CONF_USERNAME: "username"} SERIAL_DICT = { - 
MachineModel.GS3_AV: "GS01234", - MachineModel.GS3_MP: "GS01234", - MachineModel.LINEA_MICRA: "MR01234", - MachineModel.LINEA_MINI: "LM01234", + MachineModel.GS3_AV: "GS012345", + MachineModel.GS3_MP: "GS012345", + MachineModel.LINEA_MICRA: "MR012345", + MachineModel.LINEA_MINI: "LM012345", } WAKE_UP_SLEEP_ENTRY_IDS = ["Os2OswX", "aXFz5bJ"] diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 2520433e86ab60..210dd9406cc9e8 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -5,9 +5,9 @@ from unittest.mock import MagicMock, patch from bleak.backends.device import BLEDevice -from lmcloud.const import FirmwareType, MachineModel, SteamLevel -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoDeviceInfo +from pylamarzocco.const import FirmwareType, MachineModel, SteamLevel +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoDeviceInfo import pytest from homeassistant.components.lamarzocco.const import DOMAIN @@ -75,11 +75,11 @@ def device_fixture() -> MachineModel: @pytest.fixture -def mock_device_info() -> LaMarzoccoDeviceInfo: +def mock_device_info(device_fixture: MachineModel) -> LaMarzoccoDeviceInfo: """Return a mocked La Marzocco device info.""" return LaMarzoccoDeviceInfo( - model=MachineModel.GS3_AV, - serial_number="GS01234", + model=device_fixture, + serial_number=SERIAL_DICT[device_fixture], name="GS3", communication_key="token", ) @@ -157,5 +157,5 @@ def mock_bluetooth(enable_bluetooth: None) -> None: def mock_ble_device() -> BLEDevice: """Return a mock BLE device.""" return BLEDevice( - "00:00:00:00:00:00", "GS_GS01234", details={"path": "path"}, rssi=50 + "00:00:00:00:00:00", "GS_GS012345", details={"path": "path"}, rssi=50 ) diff --git a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr index df47ac002e6fd8..cda285a71069f0 100644 --- a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr @@ -1,19 +1,19 @@ # serializer version: 1 -# name: test_binary_sensors[GS01234_backflush_active-binary_sensor] +# name: test_binary_sensors[GS012345_backflush_active-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS01234 Backflush active', + 'friendly_name': 'GS012345 Backflush active', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_backflush_active', + 'entity_id': 'binary_sensor.gs012345_backflush_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_backflush_active-entry] +# name: test_binary_sensors[GS012345_backflush_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -25,7 +25,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_backflush_active', + 'entity_id': 'binary_sensor.gs012345_backflush_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -42,25 +42,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'backflush_enabled', - 'unique_id': 'GS01234_backflush_enabled', + 'unique_id': 'GS012345_backflush_enabled', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS01234_brewing_active-binary_sensor] +# name: test_binary_sensors[GS012345_brewing_active-binary_sensor] StateSnapshot({ 
'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS01234 Brewing active', + 'friendly_name': 'GS012345 Brewing active', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_brewing_active', + 'entity_id': 'binary_sensor.gs012345_brewing_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_brewing_active-entry] +# name: test_binary_sensors[GS012345_brewing_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -72,7 +72,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_brewing_active', + 'entity_id': 'binary_sensor.gs012345_brewing_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -89,25 +89,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'brew_active', - 'unique_id': 'GS01234_brew_active', + 'unique_id': 'GS012345_brew_active', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS01234_water_tank_empty-binary_sensor] +# name: test_binary_sensors[GS012345_water_tank_empty-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'GS01234 Water tank empty', + 'friendly_name': 'GS012345 Water tank empty', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_water_tank_empty', + 'entity_id': 'binary_sensor.gs012345_water_tank_empty', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_water_tank_empty-entry] +# name: test_binary_sensors[GS012345_water_tank_empty-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -119,7 +119,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_water_tank_empty', + 'entity_id': 'binary_sensor.gs012345_water_tank_empty', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -136,7 +136,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'water_tank', - 'unique_id': 'GS01234_water_tank', + 'unique_id': 'GS012345_water_tank', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_button.ambr b/tests/components/lamarzocco/snapshots/test_button.ambr index 023039cc6f7fa4..64d47a110727fe 100644 --- a/tests/components/lamarzocco/snapshots/test_button.ambr +++ b/tests/components/lamarzocco/snapshots/test_button.ambr @@ -2,10 +2,10 @@ # name: test_start_backflush StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Start backflush', + 'friendly_name': 'GS012345 Start backflush', }), 'context': , - 'entity_id': 'button.gs01234_start_backflush', + 'entity_id': 'button.gs012345_start_backflush', 'last_changed': , 'last_reported': , 'last_updated': , @@ -24,7 +24,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': None, - 'entity_id': 'button.gs01234_start_backflush', + 'entity_id': 'button.gs012345_start_backflush', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -41,7 +41,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'start_backflush', - 'unique_id': 'GS01234_start_backflush', + 'unique_id': 'GS012345_start_backflush', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_calendar.ambr b/tests/components/lamarzocco/snapshots/test_calendar.ambr index 2fd5dab846a38c..729eed5879a311 100644 --- a/tests/components/lamarzocco/snapshots/test_calendar.ambr +++ 
b/tests/components/lamarzocco/snapshots/test_calendar.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_calendar_edge_cases[start_date0-end_date0] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -15,7 +15,7 @@ # --- # name: test_calendar_edge_cases[start_date1-end_date1] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -29,7 +29,7 @@ # --- # name: test_calendar_edge_cases[start_date2-end_date2] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -43,7 +43,7 @@ # --- # name: test_calendar_edge_cases[start_date3-end_date3] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -57,7 +57,7 @@ # --- # name: test_calendar_edge_cases[start_date4-end_date4] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ ]), }), @@ -65,7 +65,7 @@ # --- # name: test_calendar_edge_cases[start_date5-end_date5] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -83,7 +83,7 @@ }), }) # --- -# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_axfz5bj] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -95,7 +95,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -112,11 +112,11 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 'unique_id': 'GS01234_auto_on_off_schedule_aXFz5bJ', + 'unique_id': 'GS012345_auto_on_off_schedule_aXFz5bJ', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_os2oswx] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -128,7 +128,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -145,13 +145,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 'unique_id': 'GS01234_auto_on_off_schedule_Os2OswX', + 'unique_id': 'GS012345_auto_on_off_schedule_Os2OswX', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[events.GS01234_auto_on_off_schedule_axfz5bj] +# name: 
test_calendar_events[events.GS012345_auto_on_off_schedule_axfz5bj] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -181,9 +181,9 @@ }), }) # --- -# name: test_calendar_events[events.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[events.GS012345_auto_on_off_schedule_os2oswx] dict({ - 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -327,38 +327,38 @@ }), }) # --- -# name: test_calendar_events[state.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[state.GS012345_auto_on_off_schedule_axfz5bj] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-14 07:30:00', - 'friendly_name': 'GS01234 Auto on/off schedule (aXFz5bJ)', + 'friendly_name': 'GS012345 Auto on/off schedule (aXFz5bJ)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-14 07:00:00', }), 'context': , - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_calendar_events[state.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[state.GS012345_auto_on_off_schedule_os2oswx] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-13 00:00:00', - 'friendly_name': 'GS01234 Auto on/off schedule (Os2OswX)', + 'friendly_name': 'GS012345 Auto on/off schedule (Os2OswX)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-12 22:00:00', }), 'context': , - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', 'last_changed': , 'last_reported': , 'last_updated': , @@ -367,7 +367,7 @@ # --- # name: test_no_calendar_events_global_disable dict({ - 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ 'events': list([ ]), }), diff --git a/tests/components/lamarzocco/snapshots/test_number.ambr b/tests/components/lamarzocco/snapshots/test_number.ambr index 8265e7d7646ca1..b7e42bb425ffb9 100644 --- a/tests/components/lamarzocco/snapshots/test_number.ambr +++ b/tests/components/lamarzocco/snapshots/test_number.ambr @@ -1,9 +1,9 @@ # serializer version: 1 -# name: test_coffee_boiler +# name: test_general_numbers[coffee_target_temperature-94-set_temp-kwargs0] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Coffee target temperature', + 'friendly_name': 'GS012345 Coffee target temperature', 'max': 104, 'min': 85, 'mode': , @@ -11,14 +11,14 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_coffee_target_temperature', + 'entity_id': 'number.gs012345_coffee_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '95', }) # --- -# name: test_coffee_boiler.1 +# name: test_general_numbers[coffee_target_temperature-94-set_temp-kwargs0].1 
EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_coffee_target_temperature', + 'entity_id': 'number.gs012345_coffee_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,15 +52,72 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'coffee_temp', - 'unique_id': 'GS01234_coffee_temp', + 'unique_id': 'GS012345_coffee_temp', 'unit_of_measurement': , }) # --- +# name: test_general_numbers[smart_standby_time-23-set_smart_standby-kwargs1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'GS012345 Smart standby time', + 'max': 240, + 'min': 10, + 'mode': , + 'step': 10, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.gs012345_smart_standby_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_general_numbers[smart_standby_time-23-set_smart_standby-kwargs1].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 240, + 'min': 10, + 'mode': , + 'step': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.gs012345_smart_standby_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Smart standby time', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_standby_time', + 'unique_id': 'GS012345_smart_standby_time', + 'unit_of_measurement': , + }) +# --- # name: test_gs3_exclusive[steam_target_temperature-131-set_temp-kwargs0-GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Steam target temperature', + 'friendly_name': 'GS012345 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -68,7 +125,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +149,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,7 +166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS01234_steam_temp', + 'unique_id': 'GS012345_steam_temp', 'unit_of_measurement': , }) # --- @@ -117,7 +174,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Steam target temperature', + 'friendly_name': 'GS012345 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -125,7 +182,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +206,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 
'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,7 +223,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS01234_steam_temp', + 'unique_id': 'GS012345_steam_temp', 'unit_of_measurement': , }) # --- @@ -174,7 +231,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Tea water duration', + 'friendly_name': 'GS012345 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -182,7 +239,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -206,7 +263,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -223,7 +280,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS01234_tea_water_duration', + 'unique_id': 'GS012345_tea_water_duration', 'unit_of_measurement': , }) # --- @@ -231,7 +288,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Tea water duration', + 'friendly_name': 'GS012345 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -239,7 +296,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -263,7 +320,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -280,14 +337,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS01234_tea_water_duration', + 'unique_id': 'GS012345_tea_water_duration', 'unit_of_measurement': , }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_1-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 1', + 'friendly_name': 'GS012345 Dose Key 1', 'max': 999, 'min': 0, 'mode': , @@ -295,17 +352,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_1', + 'entity_id': 'number.gs012345_dose_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '135', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_2-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 2', + 'friendly_name': 'GS012345 Dose Key 2', 'max': 999, 'min': 0, 'mode': , @@ -313,17 +370,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_2', + 'entity_id': 'number.gs012345_dose_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '97', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_3-state] +# name: 
test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 3', + 'friendly_name': 'GS012345 Dose Key 3', 'max': 999, 'min': 0, 'mode': , @@ -331,17 +388,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_3', + 'entity_id': 'number.gs012345_dose_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '108', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_4-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 4', + 'friendly_name': 'GS012345 Dose Key 4', 'max': 999, 'min': 0, 'mode': , @@ -349,18 +406,18 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_4', + 'entity_id': 'number.gs012345_dose_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '121', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 1', + 'friendly_name': 'GS012345 Prebrew off time Key 1', 'max': 10, 'min': 1, 'mode': , @@ -368,18 +425,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_1', + 'entity_id': 'number.gs012345_prebrew_off_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 2', + 'friendly_name': 'GS012345 Prebrew off time Key 2', 'max': 10, 'min': 1, 'mode': , @@ -387,18 +444,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_2', + 'entity_id': 'number.gs012345_prebrew_off_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 3', + 'friendly_name': 'GS012345 Prebrew off time Key 3', 'max': 10, 'min': 1, 'mode': , @@ -406,18 +463,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_3', + 'entity_id': 'number.gs012345_prebrew_off_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_4-state] +# name: 
test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 4', + 'friendly_name': 'GS012345 Prebrew off time Key 4', 'max': 10, 'min': 1, 'mode': , @@ -425,18 +482,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_4', + 'entity_id': 'number.gs012345_prebrew_off_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 1', + 'friendly_name': 'GS012345 Prebrew on time Key 1', 'max': 10, 'min': 2, 'mode': , @@ -444,18 +501,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_1', + 'entity_id': 'number.gs012345_prebrew_on_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 2', + 'friendly_name': 'GS012345 Prebrew on time Key 2', 'max': 10, 'min': 2, 'mode': , @@ -463,18 +520,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_2', + 'entity_id': 'number.gs012345_prebrew_on_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 3', + 'friendly_name': 'GS012345 Prebrew on time Key 3', 'max': 10, 'min': 2, 'mode': , @@ -482,18 +539,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_3', + 'entity_id': 'number.gs012345_prebrew_on_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 4', + 'friendly_name': 'GS012345 Prebrew on time Key 4', 'max': 10, 'min': 2, 'mode': , @@ -501,18 +558,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_4', + 'entity_id': 'number.gs012345_prebrew_on_time_key_4', 'last_changed': , 'last_reported': , 
'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 1', + 'friendly_name': 'GS012345 Preinfusion time Key 1', 'max': 29, 'min': 2, 'mode': , @@ -520,18 +577,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_1', + 'entity_id': 'number.gs012345_preinfusion_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 2', + 'friendly_name': 'GS012345 Preinfusion time Key 2', 'max': 29, 'min': 2, 'mode': , @@ -539,18 +596,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_2', + 'entity_id': 'number.gs012345_preinfusion_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 3', + 'friendly_name': 'GS012345 Preinfusion time Key 3', 'max': 29, 'min': 2, 'mode': , @@ -558,18 +615,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_3', + 'entity_id': 'number.gs012345_preinfusion_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 4', + 'friendly_name': 'GS012345 Preinfusion time Key 4', 'max': 29, 'min': 2, 'mode': , @@ -577,7 +634,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_4', + 'entity_id': 'number.gs012345_preinfusion_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , @@ -588,7 +645,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Prebrew off time', + 'friendly_name': 'LM012345 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -596,7 +653,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_prebrew_off_time', + 'entity_id': 'number.lm012345_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -620,7 +677,7 @@ 'disabled_by': None, 'domain': 'number', 
'entity_category': , - 'entity_id': 'number.lm01234_prebrew_off_time', + 'entity_id': 'number.lm012345_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -637,7 +694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'LM01234_prebrew_off', + 'unique_id': 'LM012345_prebrew_off', 'unit_of_measurement': , }) # --- @@ -645,7 +702,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Prebrew off time', + 'friendly_name': 'MR012345 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -653,7 +710,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_prebrew_off_time', + 'entity_id': 'number.mr012345_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -677,7 +734,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr01234_prebrew_off_time', + 'entity_id': 'number.mr012345_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -694,7 +751,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'MR01234_prebrew_off', + 'unique_id': 'MR012345_prebrew_off', 'unit_of_measurement': , }) # --- @@ -702,7 +759,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Prebrew on time', + 'friendly_name': 'LM012345 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -710,7 +767,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_prebrew_on_time', + 'entity_id': 'number.lm012345_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -734,7 +791,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_prebrew_on_time', + 'entity_id': 'number.lm012345_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -751,7 +808,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'LM01234_prebrew_on', + 'unique_id': 'LM012345_prebrew_on', 'unit_of_measurement': , }) # --- @@ -759,7 +816,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Prebrew on time', + 'friendly_name': 'MR012345 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -767,7 +824,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_prebrew_on_time', + 'entity_id': 'number.mr012345_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -791,7 +848,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr01234_prebrew_on_time', + 'entity_id': 'number.mr012345_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -808,7 +865,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'MR01234_prebrew_on', + 'unique_id': 'MR012345_prebrew_on', 'unit_of_measurement': , }) # --- @@ -816,7 +873,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Preinfusion time', + 'friendly_name': 'LM012345 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -824,7 +881,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_preinfusion_time', + 'entity_id': 'number.lm012345_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -848,7 +905,7 @@ 'disabled_by': None, 
'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_preinfusion_time', + 'entity_id': 'number.lm012345_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -865,7 +922,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 'LM01234_preinfusion_off', + 'unique_id': 'LM012345_preinfusion_off', 'unit_of_measurement': , }) # --- @@ -873,7 +930,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Preinfusion time', + 'friendly_name': 'MR012345 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -881,7 +938,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_preinfusion_time', + 'entity_id': 'number.mr012345_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -905,7 +962,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr01234_preinfusion_time', + 'entity_id': 'number.mr012345_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -922,7 +979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 'MR01234_preinfusion_off', + 'unique_id': 'MR012345_preinfusion_off', 'unit_of_measurement': , }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_select.ambr b/tests/components/lamarzocco/snapshots/test_select.ambr index be56af2b092b43..46fa55eff13e1c 100644 --- a/tests/components/lamarzocco/snapshots/test_select.ambr +++ b/tests/components/lamarzocco/snapshots/test_select.ambr @@ -2,7 +2,7 @@ # name: test_pre_brew_infusion_select[GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Prebrew/-infusion mode', + 'friendly_name': 'GS012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -10,7 +10,7 @@ ]), }), 'context': , - 'entity_id': 'select.gs01234_prebrew_infusion_mode', + 'entity_id': 'select.gs012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.gs01234_prebrew_infusion_mode', + 'entity_id': 'select.gs012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,14 +52,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'GS01234_prebrew_infusion_select', + 'unique_id': 'GS012345_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_pre_brew_infusion_select[Linea Mini] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'LM01234 Prebrew/-infusion mode', + 'friendly_name': 'LM012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -67,7 +67,7 @@ ]), }), 'context': , - 'entity_id': 'select.lm01234_prebrew_infusion_mode', + 'entity_id': 'select.lm012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +92,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.lm01234_prebrew_infusion_mode', + 'entity_id': 'select.lm012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,14 +109,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'LM01234_prebrew_infusion_select', + 'unique_id': 'LM012345_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # 
name: test_pre_brew_infusion_select[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR01234 Prebrew/-infusion mode', + 'friendly_name': 'MR012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -124,7 +124,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr01234_prebrew_infusion_mode', + 'entity_id': 'select.mr012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +149,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.mr01234_prebrew_infusion_mode', + 'entity_id': 'select.mr012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,14 +166,69 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'MR01234_prebrew_infusion_select', + 'unique_id': 'MR012345_prebrew_infusion_select', + 'unit_of_measurement': None, + }) +# --- +# name: test_smart_standby_mode + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GS012345 Smart standby mode', + 'options': list([ + 'power_on', + 'last_brewing', + ]), + }), + 'context': , + 'entity_id': 'select.gs012345_smart_standby_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'last_brewing', + }) +# --- +# name: test_smart_standby_mode.1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'power_on', + 'last_brewing', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.gs012345_smart_standby_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart standby mode', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_standby_mode', + 'unique_id': 'GS012345_smart_standby_mode', 'unit_of_measurement': None, }) # --- # name: test_steam_boiler_level[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR01234 Steam level', + 'friendly_name': 'MR012345 Steam level', 'options': list([ '1', '2', @@ -181,7 +236,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr01234_steam_level', + 'entity_id': 'select.mr012345_steam_level', 'last_changed': , 'last_reported': , 'last_updated': , @@ -206,7 +261,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': None, - 'entity_id': 'select.mr01234_steam_level', + 'entity_id': 'select.mr012345_steam_level', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -223,7 +278,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp_select', - 'unique_id': 'MR01234_steam_temp_select', + 'unique_id': 'MR012345_steam_temp_select', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_sensor.ambr b/tests/components/lamarzocco/snapshots/test_sensor.ambr index 2237a8416e1660..da1efbf1eaae4d 100644 --- a/tests/components/lamarzocco/snapshots/test_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors[GS01234_current_coffee_temperature-entry] +# name: test_sensors[GS012345_current_coffee_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -13,7 +13,7 @@ 
'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs01234_current_coffee_temperature', + 'entity_id': 'sensor.gs012345_current_coffee_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -33,27 +33,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_coffee', - 'unique_id': 'GS01234_current_temp_coffee', + 'unique_id': 'GS012345_current_temp_coffee', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_current_coffee_temperature-sensor] +# name: test_sensors[GS012345_current_coffee_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Current coffee temperature', + 'friendly_name': 'GS012345 Current coffee temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_current_coffee_temperature', + 'entity_id': 'sensor.gs012345_current_coffee_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '96.5', }) # --- -# name: test_sensors[GS01234_current_steam_temperature-entry] +# name: test_sensors[GS012345_current_steam_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -67,7 +67,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs01234_current_steam_temperature', + 'entity_id': 'sensor.gs012345_current_steam_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -87,27 +87,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_steam', - 'unique_id': 'GS01234_current_temp_steam', + 'unique_id': 'GS012345_current_temp_steam', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_current_steam_temperature-sensor] +# name: test_sensors[GS012345_current_steam_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Current steam temperature', + 'friendly_name': 'GS012345 Current steam temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_current_steam_temperature', + 'entity_id': 'sensor.gs012345_current_steam_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '123.800003051758', }) # --- -# name: test_sensors[GS01234_shot_timer-entry] +# name: test_sensors[GS012345_shot_timer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -121,7 +121,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_shot_timer', + 'entity_id': 'sensor.gs012345_shot_timer', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -138,27 +138,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'shot_timer', - 'unique_id': 'GS01234_shot_timer', + 'unique_id': 'GS012345_shot_timer', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_shot_timer-sensor] +# name: test_sensors[GS012345_shot_timer-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Shot timer', + 'friendly_name': 'GS012345 Shot timer', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_shot_timer', + 'entity_id': 'sensor.gs012345_shot_timer', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors[GS01234_total_coffees_made-entry] +# name: test_sensors[GS012345_total_coffees_made-entry] EntityRegistryEntrySnapshot({ 
'aliases': set({ }), @@ -172,7 +172,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_total_coffees_made', + 'entity_id': 'sensor.gs012345_total_coffees_made', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -189,26 +189,26 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drink_stats_coffee', - 'unique_id': 'GS01234_drink_stats_coffee', + 'unique_id': 'GS012345_drink_stats_coffee', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS01234_total_coffees_made-sensor] +# name: test_sensors[GS012345_total_coffees_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Total coffees made', + 'friendly_name': 'GS012345 Total coffees made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs01234_total_coffees_made', + 'entity_id': 'sensor.gs012345_total_coffees_made', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1047', }) # --- -# name: test_sensors[GS01234_total_flushes_made-entry] +# name: test_sensors[GS012345_total_flushes_made-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -222,7 +222,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_total_flushes_made', + 'entity_id': 'sensor.gs012345_total_flushes_made', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -239,19 +239,19 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drink_stats_flushing', - 'unique_id': 'GS01234_drink_stats_flushing', + 'unique_id': 'GS012345_drink_stats_flushing', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS01234_total_flushes_made-sensor] +# name: test_sensors[GS012345_total_flushes_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Total flushes made', + 'friendly_name': 'GS012345 Total flushes made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs01234_total_flushes_made', + 'entity_id': 'sensor.gs012345_total_flushes_made', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/lamarzocco/snapshots/test_switch.ambr b/tests/components/lamarzocco/snapshots/test_switch.ambr index 4ec22e3123d0d2..5e3b99da6176eb 100644 --- a/tests/components/lamarzocco/snapshots/test_switch.ambr +++ b/tests/components/lamarzocco/snapshots/test_switch.ambr @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', + 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS01234_auto_on_off_Os2OswX', + 'unique_id': 'GS012345_auto_on_off_Os2OswX', 'unit_of_measurement': None, }) # --- @@ -44,7 +44,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -61,17 +61,17 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS01234_auto_on_off_aXFz5bJ', + 'unique_id': 'GS012345_auto_on_off_aXFz5bJ', 'unit_of_measurement': None, }) # --- # name: test_auto_on_off_switches[state.auto_on_off_Os2OswX] StateSnapshot({ 'attributes': ReadOnlyDict({ - 
'friendly_name': 'GS01234 Auto on/off (Os2OswX)', + 'friendly_name': 'GS012345 Auto on/off (Os2OswX)', }), 'context': , - 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', + 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', 'last_changed': , 'last_reported': , 'last_updated': , @@ -81,10 +81,10 @@ # name: test_auto_on_off_switches[state.auto_on_off_aXFz5bJ] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Auto on/off (aXFz5bJ)', + 'friendly_name': 'GS012345 Auto on/off (aXFz5bJ)', }), 'context': , - 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , @@ -105,7 +105,7 @@ 'identifiers': set({ tuple( 'lamarzocco', - 'GS01234', + 'GS012345', ), }), 'is_new': False, @@ -113,30 +113,30 @@ }), 'manufacturer': 'La Marzocco', 'model': , - 'model_id': None, - 'name': 'GS01234', + 'model_id': , + 'name': 'GS012345', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': 'GS01234', + 'serial_number': 'GS012345', 'suggested_area': None, 'sw_version': '1.40', 'via_device_id': None, }) # --- -# name: test_switches[-set_power] +# name: test_switches[-set_power-kwargs0] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234', + 'friendly_name': 'GS012345', }), 'context': , - 'entity_id': 'switch.gs01234', + 'entity_id': 'switch.gs012345', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_switches[-set_power].1 +# name: test_switches[-set_power-kwargs0].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -148,7 +148,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs01234', + 'entity_id': 'switch.gs012345', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -165,24 +165,70 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'main', - 'unique_id': 'GS01234_main', + 'unique_id': 'GS012345_main', 'unit_of_measurement': None, }) # --- -# name: test_switches[_steam_boiler-set_steam] +# name: test_switches[_smart_standby_enabled-set_smart_standby-kwargs2] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Steam boiler', + 'friendly_name': 'GS012345 Smart standby enabled', }), 'context': , - 'entity_id': 'switch.gs01234_steam_boiler', + 'entity_id': 'switch.gs012345_smart_standby_enabled', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_switches[_steam_boiler-set_steam].1 +# name: test_switches[_smart_standby_enabled-set_smart_standby-kwargs2].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.gs012345_smart_standby_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart standby enabled', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_standby_enabled', + 'unique_id': 'GS012345_smart_standby_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[_steam_boiler-set_steam-kwargs1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GS012345 Steam boiler', + }), + 'context': , + 
'entity_id': 'switch.gs012345_steam_boiler', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switches[_steam_boiler-set_steam-kwargs1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -194,7 +240,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs01234_steam_boiler', + 'entity_id': 'switch.gs012345_steam_boiler', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -211,7 +257,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_boiler', - 'unique_id': 'GS01234_steam_boiler_enable', + 'unique_id': 'GS012345_steam_boiler_enable', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_update.ambr b/tests/components/lamarzocco/snapshots/test_update.ambr index f08b9249f50353..46fa4cff8154fc 100644 --- a/tests/components/lamarzocco/snapshots/test_update.ambr +++ b/tests/components/lamarzocco/snapshots/test_update.ambr @@ -4,8 +4,9 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS01234 Gateway firmware', + 'friendly_name': 'GS012345 Gateway firmware', 'in_progress': False, 'installed_version': 'v3.1-rc4', 'latest_version': 'v3.5-rc3', @@ -14,9 +15,10 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs01234_gateway_firmware', + 'entity_id': 'update.gs012345_gateway_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -35,7 +37,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs01234_gateway_firmware', + 'entity_id': 'update.gs012345_gateway_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,7 +54,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'gateway_firmware', - 'unique_id': 'GS01234_gateway_firmware', + 'unique_id': 'GS012345_gateway_firmware', 'unit_of_measurement': None, }) # --- @@ -61,8 +63,9 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS01234 Machine firmware', + 'friendly_name': 'GS012345 Machine firmware', 'in_progress': False, 'installed_version': '1.40', 'latest_version': '1.55', @@ -71,9 +74,10 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs01234_machine_firmware', + 'entity_id': 'update.gs012345_machine_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +96,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs01234_machine_firmware', + 'entity_id': 'update.gs012345_machine_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,7 +113,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'machine_firmware', - 'unique_id': 'GS01234_machine_firmware', + 'unique_id': 'GS012345_machine_firmware', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/test_binary_sensor.py b/tests/components/lamarzocco/test_binary_sensor.py index 120d825c804636..956bfe90dd4e31 100644 --- a/tests/components/lamarzocco/test_binary_sensor.py +++ b/tests/components/lamarzocco/test_binary_sensor.py @@ 
-4,7 +4,7 @@ from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE diff --git a/tests/components/lamarzocco/test_button.py b/tests/components/lamarzocco/test_button.py index b754688f36996c..61b7ba77c22a7d 100644 --- a/tests/components/lamarzocco/test_button.py +++ b/tests/components/lamarzocco/test_button.py @@ -1,8 +1,8 @@ """Tests for the La Marzocco Buttons.""" -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock, patch -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -33,14 +33,18 @@ async def test_start_backflush( assert entry assert entry == snapshot - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", - }, - blocking=True, - ) + with patch( + "homeassistant.components.lamarzocco.button.asyncio.sleep", + new_callable=AsyncMock, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", + }, + blocking=True, + ) assert len(mock_lamarzocco.start_backflush.mock_calls) == 1 mock_lamarzocco.start_backflush.assert_called_once() diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index 7206013de10be8..be93779848fb9f 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -2,12 +2,20 @@ from unittest.mock import MagicMock, patch -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.models import LaMarzoccoDeviceInfo +from pylamarzocco.const import MachineModel +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoDeviceInfo +import pytest +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import CONF_USE_BLUETOOTH, DOMAIN -from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import ( + SOURCE_BLUETOOTH, + SOURCE_DHCP, + SOURCE_USER, + ConfigEntryState, +) from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -271,7 +279,7 @@ async def test_reconfigure_flow( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result2 = await __do_successful_user_step(hass, result, mock_cloud_client) service_info = get_bluetooth_service_info( @@ -430,6 +438,50 @@ async def test_bluetooth_discovery_errors( } +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.LINEA_MICRA, MachineModel.LINEA_MINI, MachineModel.GS3_AV], +) +async def test_dhcp_discovery( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_device_info: LaMarzoccoDeviceInfo, +) -> None: + """Test dhcp discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname=mock_lamarzocco.serial_number, + 
macaddress="aa:bb:cc:dd:ee:ff", + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.lamarzocco.config_flow.LaMarzoccoLocalClient.validate_connection", + return_value=True, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + **USER_INPUT, + CONF_HOST: "192.168.1.42", + CONF_MACHINE: mock_lamarzocco.serial_number, + CONF_MODEL: mock_device_info.model, + CONF_NAME: mock_device_info.name, + CONF_TOKEN: mock_device_info.communication_key, + } + + async def test_options_flow( hass: HomeAssistant, mock_lamarzocco: MagicMock, diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 2c812f7943890e..b99077a9059283 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -2,8 +2,8 @@ from unittest.mock import AsyncMock, MagicMock, patch -from lmcloud.const import FirmwareType -from lmcloud.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful import pytest from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE diff --git a/tests/components/lamarzocco/test_number.py b/tests/components/lamarzocco/test_number.py index 70d8efa5de7060..710a0220e06e5f 100644 --- a/tests/components/lamarzocco/test_number.py +++ b/tests/components/lamarzocco/test_number.py @@ -1,15 +1,16 @@ """Tests for the La Marzocco number entities.""" +from typing import Any from unittest.mock import MagicMock -from lmcloud.const import ( +from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -28,20 +29,41 @@ from tests.common import MockConfigEntry -async def test_coffee_boiler( +@pytest.mark.parametrize( + ("entity_name", "value", "func_name", "kwargs"), + [ + ( + "coffee_target_temperature", + 94, + "set_temp", + {"boiler": BoilerType.COFFEE, "temperature": 94}, + ), + ( + "smart_standby_time", + 23, + "set_smart_standby", + {"enabled": True, "mode": "LastBrewing", "minutes": 23}, + ), + ], +) +async def test_general_numbers( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion, + entity_name: str, + value: float, + func_name: str, + kwargs: dict[str, Any], ) -> None: - """Test the La Marzocco coffee temperature Number.""" + """Test the numbers available to all machines.""" await async_init_integration(hass, mock_config_entry) serial_number = mock_lamarzocco.serial_number - state = hass.states.get(f"number.{serial_number}_coffee_target_temperature") + state = hass.states.get(f"number.{serial_number}_{entity_name}") assert state assert state == snapshot @@ -59,16 +81,14 @@ async def test_coffee_boiler( NUMBER_DOMAIN, SERVICE_SET_VALUE, { - ATTR_ENTITY_ID: f"number.{serial_number}_coffee_target_temperature", - ATTR_VALUE: 94, + ATTR_ENTITY_ID: f"number.{serial_number}_{entity_name}", + ATTR_VALUE: value, }, blocking=True, ) - assert len(mock_lamarzocco.set_temp.mock_calls) == 1 - mock_lamarzocco.set_temp.assert_called_once_with( - boiler=BoilerType.COFFEE, 
temperature=94 - ) + mock_func = getattr(mock_lamarzocco, func_name) + mock_func.assert_called_once_with(**kwargs) @pytest.mark.parametrize("device_fixture", [MachineModel.GS3_AV, MachineModel.GS3_MP]) diff --git a/tests/components/lamarzocco/test_select.py b/tests/components/lamarzocco/test_select.py index 862898428f5f66..24b96f84f37498 100644 --- a/tests/components/lamarzocco/test_select.py +++ b/tests/components/lamarzocco/test_select.py @@ -2,8 +2,8 @@ from unittest.mock import MagicMock -from lmcloud.const import MachineModel, PrebrewMode, SteamLevel -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -121,6 +121,40 @@ async def test_pre_brew_infusion_select_none( assert state is None +async def test_smart_standby_mode( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_lamarzocco: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test the La Marzocco Smart Standby mode select.""" + + serial_number = mock_lamarzocco.serial_number + + state = hass.states.get(f"select.{serial_number}_smart_standby_mode") + + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry == snapshot + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: f"select.{serial_number}_smart_standby_mode", + ATTR_OPTION: "power_on", + }, + blocking=True, + ) + + mock_lamarzocco.set_smart_standby.assert_called_once_with( + enabled=True, mode=SmartStandbyMode.POWER_ON, minutes=10 + ) + + async def test_select_errors( hass: HomeAssistant, mock_lamarzocco: MagicMock, diff --git a/tests/components/lamarzocco/test_sensor.py b/tests/components/lamarzocco/test_sensor.py index 760dcffd28ffff..6f14d52d1fcc9d 100644 --- a/tests/components/lamarzocco/test_sensor.py +++ b/tests/components/lamarzocco/test_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from lmcloud.const import MachineModel +from pylamarzocco.const import MachineModel import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_switch.py b/tests/components/lamarzocco/test_switch.py index a09d254ffe9978..5c6d1cb1e421da 100644 --- a/tests/components/lamarzocco/test_switch.py +++ b/tests/components/lamarzocco/test_switch.py @@ -1,8 +1,9 @@ """Tests for La Marzocco switches.""" +from typing import Any from unittest.mock import MagicMock -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -25,15 +26,15 @@ ( "entity_name", "method_name", + "kwargs", ), [ + ("", "set_power", {}), + ("_steam_boiler", "set_steam", {}), ( - "", - "set_power", - ), - ( - "_steam_boiler", - "set_steam", + "_smart_standby_enabled", + "set_smart_standby", + {"mode": "LastBrewing", "minutes": 10}, ), ], ) @@ -45,6 +46,7 @@ async def test_switches( snapshot: SnapshotAssertion, entity_name: str, method_name: str, + kwargs: dict[str, Any], ) -> None: """Test the La Marzocco switches.""" await async_init_integration(hass, mock_config_entry) @@ -71,7 +73,7 @@ async def test_switches( ) assert len(control_fn.mock_calls) == 1 - control_fn.assert_called_once_with(False) + control_fn.assert_called_once_with(enabled=False, **kwargs) await hass.services.async_call( SWITCH_DOMAIN, @@ -83,7 +85,7 @@ async def 
test_switches( ) assert len(control_fn.mock_calls) == 2 - control_fn.assert_called_with(True) + control_fn.assert_called_with(enabled=True, **kwargs) async def test_device( diff --git a/tests/components/lamarzocco/test_update.py b/tests/components/lamarzocco/test_update.py index 3dc2a86b57468b..aef37d7c9218a8 100644 --- a/tests/components/lamarzocco/test_update.py +++ b/tests/components/lamarzocco/test_update.py @@ -2,8 +2,8 @@ from unittest.mock import MagicMock -from lmcloud.const import FirmwareType -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lcn/fixtures/config_entry_pchk.json b/tests/components/lcn/fixtures/config_entry_pchk.json index d8eef6d1eb31bd..068b875770710b 100644 --- a/tests/components/lcn/fixtures/config_entry_pchk.json +++ b/tests/components/lcn/fixtures/config_entry_pchk.json @@ -32,7 +32,7 @@ "domain_data": { "output": "OUTPUT1", "dimmable": true, - "transition": 5000.0 + "transition": 5.0 } }, { @@ -43,7 +43,7 @@ "domain_data": { "output": "OUTPUT2", "dimmable": false, - "transition": 0 + "transition": 0.0 } }, { @@ -93,6 +93,24 @@ "output": "RELAY2" } }, + { + "address": [0, 7, false], + "name": "Switch_Regulator1", + "resource": "r1varsetpoint", + "domain": "switch", + "domain_data": { + "output": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Switch_KeyLock1", + "resource": "a1", + "domain": "switch", + "domain_data": { + "output": "A1" + } + }, { "address": [0, 5, true], "name": "Switch_Group5", @@ -145,7 +163,7 @@ "register": 0, "scene": 0, "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": null + "transition": 0.0 } }, { @@ -157,7 +175,7 @@ "register": 0, "scene": 1, "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": 10 + "transition": 10.0 } }, { diff --git a/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json b/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json index 9a8095ff16d079..e1893c30b42cf5 100644 --- a/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json +++ b/tests/components/lcn/fixtures/config_entry_pchk_v1_1.json @@ -92,6 +92,24 @@ "output": "RELAY2" } }, + { + "address": [0, 7, false], + "name": "Switch_Regulator1", + "resource": "r1varsetpoint", + "domain": "switch", + "domain_data": { + "output": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Switch_KeyLock1", + "resource": "a1", + "domain": "switch", + "domain_data": { + "output": "A1" + } + }, { "address": [0, 5, true], "name": "Switch_Group5", @@ -156,7 +174,7 @@ "register": 0, "scene": 1, "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], - "transition": 10 + "transition": 10000 } }, { diff --git a/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json b/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json new file mode 100644 index 00000000000000..7389079dca9a8e --- /dev/null +++ b/tests/components/lcn/fixtures/config_entry_pchk_v1_2.json @@ -0,0 +1,249 @@ +{ + "host": "pchk", + "ip_address": "192.168.2.41", + "port": 4114, + "username": "lcn", + "password": "lcn", + "sk_num_tries": 0, + "dim_mode": "STEPS200", + "acknowledge": false, + "devices": [ + { + "address": [0, 7, false], + "name": "TestModule", + "hardware_serial": -1, + "software_serial": -1, + "hardware_type": -1 + }, + { + "address": [0, 5, true], + "name": "TestGroup", + "hardware_serial": -1, + "software_serial": -1, + "hardware_type": -1 + } + ], + 
"entities": [ + { + "address": [0, 7, false], + "name": "Light_Output1", + "resource": "output1", + "domain": "light", + "domain_data": { + "output": "OUTPUT1", + "dimmable": true, + "transition": 5000.0 + } + }, + { + "address": [0, 7, false], + "name": "Light_Output2", + "resource": "output2", + "domain": "light", + "domain_data": { + "output": "OUTPUT2", + "dimmable": false, + "transition": 0 + } + }, + { + "address": [0, 7, false], + "name": "Light_Relay1", + "resource": "relay1", + "domain": "light", + "domain_data": { + "output": "RELAY1", + "dimmable": false, + "transition": 0.0 + } + }, + { + "address": [0, 7, false], + "name": "Switch_Output1", + "resource": "output1", + "domain": "switch", + "domain_data": { + "output": "OUTPUT1" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Output2", + "resource": "output2", + "domain": "switch", + "domain_data": { + "output": "OUTPUT2" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Relay1", + "resource": "relay1", + "domain": "switch", + "domain_data": { + "output": "RELAY1" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Relay2", + "resource": "relay2", + "domain": "switch", + "domain_data": { + "output": "RELAY2" + } + }, + { + "address": [0, 7, false], + "name": "Switch_Regulator1", + "resource": "r1varsetpoint", + "domain": "switch", + "domain_data": { + "output": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Switch_KeyLock1", + "resource": "a1", + "domain": "switch", + "domain_data": { + "output": "A1" + } + }, + { + "address": [0, 5, true], + "name": "Switch_Group5", + "resource": "relay1", + "domain": "switch", + "domain_data": { + "output": "RELAY1" + } + }, + { + "address": [0, 7, false], + "name": "Cover_Outputs", + "resource": "outputs", + "domain": "cover", + "domain_data": { + "motor": "OUTPUTS", + "reverse_time": "RT1200" + } + }, + { + "address": [0, 7, false], + "name": "Cover_Relays", + "resource": "motor1", + "domain": "cover", + "domain_data": { + "motor": "MOTOR1", + "reverse_time": "RT1200" + } + }, + { + "address": [0, 7, false], + "name": "Climate1", + "resource": "var1.r1varsetpoint", + "domain": "climate", + "domain_data": { + "source": "VAR1", + "setpoint": "R1VARSETPOINT", + "lockable": true, + "min_temp": 0.0, + "max_temp": 40.0, + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Romantic", + "resource": "0.0", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 0, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": null + } + }, + { + "address": [0, 7, false], + "name": "Romantic Transition", + "resource": "0.1", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 1, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": 10000 + } + }, + { + "address": [0, 7, false], + "name": "Sensor_LockRegulator1", + "resource": "r1varsetpoint", + "domain": "binary_sensor", + "domain_data": { + "source": "R1VARSETPOINT" + } + }, + { + "address": [0, 7, false], + "name": "Binary_Sensor1", + "resource": "binsensor1", + "domain": "binary_sensor", + "domain_data": { + "source": "BINSENSOR1" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_KeyLock", + "resource": "a5", + "domain": "binary_sensor", + "domain_data": { + "source": "A5" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Var1", + "resource": "var1", + "domain": "sensor", + "domain_data": { + "source": "VAR1", + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Setpoint1", + 
"resource": "r1varsetpoint", + "domain": "sensor", + "domain_data": { + "source": "R1VARSETPOINT", + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_Led6", + "resource": "led6", + "domain": "sensor", + "domain_data": { + "source": "LED6", + "unit_of_measurement": "NATIVE" + } + }, + { + "address": [0, 7, false], + "name": "Sensor_LogicOp1", + "resource": "logicop1", + "domain": "sensor", + "domain_data": { + "source": "LOGICOP1", + "unit_of_measurement": "NATIVE" + } + } + ] +} diff --git a/tests/components/lcn/snapshots/test_sensor.ambr b/tests/components/lcn/snapshots/test_sensor.ambr index d6ac73b58223e7..56776e3e0f6dc9 100644 --- a/tests/components/lcn/snapshots/test_sensor.ambr +++ b/tests/components/lcn/snapshots/test_sensor.ambr @@ -113,7 +113,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Sensor_Setpoint1', 'platform': 'lcn', @@ -121,14 +121,15 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', - 'unit_of_measurement': '°C', + 'unit_of_measurement': , }) # --- # name: test_setup_lcn_sensor[sensor.sensor_setpoint1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Sensor_Setpoint1', - 'unit_of_measurement': '°C', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.sensor_setpoint1', @@ -160,7 +161,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Sensor_Var1', 'platform': 'lcn', @@ -168,14 +169,15 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'lcn/config_entry_pchk.json-m000007-var1', - 'unit_of_measurement': '°C', + 'unit_of_measurement': , }) # --- # name: test_setup_lcn_sensor[sensor.sensor_var1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Sensor_Var1', - 'unit_of_measurement': '°C', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.sensor_var1', diff --git a/tests/components/lcn/snapshots/test_switch.ambr b/tests/components/lcn/snapshots/test_switch.ambr index 1f2aac041aa55b..36145b8d4fdab7 100644 --- a/tests/components/lcn/snapshots/test_switch.ambr +++ b/tests/components/lcn/snapshots/test_switch.ambr @@ -45,6 +45,52 @@ 'state': 'off', }) # --- +# name: test_setup_lcn_switch[switch.switch_keylock1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_keylock1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_KeyLock1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-a1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_keylock1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_KeyLock1', + }), + 'context': , + 'entity_id': 'switch.switch_keylock1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: 
test_setup_lcn_switch[switch.switch_output1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -137,6 +183,52 @@ 'state': 'off', }) # --- +# name: test_setup_lcn_switch[switch.switch_regulator1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_regulator1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Regulator1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_regulator1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Regulator1', + }), + 'context': , + 'entity_id': 'switch.switch_regulator1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_setup_lcn_switch[switch.switch_relay1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/lcn/test_binary_sensor.py b/tests/components/lcn/test_binary_sensor.py index 7abae6e0d8981e..2f64f421b931e6 100644 --- a/tests/components/lcn/test_binary_sensor.py +++ b/tests/components/lcn/test_binary_sensor.py @@ -5,12 +5,19 @@ from pypck.inputs import ModStatusBinSensors, ModStatusKeyLocks, ModStatusVar from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import Var, VarValue +import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.lcn import DOMAIN from homeassistant.components.lcn.helpers import get_device_connection +from homeassistant.components.script import scripts_with_entity from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component from .conftest import MockConfigEntry, init_integration @@ -131,3 +138,54 @@ async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) assert hass.states.get(BINARY_SENSOR_LOCKREGULATOR1).state == STATE_UNAVAILABLE assert hass.states.get(BINARY_SENSOR_SENSOR1).state == STATE_UNAVAILABLE assert hass.states.get(BINARY_SENSOR_KEYLOCK).state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + "entity_id", ["binary_sensor.sensor_lockregulator1", "binary_sensor.sensor_keylock"] +) +async def test_create_issue( + hass: HomeAssistant, + service_calls: list[ServiceCall], + issue_registry: ir.IssueRegistry, + entry: MockConfigEntry, + entity_id, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": {"action": "test.automation"}, + } + }, + ) + + assert await async_setup_component( + hass, + 
script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": { + "condition": "state", + "entity_id": entity_id, + "state": STATE_ON, + } + } + } + }, + ) + + await init_integration(hass, entry) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert issue_registry.async_get_issue( + DOMAIN, f"deprecated_binary_sensor_{entity_id}" + ) + + assert len(issue_registry.issues) == 1 diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index 33b40e15b0cb47..b7967c247ec449 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -23,9 +23,7 @@ CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -48,83 +46,6 @@ } -async def test_step_import( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test for import step.""" - - with ( - patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), - patch("homeassistant.components.lcn.async_setup_entry", return_value=True), - ): - data = IMPORT_DATA.copy() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "pchk" - assert result["data"] == IMPORT_DATA - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - - -async def test_step_import_existing_host( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test for update of config_entry if imported host already exists.""" - - # Create config entry and add it to hass - mock_data = IMPORT_DATA.copy() - mock_data.update({CONF_SK_NUM_TRIES: 3, CONF_DIM_MODE: 50}) - mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data) - mock_entry.add_to_hass(hass) - # Initialize a config flow with different data but same host address - with patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"): - imported_data = IMPORT_DATA.copy() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data - ) - - # Check if config entry was updated - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "existing_configuration_updated" - assert mock_entry.source == config_entries.SOURCE_IMPORT - assert mock_entry.data == IMPORT_DATA - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - - -@pytest.mark.parametrize( - ("error", "reason"), - [ - (PchkAuthenticationError, "authentication_error"), - (PchkLicenseError, "license_error"), - (TimeoutError, "connection_refused"), - ], -) -async def test_step_import_error( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, error, reason -) -> None: - """Test for error in import is handled correctly.""" - with patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect", - side_effect=error, - ): - data = IMPORT_DATA.copy() - data.update({CONF_HOST: "pchk"}) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": 
config_entries.SOURCE_IMPORT}, data=data - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert issue_registry.async_get_issue(DOMAIN, reason) - - async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" flow = LcnFlowHandler() @@ -140,7 +61,6 @@ async def test_step_user(hass: HomeAssistant) -> None: """Test for user step.""" with ( patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): data = CONNECTION_DATA.copy() @@ -206,11 +126,10 @@ async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> result = await entry.start_reconfigure_flow(hass) assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with ( patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): result = await hass.config_entries.flow.async_configure( @@ -244,7 +163,7 @@ async def test_step_reconfigure_error( result = await entry.start_reconfigure_flow(hass) assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch( "homeassistant.components.lcn.PchkConnectionManager.async_connect", diff --git a/tests/components/lcn/test_cover.py b/tests/components/lcn/test_cover.py index 0067e755b5a4fe..ff4311b6687d9f 100644 --- a/tests/components/lcn/test_cover.py +++ b/tests/components/lcn/test_cover.py @@ -7,17 +7,13 @@ from pypck.lcn_defs import MotorReverseTime, MotorStateModifier from syrupy.assertion import SnapshotAssertion -from homeassistant.components.cover import DOMAIN as DOMAIN_COVER +from homeassistant.components.cover import DOMAIN as DOMAIN_COVER, CoverState from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, Platform, ) @@ -53,7 +49,7 @@ async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None MockModuleConnection, "control_motors_outputs" ) as control_motors_outputs: state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSED + state.state = CoverState.CLOSED # command failed control_motors_outputs.return_value = False @@ -71,7 +67,7 @@ async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state != STATE_OPENING + assert state.state != CoverState.OPENING # command success control_motors_outputs.reset_mock(return_value=True) @@ -90,7 +86,7 @@ async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> None: @@ -101,7 +97,7 @@ async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> Non MockModuleConnection, "control_motors_outputs" ) as control_motors_outputs: state = 
hass.states.get(COVER_OUTPUTS) - state.state = STATE_OPEN + state.state = CoverState.OPEN # command failed control_motors_outputs.return_value = False @@ -119,7 +115,7 @@ async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> Non state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state != STATE_CLOSING + assert state.state != CoverState.CLOSING # command success control_motors_outputs.reset_mock(return_value=True) @@ -138,7 +134,7 @@ async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> Non state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: @@ -149,7 +145,7 @@ async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None MockModuleConnection, "control_motors_outputs" ) as control_motors_outputs: state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSING + state.state = CoverState.CLOSING # command failed control_motors_outputs.return_value = False @@ -165,7 +161,7 @@ async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # command success control_motors_outputs.reset_mock(return_value=True) @@ -182,7 +178,7 @@ async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state not in (STATE_CLOSING, STATE_OPENING) + assert state.state not in (CoverState.CLOSING, CoverState.OPENING) async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: @@ -196,7 +192,7 @@ async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: states[0] = MotorStateModifier.UP state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSED + state.state = CoverState.CLOSED # command failed control_motors_relays.return_value = False @@ -212,7 +208,7 @@ async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state != STATE_OPENING + assert state.state != CoverState.OPENING # command success control_motors_relays.reset_mock(return_value=True) @@ -229,7 +225,7 @@ async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None: @@ -243,7 +239,7 @@ async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None states[0] = MotorStateModifier.DOWN state = hass.states.get(COVER_RELAYS) - state.state = STATE_OPEN + state.state = CoverState.OPEN # command failed control_motors_relays.return_value = False @@ -259,7 +255,7 @@ async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state != STATE_CLOSING + assert state.state != CoverState.CLOSING # command success control_motors_relays.reset_mock(return_value=True) @@ -276,7 +272,7 @@ async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None state = hass.states.get(COVER_RELAYS) assert state is not None - assert 
state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: @@ -290,7 +286,7 @@ async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: states[0] = MotorStateModifier.STOP state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSING + state.state = CoverState.CLOSING # command failed control_motors_relays.return_value = False @@ -306,7 +302,7 @@ async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # command success control_motors_relays.reset_mock(return_value=True) @@ -323,7 +319,7 @@ async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state not in (STATE_CLOSING, STATE_OPENING) + assert state.state not in (CoverState.CLOSING, CoverState.OPENING) async def test_pushed_outputs_status_change( @@ -336,7 +332,7 @@ async def test_pushed_outputs_status_change( address = LcnAddr(0, 7, False) state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSED + state.state = CoverState.CLOSED # push status "open" inp = ModStatusOutput(address, 0, 100) @@ -345,7 +341,7 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # push status "stop" inp = ModStatusOutput(address, 0, 0) @@ -354,7 +350,7 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state not in (STATE_OPENING, STATE_CLOSING) + assert state.state not in (CoverState.OPENING, CoverState.CLOSING) # push status "close" inp = ModStatusOutput(address, 1, 100) @@ -363,7 +359,7 @@ async def test_pushed_outputs_status_change( state = hass.states.get(COVER_OUTPUTS) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_pushed_relays_status_change( @@ -377,7 +373,7 @@ async def test_pushed_relays_status_change( states = [False] * 8 state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSED + state.state = CoverState.CLOSED # push status "open" states[0:2] = [True, False] @@ -387,7 +383,7 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # push status "stop" states[0] = False @@ -397,7 +393,7 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state not in (STATE_OPENING, STATE_CLOSING) + assert state.state not in (CoverState.OPENING, CoverState.CLOSING) # push status "close" states[0:2] = [True, True] @@ -407,7 +403,7 @@ async def test_pushed_relays_status_change( state = hass.states.get(COVER_RELAYS) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 62fa79961cbeed..2327635e356695 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -16,7 +16,6 @@ MockPchkConnectionManager, 
create_config_entry, init_integration, - setup_component, ) @@ -83,18 +82,6 @@ async def test_async_setup_entry_update( assert dummy_entity in entity_registry.entities.values() assert dummy_device in device_registry.devices.values() - # setup new entry with same data via import step (should cleanup dummy device) - with patch( - "homeassistant.components.lcn.config_flow.validate_connection", - return_value=None, - ): - await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry.data - ) - - assert dummy_device not in device_registry.devices.values() - assert dummy_entity not in entity_registry.entities.values() - @pytest.mark.parametrize( "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] @@ -114,20 +101,6 @@ async def test_async_setup_entry_raises_authentication_error( assert entry.state is ConfigEntryState.SETUP_ERROR -async def test_async_setup_from_configuration_yaml(hass: HomeAssistant) -> None: - """Test a successful setup using data from configuration.yaml.""" - with ( - patch( - "homeassistant.components.lcn.config_flow.validate_connection", - return_value=None, - ), - patch("homeassistant.components.lcn.async_setup_entry") as async_setup_entry, - ): - await setup_component(hass) - - assert async_setup_entry.await_count == 2 - - @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: """Test migration config entry.""" @@ -139,6 +112,22 @@ async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: entry_migrated = hass.config_entries.async_get_entry(entry_v1_1.entry_id) assert entry_migrated.state is ConfigEntryState.LOADED - assert entry_migrated.version == 1 - assert entry_migrated.minor_version == 2 + assert entry_migrated.version == 2 + assert entry_migrated.minor_version == 1 + assert entry_migrated.data == entry.data + + +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_migrate_1_2(hass: HomeAssistant, entry) -> None: + """Test migration config entry.""" + entry_v1_2 = create_config_entry("pchk_v1_2", version=(1, 2)) + entry_v1_2.add_to_hass(hass) + + await hass.config_entries.async_setup(entry_v1_2.entry_id) + await hass.async_block_till_done() + + entry_migrated = hass.config_entries.async_get_entry(entry_v1_2.entry_id) + assert entry_migrated.state is ConfigEntryState.LOADED + assert entry_migrated.version == 2 + assert entry_migrated.minor_version == 1 assert entry_migrated.data == entry.data diff --git a/tests/components/lcn/test_scene.py b/tests/components/lcn/test_scene.py index fcd59693479e29..27e7864df410a5 100644 --- a/tests/components/lcn/test_scene.py +++ b/tests/components/lcn/test_scene.py @@ -51,7 +51,7 @@ async def test_scene_activate( assert state is not None activate_scene.assert_awaited_with( - 0, 0, [OutputPort.OUTPUT1, OutputPort.OUTPUT2], [RelayPort.RELAY1], None + 0, 0, [OutputPort.OUTPUT1, OutputPort.OUTPUT2], [RelayPort.RELAY1], 0.0 ) diff --git a/tests/components/lcn/test_switch.py b/tests/components/lcn/test_switch.py index f57a51bc8a3824..15b156aac430a7 100644 --- a/tests/components/lcn/test_switch.py +++ b/tests/components/lcn/test_switch.py @@ -2,9 +2,14 @@ from unittest.mock import patch -from pypck.inputs import ModStatusOutput, ModStatusRelays +from pypck.inputs import ( + ModStatusKeyLocks, + ModStatusOutput, + ModStatusRelays, + ModStatusVar, +) from pypck.lcn_addr import LcnAddr -from pypck.lcn_defs import 
RelayStateModifier +from pypck.lcn_defs import KeyLockStateModifier, RelayStateModifier, Var, VarValue from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection @@ -29,6 +34,8 @@ SWITCH_OUTPUT2 = "switch.switch_output2" SWITCH_RELAY1 = "switch.switch_relay1" SWITCH_RELAY2 = "switch.switch_relay2" +SWITCH_REGULATOR1 = "switch.switch_regulator1" +SWITCH_KEYLOCKK1 = "switch.switch_keylock1" async def test_setup_lcn_switch( @@ -204,6 +211,170 @@ async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> No assert state.state == STATE_OFF +async def test_regulatorlock_turn_on( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the regulator lock switch turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_OFF + + # command success + lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_ON + + +async def test_regulatorlock_turn_off( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the regulator lock switch turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + state = hass.states.get(SWITCH_REGULATOR1) + state.state = STATE_ON + + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_ON + + # command success + lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_REGULATOR1}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_OFF + + +async def test_keylock_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the keylock switch turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_keys") as lock_keys: + states = [KeyLockStateModifier.NOCHANGE] * 8 + states[0] = KeyLockStateModifier.ON + + # command failed + lock_keys.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_OFF + + # command success + lock_keys.reset_mock(return_value=True) + lock_keys.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + 
assert state.state == STATE_ON + + +async def test_keylock_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the keylock switch turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_keys") as lock_keys: + states = [KeyLockStateModifier.NOCHANGE] * 8 + states[0] = KeyLockStateModifier.OFF + + state = hass.states.get(SWITCH_KEYLOCKK1) + state.state = STATE_ON + + # command failed + lock_keys.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_ON + + # command success + lock_keys.reset_mock(return_value=True) + lock_keys.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_KEYLOCKK1}, + blocking=True, + ) + + lock_keys.assert_awaited_with(0, states) + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_OFF + + async def test_pushed_output_status_change( hass: HomeAssistant, entry: MockConfigEntry ) -> None: @@ -259,6 +430,64 @@ async def test_pushed_relay_status_change( assert state.state == STATE_OFF +async def test_pushed_regulatorlock_status_change( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the regulator lock switch changes its state on status received.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + address = LcnAddr(0, 7, False) + states = [False] * 8 + + # push status "on" + states[0] = True + inp = ModStatusVar(address, Var.R1VARSETPOINT, VarValue(0x8000)) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_ON + + # push status "off" + states[0] = False + inp = ModStatusVar(address, Var.R1VARSETPOINT, VarValue(0x7FFF)) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_REGULATOR1) + assert state.state == STATE_OFF + + +async def test_pushed_keylock_status_change( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the keylock switch changes its state on status received.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + address = LcnAddr(0, 7, False) + states = [[False] * 8 for i in range(4)] + states[0][0] = True + + # push status "on" + inp = ModStatusKeyLocks(address, states) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_ON + + # push status "off" + states[0][0] = False + inp = ModStatusKeyLocks(address, states) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get(SWITCH_KEYLOCKK1) + assert state.state == STATE_OFF + + async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the switch is removed when the config entry is unloaded.""" await init_integration(hass, entry) diff --git a/tests/components/lektrico/fixtures/get_info.json b/tests/components/lektrico/fixtures/get_info.json index 2f190d2f00c0e8..2b099a666e56f4 100644 --- a/tests/components/lektrico/fixtures/get_info.json +++ b/tests/components/lektrico/fixtures/get_info.json @@ 
-13,5 +13,16 @@ "led_max_brightness": 20, "dynamic_current": 32, "user_current": 32, - "lb_mode": 0 + "lb_mode": 0, + "require_auth": true, + "state_e_activated": false, + "undervoltage_error": true, + "rcd_error": false, + "meter_fault": false, + "overcurrent": false, + "overtemp": false, + "overvoltage_error": false, + "contactor_failure": false, + "cp_diode_failure": false, + "critical_temp": false } diff --git a/tests/components/lektrico/snapshots/test_binary_sensor.ambr b/tests/components/lektrico/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000000..6a28e7c60de5cc --- /dev/null +++ b/tests/components/lektrico/snapshots/test_binary_sensor.ambr @@ -0,0 +1,471 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ev diode short', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cp_diode_failure', + 'unique_id': '500006_cp_diode_failure', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Ev diode short', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ev error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_e_activated', + 'unique_id': '500006_state_e_activated', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Ev error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_metering_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Metering error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_fault', + 'unique_id': '500006_meter_fault', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Metering error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_metering_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overcurrent', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overcurrent', + 'unique_id': '500006_overcurrent', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overcurrent', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overheating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overheating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overheating', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'critical_temp', + 'unique_id': '500006_critical_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overheating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overheating', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overheating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Overvoltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overvoltage', + 'unique_id': '500006_overvoltage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overvoltage', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rcd error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rcd_error', + 'unique_id': '500006_rcd_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Rcd error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay contacts welded', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'contactor_failure', + 'unique_id': '500006_contactor_failure', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Relay contacts welded', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Thermal throttling', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overtemp', + 'unique_id': '500006_overtemp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Thermal throttling', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Undervoltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'undervoltage', + 'unique_id': '500006_undervoltage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Undervoltage', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_sensor.ambr b/tests/components/lektrico/snapshots/test_sensor.ambr index 002e0b00ca8bcd..73ec88e6fa1c2c 100644 --- a/tests/components/lektrico/snapshots/test_sensor.ambr +++ b/tests/components/lektrico/snapshots/test_sensor.ambr @@ -381,11 +381,13 @@ 'capabilities': dict({ 'options': list([ 'available', + 'charging', 'connected', + 'error', + 'locked', 'need_auth', 'paused', - 'charging', - 'error', + 'paused_by_scheduler', 'updating_firmware', ]), }), @@ -423,11 +425,13 @@ 'friendly_name': '1p7k_500006 State', 'options': list([ 'available', + 'charging', 'connected', + 'error', + 'locked', 'need_auth', 'paused', - 'charging', - 'error', + 'paused_by_scheduler', 'updating_firmware', ]), }), diff --git a/tests/components/lektrico/snapshots/test_switch.ambr b/tests/components/lektrico/snapshots/test_switch.ambr new file mode 100644 index 00000000000000..3f4a1693315895 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_switch.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_all_entities[switch.1p7k_500006_authentication-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.1p7k_500006_authentication', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Authentication', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'authentication', + 'unique_id': '500006_authentication', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.1p7k_500006_authentication-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Authentication', + }), + 'context': , + 'entity_id': 'switch.1p7k_500006_authentication', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[switch.1p7k_500006_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.1p7k_500006_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lock', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lock', + 'unique_id': '500006_lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.1p7k_500006_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '1p7k_500006 Lock', + }), + 'context': , + 'entity_id': 'switch.1p7k_500006_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/lektrico/test_binary_sensor.py b/tests/components/lektrico/test_binary_sensor.py new file mode 100644 index 00000000000000..d49eac6cc23ac0 --- /dev/null +++ b/tests/components/lektrico/test_binary_sensor.py @@ -0,0 +1,32 @@ +"""Tests for the Lektrico binary sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.BINARY_SENSOR], + LB_DEVICES_PLATFORMS=[Platform.BINARY_SENSOR], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lektrico/test_switch.py b/tests/components/lektrico/test_switch.py new file mode 100644 index 00000000000000..cfa693d9e44941 --- /dev/null +++ b/tests/components/lektrico/test_switch.py @@ -0,0 +1,32 @@ +"""Tests for the Lektrico switch platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.SWITCH], + LB_DEVICES_PLATFORMS=[Platform.SWITCH], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_netcast/test_config_flow.py b/tests/components/lg_netcast/test_config_flow.py index 2ecbadbaf4439d..7959c0c445ec85 100644 --- a/tests/components/lg_netcast/test_config_flow.py +++ b/tests/components/lg_netcast/test_config_flow.py @@ -3,9 +3,11 @@ from datetime import timedelta from unittest.mock import DEFAULT, patch +import pytest + from homeassistant import data_entry_flow from homeassistant.components.lg_netcast.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_ACCESS_TOKEN, CONF_HOST, @@ -24,8 +26,6 @@ _patch_lg_netcast, ) -from tests.common import MockConfigEntry - async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" @@ -114,6 +114,10 @@ async def test_manual_host_unsuccessful_details_response(hass: HomeAssistant) -> assert result["reason"] == "cannot_connect" +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.lg_netcast.config.abort.invalid_host"], +) async def test_manual_host_no_unique_id_response(hass: HomeAssistant) -> None: """Test manual host configuration.""" with _patch_lg_netcast(no_unique_id=True): @@ -146,77 +150,6 @@ async def test_invalid_session_id(hass: HomeAssistant) -> None: assert result2["errors"]["base"] == "cannot_connect" -async def test_import(hass: HomeAssistant) -> None: - """Test that the import works.""" - with _patch_lg_netcast(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == UNIQUE_ID - assert result["data"] == { - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - CONF_MODEL: MODEL_NAME, - CONF_ID: UNIQUE_ID, - } - - -async def test_import_not_online(hass: HomeAssistant) -> None: - """Test that the import works.""" - with _patch_lg_netcast(fail_connection=True): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - -async def test_import_duplicate_error(hass: HomeAssistant) -> None: - """Test that errors are shown when duplicates are added during import.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=UNIQUE_ID, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - CONF_ID: UNIQUE_ID, - }, - ) - config_entry.add_to_hass(hass) - - with _patch_lg_netcast(): - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_HOST: IP_ADDRESS, - CONF_ACCESS_TOKEN: FAKE_PIN, - CONF_NAME: MODEL_NAME, - CONF_ID: UNIQUE_ID, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "already_configured" - - async def test_display_access_token_aborted(hass: HomeAssistant) -> None: """Test Access token display is cancelled.""" diff --git a/tests/components/lg_thinq/__init__.py b/tests/components/lg_thinq/__init__.py new file mode 100644 index 00000000000000..a5ba55ab1c9381 --- /dev/null +++ b/tests/components/lg_thinq/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the LG ThinQ integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/lg_thinq/conftest.py b/tests/components/lg_thinq/conftest.py new file mode 100644 index 00000000000000..05cb3164137900 --- /dev/null +++ b/tests/components/lg_thinq/conftest.py @@ -0,0 +1,110 @@ +"""Configure tests for the LGThinQ integration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from thinqconnect import ThinQAPIException + +from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY + +from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT, MOCK_UUID + +from tests.common import MockConfigEntry, load_json_object_fixture + + +def mock_thinq_api_response( + *, + status: int = 200, + body: dict | None = None, + error_code: str | None = None, + error_message: str | None = None, +) -> MagicMock: + """Create a mock thinq api response.""" + response = MagicMock() + response.status = status + response.body = body + response.error_code = error_code + response.error_message = error_message + return response + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create a mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=f"Test {DOMAIN}", + unique_id=MOCK_PAT, + data={ + CONF_ACCESS_TOKEN: MOCK_PAT, + CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.lg_thinq.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_uuid() -> Generator[AsyncMock]: + """Mock a uuid.""" + with ( + patch("uuid.uuid4", autospec=True, return_value=MOCK_UUID) as mock_uuid, + patch( + "homeassistant.components.lg_thinq.config_flow.uuid.uuid4", + new=mock_uuid, + ), + ): + yield mock_uuid.return_value + + +@pytest.fixture +def mock_thinq_api(mock_thinq_mqtt_client: AsyncMock) -> Generator[AsyncMock]: + """Mock a thinq api.""" + with ( + patch("homeassistant.components.lg_thinq.ThinQApi", autospec=True) as mock_api, + patch( + "homeassistant.components.lg_thinq.config_flow.ThinQApi", + new=mock_api, + ), + ): + thinq_api = mock_api.return_value + thinq_api.async_get_device_list.return_value = [ + load_json_object_fixture("air_conditioner/device.json", DOMAIN) + ] + 
thinq_api.async_get_device_profile.return_value = load_json_object_fixture( + "air_conditioner/profile.json", DOMAIN + ) + thinq_api.async_get_device_status.return_value = load_json_object_fixture( + "air_conditioner/status.json", DOMAIN + ) + yield thinq_api + + +@pytest.fixture +def mock_thinq_mqtt_client() -> Generator[AsyncMock]: + """Mock a thinq api.""" + with patch( + "homeassistant.components.lg_thinq.mqtt.ThinQMQTTClient", autospec=True + ) as mock_api: + yield mock_api + + +@pytest.fixture +def mock_invalid_thinq_api(mock_thinq_api: AsyncMock) -> AsyncMock: + """Mock an invalid thinq api.""" + mock_thinq_api.async_get_device_list = AsyncMock( + side_effect=ThinQAPIException( + code="1309", message="Not allowed api call", headers=None + ) + ) + return mock_thinq_api diff --git a/tests/components/lg_thinq/const.py b/tests/components/lg_thinq/const.py new file mode 100644 index 00000000000000..f46baa61c38aba --- /dev/null +++ b/tests/components/lg_thinq/const.py @@ -0,0 +1,8 @@ +"""Constants for lgthinq test.""" + +from typing import Final + +MOCK_PAT: Final[str] = "123abc4567de8f90g123h4ij56klmn789012p345rst6uvw789xy" +MOCK_UUID: Final[str] = "1b3deabc-123d-456d-987d-2a1c7b3bdb67" +MOCK_CONNECT_CLIENT_ID: Final[str] = f"home-assistant-{MOCK_UUID}" +MOCK_COUNTRY: Final[str] = "KR" diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/device.json b/tests/components/lg_thinq/fixtures/air_conditioner/device.json new file mode 100644 index 00000000000000..fb931c69929792 --- /dev/null +++ b/tests/components/lg_thinq/fixtures/air_conditioner/device.json @@ -0,0 +1,9 @@ +{ + "deviceId": "MW2-2E247F93-B570-46A6-B827-920E9E10F966", + "deviceInfo": { + "deviceType": "DEVICE_AIR_CONDITIONER", + "modelName": "PAC_910604_WW", + "alias": "Test air conditioner", + "reportable": true + } +} diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/profile.json b/tests/components/lg_thinq/fixtures/air_conditioner/profile.json new file mode 100644 index 00000000000000..0d45dc5c9f44bd --- /dev/null +++ b/tests/components/lg_thinq/fixtures/air_conditioner/profile.json @@ -0,0 +1,154 @@ +{ + "notification": { + "push": ["WATER_IS_FULL"] + }, + "property": { + "airConJobMode": { + "currentJobMode": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["AIR_CLEAN", "COOL", "AIR_DRY"], + "w": ["AIR_CLEAN", "COOL", "AIR_DRY"] + } + } + }, + "airFlow": { + "windStrength": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["LOW", "HIGH", "MID"], + "w": ["LOW", "HIGH", "MID"] + } + } + }, + "airQualitySensor": { + "PM1": { + "mode": ["r"], + "type": "number" + }, + "PM10": { + "mode": ["r"], + "type": "number" + }, + "PM2": { + "mode": ["r"], + "type": "number" + }, + "humidity": { + "mode": ["r"], + "type": "number" + }, + "monitoringEnabled": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["ON_WORKING", "ALWAYS"], + "w": ["ON_WORKING", "ALWAYS"] + } + }, + "oder": { + "mode": ["r"], + "type": "number" + }, + "totalPollution": { + "mode": ["r"], + "type": "number" + } + }, + "operation": { + "airCleanOperationMode": { + "mode": ["w"], + "type": "enum", + "value": { + "w": ["START", "STOP"] + } + }, + "airConOperationMode": { + "mode": ["r", "w"], + "type": "enum", + "value": { + "r": ["POWER_ON", "POWER_OFF"], + "w": ["POWER_ON", "POWER_OFF"] + } + } + }, + "powerSave": { + "powerSaveEnabled": { + "mode": ["r", "w"], + "type": "boolean", + "value": { + "r": [false, true], + "w": [false, true] + } + } + }, + "temperature": { + "coolTargetTemperature": { + 
"mode": ["w"], + "type": "range", + "value": { + "w": { + "max": 30, + "min": 18, + "step": 1 + } + } + }, + "currentTemperature": { + "mode": ["r"], + "type": "number" + }, + "targetTemperature": { + "mode": ["r", "w"], + "type": "range", + "value": { + "r": { + "max": 30, + "min": 18, + "step": 1 + }, + "w": { + "max": 30, + "min": 18, + "step": 1 + } + } + }, + "unit": { + "mode": ["r"], + "type": "enum", + "value": { + "r": ["C", "F"] + } + } + }, + "timer": { + "relativeHourToStart": { + "mode": ["r", "w"], + "type": "number" + }, + "relativeHourToStop": { + "mode": ["r", "w"], + "type": "number" + }, + "relativeMinuteToStart": { + "mode": ["r", "w"], + "type": "number" + }, + "relativeMinuteToStop": { + "mode": ["r", "w"], + "type": "number" + }, + "absoluteHourToStart": { + "mode": ["r", "w"], + "type": "number" + }, + "absoluteMinuteToStart": { + "mode": ["r", "w"], + "type": "number" + } + } + } +} diff --git a/tests/components/lg_thinq/fixtures/air_conditioner/status.json b/tests/components/lg_thinq/fixtures/air_conditioner/status.json new file mode 100644 index 00000000000000..90d15d1ae16d27 --- /dev/null +++ b/tests/components/lg_thinq/fixtures/air_conditioner/status.json @@ -0,0 +1,43 @@ +{ + "airConJobMode": { + "currentJobMode": "COOL" + }, + "airFlow": { + "windStrength": "MID" + }, + "airQualitySensor": { + "PM1": 12, + "PM10": 7, + "PM2": 24, + "humidity": 40, + "monitoringEnabled": "ON_WORKING", + "totalPollution": 3, + "totalPollutionLevel": "GOOD" + }, + "filterInfo": { + "filterLifetime": 540, + "usedTime": 180 + }, + "operation": { + "airConOperationMode": "POWER_ON" + }, + "powerSave": { + "powerSaveEnabled": false + }, + "sleepTimer": { + "relativeStopTimer": "UNSET" + }, + "temperature": { + "currentTemperature": 25, + "targetTemperature": 19, + "unit": "C" + }, + "timer": { + "relativeStartTimer": "UNSET", + "relativeStopTimer": "UNSET", + "absoluteStartTimer": "SET", + "absoluteStopTimer": "UNSET", + "absoluteHourToStart": 13, + "absoluteMinuteToStart": 14 + } +} diff --git a/tests/components/lg_thinq/snapshots/test_climate.ambr b/tests/components/lg_thinq/snapshots/test_climate.ambr new file mode 100644 index 00000000000000..e9470c3de031d6 --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_climate.ambr @@ -0,0 +1,86 @@ +# serializer version: 1 +# name: test_all_entities[climate.test_air_conditioner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'high', + 'mid', + ]), + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 30, + 'min_temp': 18, + 'preset_modes': list([ + 'air_clean', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_air_conditioner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_climate_air_conditioner', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.test_air_conditioner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 40, + 'current_temperature': 25, + 'fan_mode': 'mid', + 'fan_modes': list([ + 'low', 
+ 'high', + 'mid', + ]), + 'friendly_name': 'Test air conditioner', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 30, + 'min_temp': 18, + 'preset_mode': None, + 'preset_modes': list([ + 'air_clean', + ]), + 'supported_features': , + 'target_temp_step': 1, + 'temperature': 19, + }), + 'context': , + 'entity_id': 'climate.test_air_conditioner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/lg_thinq/snapshots/test_event.ambr b/tests/components/lg_thinq/snapshots/test_event.ambr new file mode 100644 index 00000000000000..025f4496aeb54e --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_event.ambr @@ -0,0 +1,55 @@ +# serializer version: 1 +# name: test_all_entities[event.test_air_conditioner_notification-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'water_is_full', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.test_air_conditioner_notification', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Notification', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_notification', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[event.test_air_conditioner_notification-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'water_is_full', + ]), + 'friendly_name': 'Test air conditioner Notification', + }), + 'context': , + 'entity_id': 'event.test_air_conditioner_notification', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lg_thinq/snapshots/test_number.ambr b/tests/components/lg_thinq/snapshots/test_number.ambr new file mode 100644 index 00000000000000..68f01854501160 --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_number.ambr @@ -0,0 +1,113 @@ +# serializer version: 1 +# name: test_all_entities[number.test_air_conditioner_schedule_turn_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.test_air_conditioner_schedule_turn_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Schedule turn-off', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_relative_hour_to_stop', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.test_air_conditioner_schedule_turn_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test air conditioner Schedule turn-off', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'number.test_air_conditioner_schedule_turn_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[number.test_air_conditioner_schedule_turn_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.test_air_conditioner_schedule_turn_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Schedule turn-on', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_relative_hour_to_start', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.test_air_conditioner_schedule_turn_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test air conditioner Schedule turn-on', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.test_air_conditioner_schedule_turn_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lg_thinq/snapshots/test_sensor.ambr b/tests/components/lg_thinq/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..387df916eba025 --- /dev/null +++ b/tests/components/lg_thinq/snapshots/test_sensor.ambr @@ -0,0 +1,205 @@ +# serializer version: 1 +# name: test_all_entities[sensor.test_air_conditioner_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Test air conditioner Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_air_conditioner_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM1', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm1', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'Test air conditioner PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.test_air_conditioner_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm10', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Test air conditioner PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.test_air_conditioner_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_air_conditioner_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'lg_thinq', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'MW2-2E247F93-B570-46A6-B827-920E9E10F966_pm2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[sensor.test_air_conditioner_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Test air conditioner PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.test_air_conditioner_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- diff --git a/tests/components/lg_thinq/test_climate.py b/tests/components/lg_thinq/test_climate.py new file mode 100644 index 00000000000000..24ed3ad230dbbd --- /dev/null +++ b/tests/components/lg_thinq/test_climate.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq climate platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion 
+ +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.CLIMATE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_config_flow.py b/tests/components/lg_thinq/test_config_flow.py new file mode 100644 index 00000000000000..e7ee632810eefb --- /dev/null +++ b/tests/components/lg_thinq/test_config_flow.py @@ -0,0 +1,69 @@ +"""Test the lgthinq config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT + +from tests.common import MockConfigEntry + + +async def test_config_flow( + hass: HomeAssistant, + mock_thinq_api: AsyncMock, + mock_uuid: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test that an thinq entry is normally created.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_ACCESS_TOKEN: MOCK_PAT, + CONF_COUNTRY: MOCK_COUNTRY, + CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, + } + + mock_thinq_api.async_get_device_list.assert_called_once() + + +async def test_config_flow_invalid_pat( + hass: HomeAssistant, mock_invalid_thinq_api: AsyncMock +) -> None: + """Test that an thinq flow should be aborted with an invalid PAT.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "token_unauthorized"} + mock_invalid_thinq_api.async_get_device_list.assert_called_once() + + +async def test_config_flow_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_thinq_api: AsyncMock +) -> None: + """Test that thinq flow should be aborted when already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/lg_thinq/test_event.py b/tests/components/lg_thinq/test_event.py new file mode 100644 index 00000000000000..bea758cb943ad2 --- /dev/null +++ 
b/tests/components/lg_thinq/test_event.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq event platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.EVENT]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_init.py b/tests/components/lg_thinq/test_init.py new file mode 100644 index 00000000000000..7da7e79fec07aa --- /dev/null +++ b/tests/components/lg_thinq/test_init.py @@ -0,0 +1,26 @@ +"""Tests for the LG ThinQ integration.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/lg_thinq/test_number.py b/tests/components/lg_thinq/test_number.py new file mode 100644 index 00000000000000..e578e4eba7a091 --- /dev/null +++ b/tests/components/lg_thinq/test_number.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq number platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_sensor.py b/tests/components/lg_thinq/test_sensor.py new file mode 100644 index 00000000000000..02b91b4771b962 --- /dev/null +++ b/tests/components/lg_thinq/test_sensor.py @@ -0,0 +1,29 @@ +"""Tests for the LG Thinq sensor platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_thinq_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lidarr/conftest.py b/tests/components/lidarr/conftest.py index 1024aadc403aa2..bd87fa947bcc47 100644 --- a/tests/components/lidarr/conftest.py +++ b/tests/components/lidarr/conftest.py @@ -44,10 +44,12 @@ def mock_error( aioclient_mock.get(f"{API_URL}/rootfolder", status=status) aioclient_mock.get(f"{API_URL}/system/status", status=status) aioclient_mock.get(f"{API_URL}/wanted/missing", status=status) + aioclient_mock.get(f"{API_URL}/album", status=status) aioclient_mock.get(f"{API_URL}/queue", exc=ClientError) aioclient_mock.get(f"{API_URL}/rootfolder", exc=ClientError) aioclient_mock.get(f"{API_URL}/system/status", exc=ClientError) aioclient_mock.get(f"{API_URL}/wanted/missing", exc=ClientError) + aioclient_mock.get(f"{API_URL}/album", exc=ClientError) @pytest.fixture @@ -115,6 +117,11 @@ def mock_connection(aioclient_mock: AiohttpClientMocker) -> None: text=load_fixture("lidarr/wanted-missing.json"), headers={"Content-Type": CONTENT_TYPE_JSON}, ) + aioclient_mock.get( + f"{API_URL}/album", + text=load_fixture("lidarr/album.json"), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) aioclient_mock.get( f"{API_URL}/rootfolder", text=load_fixture("lidarr/rootfolder-linux.json"), diff --git a/tests/components/lidarr/fixtures/album.json b/tests/components/lidarr/fixtures/album.json new file mode 100644 index 00000000000000..d257cabf1f1c0f --- /dev/null +++ b/tests/components/lidarr/fixtures/album.json @@ -0,0 +1,155 @@ +[ + { + "id": 0, + "title": "string", + "disambiguation": "string", + "overview": "string", + "artistId": 0, + "foreignAlbumId": "string", + "monitored": true, + "anyReleaseOk": true, + "profileId": 0, + "duration": 0, + "albumType": "string", + "secondaryTypes": ["string"], + "mediumCount": 0, + "ratings": { + "votes": 0, + "value": 0 
+ }, + "releaseDate": "2024-09-09T20:16:28.493Z", + "releases": [ + { + "id": 0, + "albumId": 0, + "foreignReleaseId": "string", + "title": "string", + "status": "string", + "duration": 0, + "trackCount": 0, + "media": [ + { + "mediumNumber": 0, + "mediumName": "string", + "mediumFormat": "string" + } + ], + "mediumCount": 0, + "disambiguation": "string", + "country": ["string"], + "label": ["string"], + "format": "string", + "monitored": true + } + ], + "genres": ["string"], + "media": [ + { + "mediumNumber": 0, + "mediumName": "string", + "mediumFormat": "string" + } + ], + "artist": { + "id": 0, + "status": "continuing", + "ended": true, + "artistName": "string", + "foreignArtistId": "string", + "mbId": "string", + "tadbId": 0, + "discogsId": 0, + "allMusicId": "string", + "overview": "string", + "artistType": "string", + "disambiguation": "string", + "links": [ + { + "url": "string", + "name": "string" + } + ], + "nextAlbum": "string", + "lastAlbum": "string", + "images": [ + { + "url": "string", + "coverType": "unknown", + "extension": "string", + "remoteUrl": "string" + } + ], + "members": [ + { + "name": "string", + "instrument": "string", + "images": [ + { + "url": "string", + "coverType": "unknown", + "extension": "string", + "remoteUrl": "string" + } + ] + } + ], + "remotePoster": "string", + "path": "string", + "qualityProfileId": 0, + "metadataProfileId": 0, + "monitored": true, + "monitorNewItems": "all", + "rootFolderPath": "string", + "folder": "string", + "genres": ["string"], + "cleanName": "string", + "sortName": "string", + "tags": [0], + "added": "2024-09-09T20:16:28.493Z", + "addOptions": { + "monitor": "all", + "albumsToMonitor": ["string"], + "monitored": true, + "searchForMissingAlbums": true + }, + "ratings": { + "votes": 0, + "value": 0 + }, + "statistics": { + "albumCount": 0, + "trackFileCount": 0, + "trackCount": 0, + "totalTrackCount": 0, + "sizeOnDisk": 0, + "percentOfTracks": 0 + } + }, + "images": [ + { + "url": "string", + "coverType": "unknown", + "extension": "string", + "remoteUrl": "string" + } + ], + "links": [ + { + "url": "string", + "name": "string" + } + ], + "statistics": { + "trackFileCount": 0, + "trackCount": 0, + "totalTrackCount": 0, + "sizeOnDisk": 0, + "percentOfTracks": 0 + }, + "addOptions": { + "addType": "automatic", + "searchForNewAlbum": true + }, + "remoteCover": "string" + } +] diff --git a/tests/components/lidarr/test_sensor.py b/tests/components/lidarr/test_sensor.py index 0c19355a252ea5..716df21303afeb 100644 --- a/tests/components/lidarr/test_sensor.py +++ b/tests/components/lidarr/test_sensor.py @@ -25,10 +25,14 @@ async def test_sensors( assert state.state == "2" assert state.attributes.get("string") == "stopped" assert state.attributes.get("string2") == "downloading" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Albums" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL state = hass.states.get("sensor.mock_title_wanted") assert state.state == "1" assert state.attributes.get("test") == "test" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Albums" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" + assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL + state = hass.states.get("sensor.mock_title_albums") + assert state.state == "1" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "albums" assert state.attributes.get(CONF_STATE_CLASS) == SensorStateClass.TOTAL diff 
--git a/tests/components/light/common.py b/tests/components/light/common.py index 0ad492a31e9145..ba095a036421dc 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -99,7 +99,7 @@ async def async_turn_on( flash: str | None = None, effect: str | None = None, color_name: str | None = None, - white: bool | None = None, + white: int | None = None, ) -> None: """Turn all or specified light on.""" data = { diff --git a/tests/components/linear_garage_door/test_cover.py b/tests/components/linear_garage_door/test_cover.py index f4593ff4d60b8d..be5ae8f35f79ec 100644 --- a/tests/components/linear_garage_door/test_cover.py +++ b/tests/components/linear_garage_door/test_cover.py @@ -10,16 +10,10 @@ DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, + CoverState, ) from homeassistant.components.linear_garage_door import DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -109,8 +103,8 @@ async def test_update_cover_state( await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert hass.states.get("cover.test_garage_1").state == STATE_OPEN - assert hass.states.get("cover.test_garage_2").state == STATE_CLOSED + assert hass.states.get("cover.test_garage_1").state == CoverState.OPEN + assert hass.states.get("cover.test_garage_2").state == CoverState.CLOSED device_states = load_json_object_fixture("get_device_state_1.json", DOMAIN) mock_linear.get_device_state.side_effect = lambda device_id: device_states[ @@ -120,5 +114,5 @@ async def test_update_cover_state( freezer.tick(timedelta(seconds=60)) async_fire_time_changed(hass) - assert hass.states.get("cover.test_garage_1").state == STATE_CLOSING - assert hass.states.get("cover.test_garage_2").state == STATE_OPENING + assert hass.states.get("cover.test_garage_1").state == CoverState.CLOSING + assert hass.states.get("cover.test_garage_2").state == CoverState.OPENING diff --git a/tests/components/linkplay/__init__.py b/tests/components/linkplay/__init__.py index 5962f7fdaba043..f825826f196928 100644 --- a/tests/components/linkplay/__init__.py +++ b/tests/components/linkplay/__init__.py @@ -1 +1,16 @@ """Tests for the LinkPlay integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py index be83dd2412d325..81ae993f6c3a1c 100644 --- a/tests/components/linkplay/conftest.py +++ b/tests/components/linkplay/conftest.py @@ -1,12 +1,22 @@ """Test configuration and mocks for LinkPlay component.""" -from collections.abc import Generator +from collections.abc import Generator, Iterator +from contextlib import contextmanager +from typing import Any +from unittest import mock from unittest.mock import AsyncMock, patch from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge, LinkPlayDevice import pytest +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_CLOSE 
+from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_fixture +from tests.conftest import AiohttpClientMocker + HOST = "10.0.0.150" HOST_REENTRY = "10.0.0.66" UUID = "FF31F09E-5001-FBDE-0546-2DBFFF31F09E" @@ -24,15 +34,15 @@ def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: ), patch( "homeassistant.components.linkplay.config_flow.linkplay_factory_httpapi_bridge", - ) as factory, + ) as conf_factory, ): bridge = AsyncMock(spec=LinkPlayBridge) bridge.endpoint = HOST bridge.device = AsyncMock(spec=LinkPlayDevice) bridge.device.uuid = UUID bridge.device.name = NAME - factory.return_value = bridge - yield factory + conf_factory.return_value = bridge + yield conf_factory @pytest.fixture @@ -43,3 +53,55 @@ def mock_setup_entry() -> Generator[AsyncMock]: return_value=True, ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=NAME, + data={CONF_HOST: HOST}, + unique_id=UUID, + ) + + +@pytest.fixture +def mock_player_ex( + mock_player_ex: AsyncMock, +) -> AsyncMock: + """Mock a update_status of the LinkPlayPlayer.""" + mock_player_ex.return_value = load_fixture("getPlayerEx.json", DOMAIN) + return mock_player_ex + + +@pytest.fixture +def mock_status_ex( + mock_status_ex: AsyncMock, +) -> AsyncMock: + """Mock a update_status of the LinkPlayDevice.""" + mock_status_ex.return_value = load_fixture("getStatusEx.json", DOMAIN) + return mock_status_ex + + +@contextmanager +def mock_lp_aiohttp_client() -> Iterator[AiohttpClientMocker]: + """Context manager to mock aiohttp client.""" + mocker = AiohttpClientMocker() + + def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: + session = mocker.create_session(hass.loop) + + async def close_session(event): + """Close session.""" + await session.close() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, close_session) + + return session + + with mock.patch( + "homeassistant.components.linkplay.async_get_client_session", + side_effect=create_session, + ): + yield mocker diff --git a/tests/components/linkplay/fixtures/getPlayerEx.json b/tests/components/linkplay/fixtures/getPlayerEx.json new file mode 100644 index 00000000000000..79d09f942df2e7 --- /dev/null +++ b/tests/components/linkplay/fixtures/getPlayerEx.json @@ -0,0 +1,19 @@ +{ + "type": "0", + "ch": "0", + "mode": "0", + "loop": "0", + "eq": "0", + "status": "stop", + "curpos": "0", + "offset_pts": "0", + "totlen": "0", + "Title": "", + "Artist": "", + "Album": "", + "alarmflag": "0", + "plicount": "0", + "plicurr": "0", + "vol": "80", + "mute": "0" +} diff --git a/tests/components/linkplay/fixtures/getStatusEx.json b/tests/components/linkplay/fixtures/getStatusEx.json new file mode 100644 index 00000000000000..17eda4aeee8d88 --- /dev/null +++ b/tests/components/linkplay/fixtures/getStatusEx.json @@ -0,0 +1,81 @@ +{ + "uuid": "FF31F09E5001FBDE05462DBFFF31F09E", + "DeviceName": "Smart Zone 1_54B9", + "GroupName": "Smart Zone 1_54B9", + "ssid": "Smart Zone 1_54B9", + "language": "en_us", + "firmware": "4.6.415145", + "hardware": "A31", + "build": "release", + "project": "SMART_ZONE4_AMP", + "priv_prj": "SMART_ZONE4_AMP", + "project_build_name": "a31rakoit", + "Release": "20220427", + "temp_uuid": "97296CE38DE8CC3D", + "hideSSID": "1", + "SSIDStrategy": "2", + "branch": "A31_stable_4.6", + "group": "0", + "wmrm_version": "4.2", + "internet": "1", + "MAC": "00:22:6C:21:7F:1D", + 
"STA_MAC": "00:00:00:00:00:00", + "CountryCode": "CN", + "CountryRegion": "1", + "netstat": "0", + "essid": "", + "apcli0": "", + "eth2": "192.168.168.197", + "ra0": "10.10.10.254", + "eth_dhcp": "1", + "VersionUpdate": "0", + "NewVer": "0", + "set_dns_enable": "1", + "mcu_ver": "37", + "mcu_ver_new": "0", + "dsp_ver": "0", + "dsp_ver_new": "0", + "date": "2024:10:29", + "time": "17:13:22", + "tz": "1.0000", + "dst_enable": "1", + "region": "unknown", + "prompt_status": "1", + "iot_ver": "1.0.0", + "upnp_version": "1005", + "cap1": "0x305200", + "capability": "0x28e90b80", + "languages": "0x6", + "streams_all": "0x7bff7ffe", + "streams": "0x7b9831fe", + "external": "0x0", + "plm_support": "0x40152", + "preset_key": "10", + "spotify_active": "0", + "lbc_support": "0", + "privacy_mode": "0", + "WifiChannel": "11", + "RSSI": "0", + "BSSID": "", + "battery": "0", + "battery_percent": "0", + "securemode": "1", + "auth": "WPAPSKWPA2PSK", + "encry": "AES", + "upnp_uuid": "uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E", + "uart_pass_port": "8899", + "communication_port": "8819", + "web_firmware_update_hide": "0", + "ignore_talkstart": "0", + "web_login_result": "-1", + "silenceOTATime": "", + "ignore_silenceOTATime": "1", + "new_tunein_preset_and_alarm": "1", + "iheartradio_new": "1", + "new_iheart_podcast": "1", + "tidal_version": "2.0", + "service_version": "1.0", + "ETH_MAC": "00:22:6C:21:7F:20", + "security": "https/2.0", + "security_version": "2.0" +} diff --git a/tests/components/linkplay/snapshots/test_diagnostics.ambr b/tests/components/linkplay/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000000..d8c52a2564984a --- /dev/null +++ b/tests/components/linkplay/snapshots/test_diagnostics.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'device_info': dict({ + 'device': dict({ + 'properties': dict({ + 'BSSID': '', + 'CountryCode': 'CN', + 'CountryRegion': '1', + 'DeviceName': 'Smart Zone 1_54B9', + 'ETH_MAC': '00:22:6C:21:7F:20', + 'GroupName': 'Smart Zone 1_54B9', + 'MAC': '00:22:6C:21:7F:1D', + 'NewVer': '0', + 'RSSI': '0', + 'Release': '20220427', + 'SSIDStrategy': '2', + 'STA_MAC': '00:00:00:00:00:00', + 'VersionUpdate': '0', + 'WifiChannel': '11', + 'apcli0': '', + 'auth': 'WPAPSKWPA2PSK', + 'battery': '0', + 'battery_percent': '0', + 'branch': 'A31_stable_4.6', + 'build': 'release', + 'cap1': '0x305200', + 'capability': '0x28e90b80', + 'communication_port': '8819', + 'date': '2024:10:29', + 'dsp_ver': '0', + 'dsp_ver_new': '0', + 'dst_enable': '1', + 'encry': 'AES', + 'essid': '', + 'eth2': '192.168.168.197', + 'eth_dhcp': '1', + 'external': '0x0', + 'firmware': '4.6.415145', + 'group': '0', + 'hardware': 'A31', + 'hideSSID': '1', + 'ignore_silenceOTATime': '1', + 'ignore_talkstart': '0', + 'iheartradio_new': '1', + 'internet': '1', + 'iot_ver': '1.0.0', + 'language': 'en_us', + 'languages': '0x6', + 'lbc_support': '0', + 'mcu_ver': '37', + 'mcu_ver_new': '0', + 'netstat': '0', + 'new_iheart_podcast': '1', + 'new_tunein_preset_and_alarm': '1', + 'plm_support': '0x40152', + 'preset_key': '10', + 'priv_prj': 'SMART_ZONE4_AMP', + 'privacy_mode': '0', + 'project': 'SMART_ZONE4_AMP', + 'project_build_name': 'a31rakoit', + 'prompt_status': '1', + 'ra0': '10.10.10.254', + 'region': 'unknown', + 'securemode': '1', + 'security': 'https/2.0', + 'security_version': '2.0', + 'service_version': '1.0', + 'set_dns_enable': '1', + 'silenceOTATime': '', + 'spotify_active': '0', + 'ssid': 'Smart Zone 1_54B9', + 'streams': '0x7b9831fe', + 'streams_all': 
'0x7bff7ffe', + 'temp_uuid': '97296CE38DE8CC3D', + 'tidal_version': '2.0', + 'time': '17:13:22', + 'tz': '1.0000', + 'uart_pass_port': '8899', + 'upnp_uuid': 'uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E', + 'upnp_version': '1005', + 'uuid': 'FF31F09E5001FBDE05462DBFFF31F09E', + 'web_firmware_update_hide': '0', + 'web_login_result': '-1', + 'wmrm_version': '4.2', + }), + }), + 'endpoint': dict({ + 'endpoint': 'https://10.0.0.150', + }), + 'multiroom': None, + 'player': dict({ + 'properties': dict({ + 'Album': '', + 'Artist': '', + 'Title': '', + 'alarmflag': '0', + 'ch': '0', + 'curpos': '0', + 'eq': '0', + 'loop': '0', + 'mode': '0', + 'mute': '0', + 'offset_pts': '0', + 'plicount': '0', + 'plicurr': '0', + 'status': 'stop', + 'totlen': '0', + 'type': '0', + 'vol': '80', + }), + }), + }), + }) +# --- diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py new file mode 100644 index 00000000000000..369142978a341e --- /dev/null +++ b/tests/components/linkplay/test_diagnostics.py @@ -0,0 +1,53 @@ +"""Tests for the LinkPlay diagnostics.""" + +from unittest.mock import patch + +from linkplay.bridge import LinkPlayMultiroom +from linkplay.consts import API_ENDPOINT +from linkplay.endpoint import LinkPlayApiEndpoint +from syrupy import SnapshotAssertion + +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .conftest import HOST, mock_lp_aiohttp_client + +from tests.common import MockConfigEntry, load_fixture +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + with ( + mock_lp_aiohttp_client() as mock_session, + patch.object(LinkPlayMultiroom, "update_status", return_value=None), + ): + endpoints = [ + LinkPlayApiEndpoint(protocol="https", endpoint=HOST, session=None), + LinkPlayApiEndpoint(protocol="http", endpoint=HOST, session=None), + ] + for endpoint in endpoints: + mock_session.get( + API_ENDPOINT.format(str(endpoint), "getPlayerStatusEx"), + text=load_fixture("getPlayerEx.json", DOMAIN), + ) + + mock_session.get( + API_ENDPOINT.format(str(endpoint), "getStatusEx"), + text=load_fixture("getStatusEx.json", DOMAIN), + ) + + await setup_integration(hass, mock_config_entry) + + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/local_file/conftest.py b/tests/components/local_file/conftest.py new file mode 100644 index 00000000000000..4ec06369c94253 --- /dev/null +++ b/tests/components/local_file/conftest.py @@ -0,0 +1,63 @@ +"""Fixtures for the Local file integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from homeassistant.components.local_file.const import DEFAULT_NAME, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically patch setup.""" + with patch( + "homeassistant.components.local_file.async_setup_entry", + 
return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. + + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return {CONF_NAME: DEFAULT_NAME, CONF_FILE_PATH: "mock.file"} + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Local file integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/local_file/test_camera.py b/tests/components/local_file/test_camera.py index 132212df0ec65f..ddfdf4249bd1cf 100644 --- a/tests/components/local_file/test_camera.py +++ b/tests/components/local_file/test_camera.py @@ -1,222 +1,189 @@ """The tests for local file camera component.""" from http import HTTPStatus -from unittest import mock +from typing import Any +from unittest.mock import Mock, mock_open, patch import pytest -from homeassistant.components.local_file.const import DOMAIN, SERVICE_UPDATE_FILE_PATH +from homeassistant.components.local_file.const import ( + DEFAULT_NAME, + DOMAIN, + SERVICE_UPDATE_FILE_PATH, +) +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ATTR_ENTITY_ID, CONF_FILE_PATH -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component +from homeassistant.util import slugify +from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator async def test_loading_file( - hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + loaded_entry: MockConfigEntry, ) -> None: """Test that it loads image from disk.""" - with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=True)), - mock.patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - mock.Mock(return_value=(None, None)), - ), - ): - await async_setup_component( - hass, - "camera", - { - "camera": { - "name": "config_test", - "platform": "local_file", - "file_path": "mock.file", - } - }, - ) - await hass.async_block_till_done() client = await hass_client() - m_open = mock.mock_open(read_data=b"hello") - with mock.patch( - "homeassistant.components.local_file.camera.open", m_open, create=True - ): - resp = await client.get("/api/camera_proxy/camera.config_test") + m_open = mock_open(read_data=b"hello") + with patch("homeassistant.components.local_file.camera.open", m_open, create=True): + resp = await client.get("/api/camera_proxy/camera.local_file") assert resp.status == HTTPStatus.OK body = await resp.text() assert body == "hello" -async def test_file_not_readable( - hass: HomeAssistant, 
caplog: pytest.LogCaptureFixture -) -> None: - """Test a warning is shown setup when file is not readable.""" - with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=False)), - ): - await async_setup_component( - hass, - "camera", - { - "camera": { - "name": "config_test", - "platform": "local_file", - "file_path": "mock.file", - } - }, - ) - await hass.async_block_till_done() - - assert "File path mock.file is not readable;" in caplog.text - - async def test_file_not_readable_after_setup( hass: HomeAssistant, hass_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture, + loaded_entry: MockConfigEntry, ) -> None: """Test a warning is shown setup when file is not readable.""" - with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=True)), - mock.patch( - "homeassistant.components.local_file.camera.mimetypes.guess_type", - mock.Mock(return_value=(None, None)), - ), - ): - await async_setup_component( - hass, - "camera", - { - "camera": { - "name": "config_test", - "platform": "local_file", - "file_path": "mock.file", - } - }, - ) - await hass.async_block_till_done() client = await hass_client() - with mock.patch( + with patch( "homeassistant.components.local_file.camera.open", side_effect=FileNotFoundError ): - resp = await client.get("/api/camera_proxy/camera.config_test") + resp = await client.get("/api/camera_proxy/camera.local_file") assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR - assert "Could not read camera config_test image from file: mock.file" in caplog.text + assert "Could not read camera Local File image from file: mock.file" in caplog.text +@pytest.mark.parametrize( + ("config", "url", "content_type"), + [ + ( + { + "name": "test_jpg", + "file_path": "/path/to/image.jpg", + }, + "/api/camera_proxy/camera.test_jpg", + "image/jpeg", + ), + ( + { + "name": "test_png", + "file_path": "/path/to/image.png", + }, + "/api/camera_proxy/camera.test_png", + "image/png", + ), + ( + { + "name": "test_svg", + "file_path": "/path/to/image.svg", + }, + "/api/camera_proxy/camera.test_svg", + "image/svg+xml", + ), + ( + { + "name": "test_no_ext", + "file_path": "/path/to/image", + }, + "/api/camera_proxy/camera.test_no_ext", + "image/jpeg", + ), + ], +) async def test_camera_content_type( - hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config: dict[str, Any], + url: str, + content_type: str, ) -> None: """Test local_file camera content_type.""" - cam_config_jpg = { - "name": "test_jpg", - "platform": "local_file", - "file_path": "/path/to/image.jpg", - } - cam_config_png = { - "name": "test_png", - "platform": "local_file", - "file_path": "/path/to/image.png", - } - cam_config_svg = { - "name": "test_svg", - "platform": "local_file", - "file_path": "/path/to/image.svg", - } - cam_config_noext = { - "name": "test_no_ext", - "platform": "local_file", - "file_path": "/path/to/image", - } + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=True)), + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), ): - await async_setup_component( - hass, - "camera", - { - "camera": [ - cam_config_jpg, - cam_config_png, - cam_config_svg, - 
cam_config_noext, - ] - }, - ) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() client = await hass_client() image = "hello" - m_open = mock.mock_open(read_data=image.encode()) - with mock.patch( - "homeassistant.components.local_file.camera.open", m_open, create=True - ): - resp_1 = await client.get("/api/camera_proxy/camera.test_jpg") - resp_2 = await client.get("/api/camera_proxy/camera.test_png") - resp_3 = await client.get("/api/camera_proxy/camera.test_svg") - resp_4 = await client.get("/api/camera_proxy/camera.test_no_ext") + m_open = mock_open(read_data=image.encode()) + with patch("homeassistant.components.local_file.camera.open", m_open, create=True): + resp_1 = await client.get(url) assert resp_1.status == HTTPStatus.OK - assert resp_1.content_type == "image/jpeg" + assert resp_1.content_type == content_type body = await resp_1.text() assert body == image - assert resp_2.status == HTTPStatus.OK - assert resp_2.content_type == "image/png" - body = await resp_2.text() - assert body == image - - assert resp_3.status == HTTPStatus.OK - assert resp_3.content_type == "image/svg+xml" - body = await resp_3.text() - assert body == image - - # default mime type - assert resp_4.status == HTTPStatus.OK - assert resp_4.content_type == "image/jpeg" - body = await resp_4.text() - assert body == image - -async def test_update_file_path(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "get_config", + [ + { + "name": DEFAULT_NAME, + "file_path": "mock/path.jpg", + } + ], +) +async def test_update_file_path( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: """Test update_file_path service.""" # Setup platform + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options={ + "name": "local_file_camera_2", + "file_path": "mock/path_2.jpg", + }, + entry_id="2", + ) + + config_entry.add_to_hass(hass) with ( - mock.patch("os.path.isfile", mock.Mock(return_value=True)), - mock.patch("os.access", mock.Mock(return_value=True)), - mock.patch( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( "homeassistant.components.local_file.camera.mimetypes.guess_type", - mock.Mock(return_value=(None, None)), + Mock(return_value=(None, None)), ), ): - camera_1 = {"platform": "local_file", "file_path": "mock/path.jpg"} - camera_2 = { - "platform": "local_file", - "name": "local_file_camera_2", - "file_path": "mock/path_2.jpg", - } - await async_setup_component(hass, "camera", {"camera": [camera_1, camera_2]}) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - # Fetch state and check motion detection attribute - state = hass.states.get("camera.local_file") - assert state.attributes.get("friendly_name") == "Local File" - assert state.attributes.get("file_path") == "mock/path.jpg" + # Fetch state and check motion detection attribute + state = hass.states.get("camera.local_file") + assert state.attributes.get("friendly_name") == "Local File" + assert state.attributes.get("file_path") == "mock/path.jpg" - service_data = {"entity_id": "camera.local_file", "file_path": "new/path.jpg"} + service_data = {"entity_id": "camera.local_file", "file_path": "new/path.jpg"} + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): await hass.services.async_call( 
DOMAIN, SERVICE_UPDATE_FILE_PATH, @@ -224,12 +191,12 @@ async def test_update_file_path(hass: HomeAssistant) -> None: blocking=True, ) - state = hass.states.get("camera.local_file") - assert state.attributes.get("file_path") == "new/path.jpg" + state = hass.states.get("camera.local_file") + assert state.attributes.get("file_path") == "new/path.jpg" - # Check that local_file_camera_2 file_path is still as configured - state = hass.states.get("camera.local_file_camera_2") - assert state.attributes.get("file_path") == "mock/path_2.jpg" + # Check that local_file_camera_2 file_path is still as configured + state = hass.states.get("camera.local_file_camera_2") + assert state.attributes.get("file_path") == "mock/path_2.jpg" # Assert it fails if file is not readable service_data = { @@ -245,3 +212,76 @@ async def test_update_file_path(hass: HomeAssistant) -> None: service_data, blocking=True, ) + + +async def test_import_from_yaml_success( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test import.""" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "local_file", + "file_path": "mock.file", + } + }, + ) + await hass.async_block_till_done() + + assert hass.config_entries.async_has_entries(DOMAIN) + state = hass.states.get("camera.config_test") + assert state.attributes.get("file_path") == "mock.file" + + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + assert issue + assert issue.translation_key == "deprecated_yaml" + + +async def test_import_from_yaml_fails( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test import fails due to not accessible file.""" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=False)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "local_file", + "file_path": "mock.file", + } + }, + ) + await hass.async_block_till_done() + + assert not hass.config_entries.async_has_entries(DOMAIN) + assert not hass.states.get("camera.config_test") + + issue = issue_registry.async_get_issue( + DOMAIN, f"no_access_path_{slugify("mock.file")}" + ) + assert issue + assert issue.translation_key == "no_access_path" diff --git a/tests/components/local_file/test_config_flow.py b/tests/components/local_file/test_config_flow.py new file mode 100644 index 00000000000000..dda9d606107a9e --- /dev/null +++ b/tests/components/local_file/test_config_flow.py @@ -0,0 +1,235 @@ +"""Test the Scrape config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.local_file.const import DEFAULT_NAME, DOMAIN +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form for sensor.""" + + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_FILE_PATH: "mock.new.file"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_NAME: DEFAULT_NAME, CONF_FILE_PATH: "mock.new.file"} + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("camera.local_file") + assert state is not None + + +async def test_validation_options( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test validation.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=False)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + }, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "not_readable_path"} + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.new.file", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.new.file", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_entry_already_exist( + hass: 
HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_import(hass: HomeAssistant) -> None: + """Test import.""" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + "name": DEFAULT_NAME, + "file_path": "mock/path.jpg", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock/path.jpg", + } + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_import_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test import abort existing entry.""" + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=True)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + CONF_NAME: DEFAULT_NAME, + CONF_FILE_PATH: "mock.file", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/local_file/test_init.py b/tests/components/local_file/test_init.py new file mode 100644 index 00000000000000..2b8b93e81008ff --- /dev/null +++ b/tests/components/local_file/test_init.py @@ -0,0 +1,47 @@ +"""Test Statistics component setup process.""" + +from __future__ import annotations + +from unittest.mock import Mock, patch + +from homeassistant.components.local_file.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_file_not_readable_during_startup( + hass: HomeAssistant, + get_config: dict[str, str], +) -> None: + """Test a warning is shown setup when file is not readable.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + 
options=get_config, + entry_id="1", + ) + config_entry.add_to_hass(hass) + + with ( + patch("os.path.isfile", Mock(return_value=True)), + patch("os.access", Mock(return_value=False)), + patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + Mock(return_value=(None, None)), + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_ERROR diff --git a/tests/components/locative/test_init.py b/tests/components/locative/test_init.py index 89d26ea6c7a67f..c41db68e3d6fea 100644 --- a/tests/components/locative/test_init.py +++ b/tests/components/locative/test_init.py @@ -11,8 +11,8 @@ from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.locative import DOMAIN, TRACKER_UPDATE -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.dispatcher import DATA_DISPATCHER from homeassistant.setup import async_setup_component diff --git a/tests/components/logbook/test_websocket_api.py b/tests/components/logbook/test_websocket_api.py index 2a97556f5ad011..50139d0f4f711e 100644 --- a/tests/components/logbook/test_websocket_api.py +++ b/tests/components/logbook/test_websocket_api.py @@ -2985,8 +2985,8 @@ async def test_live_stream_with_changed_state_change( ] ) - hass.states.async_set("binary_sensor.is_light", "ignored") - hass.states.async_set("binary_sensor.is_light", "init") + hass.states.async_set("binary_sensor.is_light", "unavailable") + hass.states.async_set("binary_sensor.is_light", "unknown") await async_wait_recording_done(hass) @callback @@ -3023,7 +3023,7 @@ def auto_off_listener(event): # Make sure we get rows back in order assert recieved_rows == [ - {"entity_id": "binary_sensor.is_light", "state": "init", "when": ANY}, + {"entity_id": "binary_sensor.is_light", "state": "unknown", "when": ANY}, {"entity_id": "binary_sensor.is_light", "state": "on", "when": ANY}, {"entity_id": "binary_sensor.is_light", "state": "off", "when": ANY}, ] diff --git a/tests/components/lovelace/test_cast.py b/tests/components/lovelace/test_cast.py index c54b31d929761b..dc57975701d8a7 100644 --- a/tests/components/lovelace/test_cast.py +++ b/tests/components/lovelace/test_cast.py @@ -8,8 +8,8 @@ from homeassistant.components.lovelace import cast as lovelace_cast from homeassistant.components.media_player import MediaClass -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component diff --git a/tests/components/madvr/test_config_flow.py b/tests/components/madvr/test_config_flow.py index 42081f3b9b5dfc..35db8a01b5bf92 100644 --- a/tests/components/madvr/test_config_flow.py +++ b/tests/components/madvr/test_config_flow.py @@ -138,7 +138,7 @@ async def test_reconfigure_flow( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {} # define new host @@ -165,6 +165,10 @@ async def 
test_reconfigure_flow( mock_madvr_client.async_cancel_tasks.assert_called() +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.madvr.config.abort.set_up_new_device"], +) async def test_reconfigure_new_device( hass: HomeAssistant, mock_madvr_client: AsyncMock, @@ -204,7 +208,7 @@ async def test_reconfigure_flow_errors( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # Test CannotConnect error mock_madvr_client.open_connection.side_effect = TimeoutError diff --git a/tests/components/mailgun/test_init.py b/tests/components/mailgun/test_init.py index 2e60c56faa4771..7dbde02b10f615 100644 --- a/tests/components/mailgun/test_init.py +++ b/tests/components/mailgun/test_init.py @@ -8,9 +8,9 @@ from homeassistant import config_entries from homeassistant.components import mailgun, webhook -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_API_KEY, CONF_DOMAIN from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component diff --git a/tests/components/manual/test_alarm_control_panel.py b/tests/components/manual/test_alarm_control_panel.py index 7900dfd1c911d9..9fc92cd54581f4 100644 --- a/tests/components/manual/test_alarm_control_panel.py +++ b/tests/components/manual/test_alarm_control_panel.py @@ -7,7 +7,10 @@ import pytest from homeassistant.components import alarm_control_panel -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.demo import alarm_control_panel as demo from homeassistant.components.manual.alarm_control_panel import ( ATTR_NEXT_STATE, @@ -21,15 +24,6 @@ SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_ARM_VACATION, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import CoreState, HomeAssistant, State from homeassistant.exceptions import ServiceValidationError @@ -53,11 +47,14 @@ async def test_setup_demo_platform(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: @@ -79,7 +76,7 @@ async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: entity_id = "alarm_control_panel.test" 
- assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -94,11 +91,14 @@ async def test_no_pending(hass: HomeAssistant, service, expected_state) -> None: @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending_when_code_not_req( @@ -123,7 +123,7 @@ async def test_no_pending_when_code_not_req( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -138,11 +138,14 @@ async def test_no_pending_when_code_not_req( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_pending(hass: HomeAssistant, service, expected_state) -> None: @@ -164,7 +167,7 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -173,7 +176,7 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMING + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING state = hass.states.get(entity_id) assert state.attributes["next_state"] == expected_state @@ -203,11 +206,14 @@ async def test_with_pending(hass: HomeAssistant, service, expected_state) -> Non @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, 
AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) -> None: @@ -229,7 +235,7 @@ async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) - entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await hass.services.async_call( @@ -242,17 +248,20 @@ async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) - blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_template_code(hass: HomeAssistant, service, expected_state) -> None: @@ -274,7 +283,7 @@ async def test_with_template_code(hass: HomeAssistant, service, expected_state) entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -290,11 +299,14 @@ async def test_with_template_code(hass: HomeAssistant, service, expected_state) @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_specific_pending( @@ -324,7 +336,7 @@ async def test_with_specific_pending( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMING + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -355,11 +367,11 @@ async def test_trigger_no_pending(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == 
STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=60) with patch( @@ -370,8 +382,8 @@ async def test_trigger_no_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_delay(hass: HomeAssistant) -> None: @@ -394,17 +406,17 @@ async def test_trigger_with_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -415,8 +427,8 @@ async def test_trigger_with_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_zero_trigger_time(hass: HomeAssistant) -> None: @@ -438,11 +450,11 @@ async def test_trigger_zero_trigger_time(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_zero_trigger_time_with_pending(hass: HomeAssistant) -> None: @@ -464,11 +476,11 @@ async def test_trigger_zero_trigger_time_with_pending(hass: HomeAssistant) -> No entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_pending(hass: HomeAssistant) -> None: @@ -490,14 +502,14 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING state = hass.states.get(entity_id) - assert 
state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -508,8 +520,8 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -520,7 +532,7 @@ async def test_trigger_with_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: @@ -544,17 +556,17 @@ async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -565,8 +577,8 @@ async def test_trigger_with_unused_specific_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: @@ -590,17 +602,17 @@ async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -611,8 +623,8 @@ async def test_trigger_with_specific_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert 
state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: @@ -635,17 +647,17 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -656,8 +668,8 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -668,8 +680,8 @@ async def test_trigger_with_pending_and_delay(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> None: @@ -693,17 +705,17 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -714,8 +726,8 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -726,8 +738,8 @@ async def test_trigger_with_pending_and_specific_delay(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert 
state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: @@ -752,7 +764,7 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -763,8 +775,8 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -774,7 +786,7 @@ async def test_trigger_with_specific_pending(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: @@ -796,13 +808,13 @@ async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -812,7 +824,7 @@ async def test_trigger_with_disarm_after_trigger(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_zero_specific_trigger_time(hass: HomeAssistant) -> None: @@ -835,11 +847,11 @@ async def test_trigger_with_zero_specific_trigger_time(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_unused_zero_specific_trigger_time( @@ -864,13 +876,13 @@ async def test_trigger_with_unused_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert 
state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -880,7 +892,7 @@ async def test_trigger_with_unused_zero_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: @@ -902,13 +914,13 @@ async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -918,7 +930,7 @@ async def test_trigger_with_specific_trigger_time(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None: @@ -941,17 +953,17 @@ async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -961,7 +973,7 @@ async def test_trigger_with_no_disarm_after_trigger(hass: HomeAssistant) -> None async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_back_to_back_trigger_with_no_disarm_after_trigger( @@ -986,17 +998,17 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await 
common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1006,13 +1018,13 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1022,7 +1034,7 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: @@ -1043,15 +1055,15 @@ async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1061,7 +1073,7 @@ async def test_disarm_while_pending_trigger(hass: HomeAssistant) -> None: async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> None: @@ -1083,7 +1095,7 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert ( hass.states.get(entity_id).attributes[alarm_control_panel.ATTR_CODE_FORMAT] == alarm_control_panel.CodeFormat.NUMBER @@ -1091,12 +1103,12 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert 
hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1107,8 +1119,8 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_disarm_with_template_code(hass: HomeAssistant) -> None: @@ -1130,23 +1142,23 @@ async def test_disarm_with_template_code(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_home(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME await common.async_alarm_disarm(hass, "abc") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: @@ -1171,21 +1183,21 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMING - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.attributes["next_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMING + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["next_state"] == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMING - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED - assert state.attributes["next_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMING + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["next_state"] == AlarmControlPanelState.ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with freeze_time(future): @@ -1193,14 +1205,14 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert 
state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with freeze_time(future): @@ -1208,19 +1220,19 @@ async def test_arm_away_after_disabled_disarmed(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.attributes["previous_state"] == STATE_ALARM_ARMED_AWAY - assert state.state == STATE_ALARM_TRIGGERED + assert state.attributes["previous_state"] == AlarmControlPanelState.ARMED_AWAY + assert state.state == AlarmControlPanelState.TRIGGERED @pytest.mark.parametrize( "expected_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), - (STATE_ALARM_DISARMED), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), + (AlarmControlPanelState.DISARMED), ], ) async def test_restore_state(hass: HomeAssistant, expected_state) -> None: @@ -1253,11 +1265,11 @@ async def test_restore_state(hass: HomeAssistant, expected_state) -> None: @pytest.mark.parametrize( "expected_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), ], ) async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None: @@ -1265,7 +1277,7 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None time = dt_util.utcnow() - timedelta(seconds=15) entity_id = "alarm_control_panel.test" attributes = { - "previous_state": STATE_ALARM_DISARMED, + "previous_state": AlarmControlPanelState.DISARMED, "next_state": expected_state, } mock_restore_cache( @@ -1292,9 +1304,9 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None state = hass.states.get(entity_id) assert state - assert state.attributes["previous_state"] == STATE_ALARM_DISARMED + assert state.attributes["previous_state"] == AlarmControlPanelState.DISARMED assert state.attributes["next_state"] == expected_state - assert state.state == STATE_ALARM_ARMING + assert state.state == AlarmControlPanelState.ARMING future = time + timedelta(seconds=61) with freeze_time(future): @@ -1308,12 +1320,12 @@ async def test_restore_state_arming(hass: HomeAssistant, expected_state) -> None @pytest.mark.parametrize( "previous_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), - (STATE_ALARM_DISARMED), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), + (AlarmControlPanelState.DISARMED), ], ) async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> None: @@ -1322,11 +1334,18 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non entity_id = "alarm_control_panel.test" attributes = { "previous_state": previous_state, - "next_state": STATE_ALARM_TRIGGERED, + 
"next_state": AlarmControlPanelState.TRIGGERED, } mock_restore_cache( hass, - (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), + ( + State( + entity_id, + AlarmControlPanelState.TRIGGERED, + attributes, + last_updated=time, + ), + ), ) hass.set_state(CoreState.starting) @@ -1351,8 +1370,8 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non state = hass.states.get(entity_id) assert state assert state.attributes["previous_state"] == previous_state - assert state.attributes["next_state"] == STATE_ALARM_TRIGGERED - assert state.state == STATE_ALARM_PENDING + assert state.attributes["next_state"] == AlarmControlPanelState.TRIGGERED + assert state.state == AlarmControlPanelState.PENDING future = time + timedelta(seconds=61) with freeze_time(future): @@ -1360,7 +1379,7 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED future = time + timedelta(seconds=121) with freeze_time(future): @@ -1374,12 +1393,12 @@ async def test_restore_state_pending(hass: HomeAssistant, previous_state) -> Non @pytest.mark.parametrize( "previous_state", [ - (STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION), - (STATE_ALARM_DISARMED), + (AlarmControlPanelState.ARMED_AWAY), + (AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + (AlarmControlPanelState.ARMED_HOME), + (AlarmControlPanelState.ARMED_NIGHT), + (AlarmControlPanelState.ARMED_VACATION), + (AlarmControlPanelState.DISARMED), ], ) async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> None: @@ -1391,7 +1410,14 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N } mock_restore_cache( hass, - (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), + ( + State( + entity_id, + AlarmControlPanelState.TRIGGERED, + attributes, + last_updated=time, + ), + ), ) hass.set_state(CoreState.starting) @@ -1417,7 +1443,7 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N assert state assert state.attributes[ATTR_PREVIOUS_STATE] == previous_state assert state.attributes[ATTR_NEXT_STATE] is None - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED future = time + timedelta(seconds=121) with freeze_time(future): @@ -1433,11 +1459,18 @@ async def test_restore_state_triggered_long_ago(hass: HomeAssistant) -> None: time = dt_util.utcnow() - timedelta(seconds=125) entity_id = "alarm_control_panel.test" attributes = { - "previous_state": STATE_ALARM_ARMED_AWAY, + "previous_state": AlarmControlPanelState.ARMED_AWAY, } mock_restore_cache( hass, - (State(entity_id, STATE_ALARM_TRIGGERED, attributes, last_updated=time),), + ( + State( + entity_id, + AlarmControlPanelState.TRIGGERED, + attributes, + last_updated=time, + ), + ), ) hass.set_state(CoreState.starting) @@ -1460,7 +1493,7 @@ async def test_restore_state_triggered_long_ago(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED async def test_default_arming_states(hass: HomeAssistant) -> None: diff --git a/tests/components/manual_mqtt/test_alarm_control_panel.py 
b/tests/components/manual_mqtt/test_alarm_control_panel.py index a1c913135a7d06..2b401cb10a0e3a 100644 --- a/tests/components/manual_mqtt/test_alarm_control_panel.py +++ b/tests/components/manual_mqtt/test_alarm_control_panel.py @@ -7,6 +7,7 @@ import pytest from homeassistant.components import alarm_control_panel +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, @@ -15,14 +16,6 @@ SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_ARM_VACATION, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -78,11 +71,14 @@ async def test_fail_setup_without_command_topic( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending( @@ -111,7 +107,7 @@ async def test_no_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -126,11 +122,14 @@ async def test_no_pending( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_no_pending_when_code_not_req( @@ -160,7 +159,7 @@ async def test_no_pending_when_code_not_req( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -175,11 +174,14 @@ async def test_no_pending_when_code_not_req( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + 
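# --- Illustrative sketch, not part of the patch -------------------------------
# The hunks above and below swap the deprecated STATE_ALARM_* constants from
# homeassistant.const for the AlarmControlPanelState enum exported by the
# alarm_control_panel integration. A minimal before/after sketch of the
# assertion pattern repeated throughout these tests; the entity id and enum
# members are taken from the diff itself, the helper name is hypothetical.
#
# Before:
#     from homeassistant.const import STATE_ALARM_DISARMED
#     assert hass.states.get("alarm_control_panel.test").state == STATE_ALARM_DISARMED
#
# After:
from homeassistant.components.alarm_control_panel import AlarmControlPanelState
from homeassistant.core import HomeAssistant


def assert_alarm_state(
    hass: HomeAssistant, entity_id: str, expected: AlarmControlPanelState
) -> None:
    """Hypothetical helper mirroring the repeated state assertion in these tests."""
    assert hass.states.get(entity_id).state == expected


# Usage, as in the tests above:
# assert_alarm_state(hass, "alarm_control_panel.test", AlarmControlPanelState.DISARMED)
# -------------------------------------------------------------------------------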
SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_pending( @@ -208,7 +210,7 @@ async def test_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -217,7 +219,7 @@ async def test_with_pending( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING state = hass.states.get(entity_id) assert state.attributes["post_pending_state"] == expected_state @@ -247,11 +249,14 @@ async def test_with_pending( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_invalid_code( @@ -280,7 +285,7 @@ async def test_with_invalid_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): await hass.services.async_call( @@ -290,17 +295,20 @@ async def test_with_invalid_code( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_template_code( @@ -329,7 +337,7 @@ async def test_with_template_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await hass.services.async_call( alarm_control_panel.DOMAIN, @@ -345,11 +353,14 @@ async def test_with_template_code( @pytest.mark.parametrize( ("service", "expected_state"), [ - (SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (SERVICE_ALARM_ARM_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - 
(SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (SERVICE_ALARM_ARM_NIGHT, STATE_ALARM_ARMED_NIGHT), - (SERVICE_ALARM_ARM_VACATION, STATE_ALARM_ARMED_VACATION), + (SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + SERVICE_ALARM_ARM_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + (SERVICE_ALARM_ARM_HOME, AlarmControlPanelState.ARMED_HOME), + (SERVICE_ALARM_ARM_NIGHT, AlarmControlPanelState.ARMED_NIGHT), + (SERVICE_ALARM_ARM_VACATION, AlarmControlPanelState.ARMED_VACATION), ], ) async def test_with_specific_pending( @@ -384,7 +395,7 @@ async def test_with_specific_pending( blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -419,12 +430,12 @@ async def test_trigger_no_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=60) with patch( @@ -434,7 +445,7 @@ async def test_trigger_no_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_delay( @@ -461,17 +472,17 @@ async def test_trigger_with_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -482,7 +493,7 @@ async def test_trigger_with_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_zero_trigger_time( @@ -508,11 +519,11 @@ async def test_trigger_zero_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_zero_trigger_time_with_pending( @@ -538,11 +549,11 @@ async def test_trigger_zero_trigger_time_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await 
common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_pending( @@ -568,14 +579,14 @@ async def test_trigger_with_pending( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING state = hass.states.get(entity_id) - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -585,7 +596,7 @@ async def test_trigger_with_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -595,7 +606,7 @@ async def test_trigger_with_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_disarm_after_trigger( @@ -621,11 +632,11 @@ async def test_trigger_with_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -635,7 +646,7 @@ async def test_trigger_with_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_zero_specific_trigger_time( @@ -662,11 +673,11 @@ async def test_trigger_with_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_unused_zero_specific_trigger_time( @@ -693,11 +704,11 @@ async def test_trigger_with_unused_zero_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -707,7 +718,7 @@ async def test_trigger_with_unused_zero_specific_trigger_time( 
async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_specific_trigger_time( @@ -733,11 +744,11 @@ async def test_trigger_with_specific_trigger_time( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -747,7 +758,7 @@ async def test_trigger_with_specific_trigger_time( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_back_to_back_trigger_with_no_disarm_after_trigger( @@ -773,15 +784,15 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -791,11 +802,11 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -805,7 +816,7 @@ async def test_back_to_back_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_disarm_while_pending_trigger( @@ -830,15 +841,15 @@ async def test_disarm_while_pending_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -848,7 +859,7 @@ async def 
test_disarm_while_pending_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_disarm_during_trigger_with_invalid_code( @@ -874,7 +885,7 @@ async def test_disarm_during_trigger_with_invalid_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert ( hass.states.get(entity_id).attributes[alarm_control_panel.ATTR_CODE_FORMAT] == alarm_control_panel.CodeFormat.NUMBER @@ -882,12 +893,12 @@ async def test_disarm_during_trigger_with_invalid_code( await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING with pytest.raises(HomeAssistantError, match=r"Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -897,7 +908,7 @@ async def test_disarm_during_trigger_with_invalid_code( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_unused_specific_delay( @@ -925,17 +936,17 @@ async def test_trigger_with_unused_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -946,7 +957,7 @@ async def test_trigger_with_unused_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_specific_delay( @@ -974,17 +985,17 @@ async def test_trigger_with_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == 
AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -995,7 +1006,7 @@ async def test_trigger_with_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_delay( @@ -1023,17 +1034,17 @@ async def test_trigger_with_pending_and_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -1044,8 +1055,8 @@ async def test_trigger_with_pending_and_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -1056,7 +1067,7 @@ async def test_trigger_with_pending_and_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_trigger_with_pending_and_specific_delay( @@ -1085,17 +1096,17 @@ async def test_trigger_with_pending_and_specific_delay( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( @@ -1106,8 +1117,8 @@ async def test_trigger_with_pending_and_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED future += timedelta(seconds=1) with patch( @@ -1118,7 +1129,7 @@ async def test_trigger_with_pending_and_specific_delay( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def 
test_trigger_with_specific_pending( @@ -1147,7 +1158,7 @@ async def test_trigger_with_specific_pending( await common.async_alarm_trigger(hass) - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( @@ -1157,7 +1168,7 @@ async def test_trigger_with_specific_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1167,7 +1178,7 @@ async def test_trigger_with_specific_pending( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_trigger_with_no_disarm_after_trigger( @@ -1194,15 +1205,15 @@ async def test_trigger_with_no_disarm_after_trigger( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( @@ -1212,7 +1223,7 @@ async def test_trigger_with_no_disarm_after_trigger( async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY async def test_arm_away_after_disabled_disarmed( @@ -1241,21 +1252,21 @@ async def test_arm_away_after_disabled_disarmed( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_away(hass, CODE) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED - assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["pre_pending_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["post_pending_state"] == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED - assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.PENDING + assert state.attributes["pre_pending_state"] == AlarmControlPanelState.DISARMED + assert state.attributes["post_pending_state"] == AlarmControlPanelState.ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with freeze_time(future): @@ -1263,14 +1274,18 @@ async def test_arm_away_after_disabled_disarmed( await hass.async_block_till_done() state = 
hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["pre_pending_state"] == STATE_ALARM_ARMED_AWAY - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.PENDING + assert ( + state.attributes["pre_pending_state"] == AlarmControlPanelState.ARMED_AWAY + ) + assert ( + state.attributes["post_pending_state"] == AlarmControlPanelState.TRIGGERED + ) future += timedelta(seconds=1) with freeze_time(future): @@ -1278,7 +1293,7 @@ async def test_arm_away_after_disabled_disarmed( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED async def test_disarm_with_template_code( @@ -1304,33 +1319,33 @@ async def test_disarm_with_template_code( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_arm_home(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME with pytest.raises(HomeAssistantError, match=r"Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_ARMED_HOME + assert state.state == AlarmControlPanelState.ARMED_HOME await common.async_alarm_disarm(hass, "abc") state = hass.states.get(entity_id) - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED @pytest.mark.parametrize( ("config", "expected_state"), [ - ("payload_arm_away", STATE_ALARM_ARMED_AWAY), - ("payload_arm_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS), - ("payload_arm_home", STATE_ALARM_ARMED_HOME), - ("payload_arm_night", STATE_ALARM_ARMED_NIGHT), - ("payload_arm_vacation", STATE_ALARM_ARMED_VACATION), + ("payload_arm_away", AlarmControlPanelState.ARMED_AWAY), + ("payload_arm_custom_bypass", AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + ("payload_arm_home", AlarmControlPanelState.ARMED_HOME), + ("payload_arm_night", AlarmControlPanelState.ARMED_NIGHT), + ("payload_arm_vacation", AlarmControlPanelState.ARMED_VACATION), ], ) async def test_arm_via_command_topic( @@ -1359,12 +1374,12 @@ async def test_arm_via_command_topic( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED # Fire the arm command via MQTT; ensure state changes to arming async_fire_mqtt_message(hass, "alarm/command", command) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) @@ -1400,18 +1415,18 @@ async def test_disarm_pending_via_command_topic( entity_id = "alarm_control_panel.test" - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() - assert 
hass.states.get(entity_id).state == STATE_ALARM_PENDING + assert hass.states.get(entity_id).state == AlarmControlPanelState.PENDING # Now that we're pending, receive a command to disarm async_fire_mqtt_message(hass, "alarm/command", "DISARM") await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED async def test_state_changes_are_published_to_mqtt( @@ -1437,7 +1452,7 @@ async def test_state_changes_are_published_to_mqtt( # Component should send disarmed alarm state on startup await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_DISARMED, 0, True + "alarm/state", AlarmControlPanelState.DISARMED, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1445,7 +1460,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_home(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True + "alarm/state", AlarmControlPanelState.PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1457,7 +1472,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_HOME, 0, True + "alarm/state", AlarmControlPanelState.ARMED_HOME, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1465,7 +1480,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_away(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True + "alarm/state", AlarmControlPanelState.PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1477,7 +1492,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_AWAY, 0, True + "alarm/state", AlarmControlPanelState.ARMED_AWAY, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1485,7 +1500,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_arm_night(hass, "1234") await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True + "alarm/state", AlarmControlPanelState.PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit @@ -1497,7 +1512,7 @@ async def test_state_changes_are_published_to_mqtt( async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_NIGHT, 0, True + "alarm/state", AlarmControlPanelState.ARMED_NIGHT, 0, True ) mqtt_mock.async_publish.reset_mock() @@ -1505,7 +1520,7 @@ async def test_state_changes_are_published_to_mqtt( await common.async_alarm_disarm(hass) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_DISARMED, 0, True + "alarm/state", AlarmControlPanelState.DISARMED, 0, True ) diff --git a/tests/components/map/__init__.py b/tests/components/map/__init__.py deleted file mode 100644 index 142afc0d5c985e..00000000000000 --- a/tests/components/map/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for Map.""" diff --git 
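# --- Illustrative sketch, not part of the patch -------------------------------
# The MQTT assertions above now pass AlarmControlPanelState members directly to
# mqtt_mock.async_publish.assert_called_once_with(...). That stays payload-compatible
# with the old STATE_ALARM_* constants because the enum is string-backed, so its
# members compare equal to the former constant values. The checks below are a
# sanity-check sketch under that assumption; the patch itself does not add them.
from homeassistant.components.alarm_control_panel import AlarmControlPanelState

assert AlarmControlPanelState.DISARMED == "disarmed"
assert AlarmControlPanelState.ARMED_HOME == "armed_home"
assert AlarmControlPanelState.ARMED_AWAY == "armed_away"
assert AlarmControlPanelState.PENDING == "pending"
# -------------------------------------------------------------------------------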
a/tests/components/map/test_init.py b/tests/components/map/test_init.py deleted file mode 100644 index 217550852bd4cf..00000000000000 --- a/tests/components/map/test_init.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Test the Map initialization.""" - -from collections.abc import Generator -from typing import Any -from unittest.mock import MagicMock, patch - -import pytest - -from homeassistant.components.map import DOMAIN -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.common import MockModule, mock_integration - - -@pytest.fixture -def mock_onboarding_not_done() -> Generator[MagicMock]: - """Mock that Home Assistant is currently onboarding.""" - with patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=False, - ) as mock_onboarding: - yield mock_onboarding - - -@pytest.fixture -def mock_onboarding_done() -> Generator[MagicMock]: - """Mock that Home Assistant is currently onboarding.""" - with patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=True, - ) as mock_onboarding: - yield mock_onboarding - - -@pytest.fixture -def mock_create_map_dashboard() -> Generator[MagicMock]: - """Mock the create map dashboard function.""" - with patch( - "homeassistant.components.map._create_map_dashboard", - ) as mock_create_map_dashboard: - yield mock_create_map_dashboard - - -async def test_create_dashboards_when_onboarded( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_onboarding_done, - mock_create_map_dashboard, -) -> None: - """Test we create map dashboard when onboarded.""" - # Mock the lovelace integration to prevent it from creating a map dashboard - mock_integration(hass, MockModule("lovelace")) - - assert await async_setup_component(hass, DOMAIN, {}) - - mock_create_map_dashboard.assert_called_once() - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def test_create_dashboards_once_when_onboarded( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_onboarding_done, - mock_create_map_dashboard, -) -> None: - """Test we create map dashboard once when onboarded.""" - hass_storage[DOMAIN] = { - "version": 1, - "minor_version": 1, - "key": "map", - "data": {"migrated": True}, - } - - # Mock the lovelace integration to prevent it from creating a map dashboard - mock_integration(hass, MockModule("lovelace")) - - assert await async_setup_component(hass, DOMAIN, {}) - - mock_create_map_dashboard.assert_not_called() - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def test_create_dashboards_when_not_onboarded( - hass: HomeAssistant, - hass_storage: dict[str, Any], - mock_onboarding_not_done, - mock_create_map_dashboard, -) -> None: - """Test we do not create map dashboard when not onboarded.""" - # Mock the lovelace integration to prevent it from creating a map dashboard - mock_integration(hass, MockModule("lovelace")) - - assert await async_setup_component(hass, DOMAIN, {}) - - mock_create_map_dashboard.assert_not_called() - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def test_create_issue_when_not_manually_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test creating issue registry issues.""" - assert await async_setup_component(hass, DOMAIN, {}) - - assert not issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_map" - ) - - -async def 
test_create_issue_when_manually_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test creating issue registry issues.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) - - assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, "deprecated_yaml_map") diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py index c64de44d49655e..ac23141be55cc9 100644 --- a/tests/components/mastodon/conftest.py +++ b/tests/components/mastodon/conftest.py @@ -1,7 +1,7 @@ """Mastodon tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -9,7 +9,6 @@ from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from tests.common import MockConfigEntry, load_json_object_fixture -from tests.components.smhi.common import AsyncMock @pytest.fixture diff --git a/tests/components/mastodon/test_config_flow.py b/tests/components/mastodon/test_config_flow.py index 073a6534d7d959..33f73812348c72 100644 --- a/tests/components/mastodon/test_config_flow.py +++ b/tests/components/mastodon/test_config_flow.py @@ -47,6 +47,39 @@ async def test_full_flow( assert result["result"].unique_id == "trwnh_mastodon_social" +async def test_full_flow_with_path( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full flow, where a path is accidentally specified.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social/home", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "@trwnh@mastodon.social" + assert result["data"] == { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + } + assert result["result"].unique_id == "trwnh_mastodon_social" + + @pytest.mark.parametrize( ("exception", "error"), [ diff --git a/tests/components/matrix/conftest.py b/tests/components/matrix/conftest.py index 0b84aff5434175..f0f16787f7753c 100644 --- a/tests/components/matrix/conftest.py +++ b/tests/components/matrix/conftest.py @@ -267,7 +267,9 @@ def mock_load_json(): @pytest.fixture def mock_allowed_path(): """Allow using NamedTemporaryFile for mock image.""" - with patch("homeassistant.core.Config.is_allowed_path", return_value=True) as mock: + with patch( + "homeassistant.core_config.Config.is_allowed_path", return_value=True + ) as mock: yield mock diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index 556d324d7ee87b..bbafec48e10faf 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py @@ -108,6 +108,7 @@ async def integration_fixture( "switch_unit", "temperature_sensor", "thermostat", + "vacuum_cleaner", "valve", "window_covering_full", "window_covering_lift", diff --git a/tests/components/matter/fixtures/nodes/vacuum_cleaner.json b/tests/components/matter/fixtures/nodes/vacuum_cleaner.json new file mode 100644 index 00000000000000..d6268144ffd12b --- /dev/null +++ 
b/tests/components/matter/fixtures/nodes/vacuum_cleaner.json @@ -0,0 +1,309 @@ +{ + "node_id": 66, + "date_commissioned": "2024-10-29T08:27:39.860951", + "last_interview": "2024-10-29T08:27:39.860959", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 48, 49, 50, 51, 60, 62, 63], + "0/29/2": [], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "TEST_VENDOR", + "0/40/2": 65521, + "0/40/3": "Mock Vacuum", + "0/40/4": 32769, + "0/40/5": "Mock Vacuum", + "0/40/6": "**REDACTED**", + "0/40/7": 0, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1.0", + "0/40/11": "20200101", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "TEST_SN", + "0/40/16": false, + "0/40/18": "F0D59DFAAEAD6E76", + "0/40/19": { + "0": 3, + "1": 65535 + }, + "0/40/21": 16973824, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 3, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22, + 65528, 65529, 65531, 65532, 65533 + ], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 2, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "ZW5kMA==", + "1": true + } + ], + "0/49/2": 0, + "0/49/3": 0, + "0/49/4": true, + "0/49/5": null, + "0/49/6": null, + "0/49/7": null, + "0/49/65532": 4, + "0/49/65533": 2, + "0/49/65528": [], + "0/49/65529": [], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/50/65532": 0, + "0/50/65533": 1, + "0/50/65528": [1], + "0/50/65529": [0], + "0/50/65531": [65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 1, + "0/51/2": 47, + "0/51/3": 0, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 16, + "0/62/3": 1, + "0/62/4": [], + "0/62/5": 1, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 116, + "1": 1 + } + ], + "1/29/1": [3, 29, 84, 85, 97], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/84/0": [ + { 
+ "0": "Idle", + "1": 0, + "2": [ + { + "1": 16384 + } + ] + }, + { + "0": "Cleaning", + "1": 1, + "2": [ + { + "1": 16385 + } + ] + }, + { + "0": "Mapping", + "1": 2, + "2": [ + { + "1": 16386 + } + ] + } + ], + "1/84/1": 0, + "1/84/65532": 0, + "1/84/65533": 2, + "1/84/65528": [1], + "1/84/65529": [0], + "1/84/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/85/0": [ + { + "0": "Quick", + "1": 0, + "2": [ + { + "1": 16385 + }, + { + "1": 1 + } + ] + }, + { + "0": "Auto", + "1": 1, + "2": [ + { + "1": 0 + }, + { + "1": 16385 + } + ] + }, + { + "0": "Deep Clean", + "1": 2, + "2": [ + { + "1": 16386 + }, + { + "1": 16384 + }, + { + "1": 16385 + } + ] + }, + { + "0": "Quiet", + "1": 3, + "2": [ + { + "1": 2 + }, + { + "1": 16385 + } + ] + }, + { + "0": "Max Vac", + "1": 4, + "2": [ + { + "1": 16385 + }, + { + "1": 16384 + } + ] + } + ], + "1/85/1": 0, + "1/85/65532": 0, + "1/85/65533": 2, + "1/85/65528": [1], + "1/85/65529": [0], + "1/85/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/97/0": null, + "1/97/1": null, + "1/97/3": [ + { + "0": 0 + }, + { + "0": 1 + }, + { + "0": 2 + }, + { + "0": 3 + }, + { + "0": 64 + }, + { + "0": 65 + }, + { + "0": 66 + } + ], + "1/97/4": 0, + "1/97/5": { + "0": 0 + }, + "1/97/65532": 0, + "1/97/65533": 1, + "1/97/65528": [4], + "1/97/65529": [0, 3, 128], + "1/97/65531": [0, 1, 3, 4, 5, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/snapshots/test_button.ambr b/tests/components/matter/snapshots/test_button.ambr new file mode 100644 index 00000000000000..10792b58d28e0f --- /dev/null +++ b/tests/components/matter/snapshots/test_button.ambr @@ -0,0 +1,2812 @@ +# serializer version: 1 +# name: test_buttons[air_purifier][button.air_purifier_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (1)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (2)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (3)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-3-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (3)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (4)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-4-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (4)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.air_purifier_identify_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (5)', + 'platform': 'matter', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-5-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_identify_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Air Purifier Identify (5)', + }), + 'context': , + 'entity_id': 'button.air_purifier_identify_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.air_purifier_reset_filter_condition', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter condition', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter_condition', + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-HepaFilterMonitoringResetButton-113-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Air Purifier Reset filter condition', + }), + 'context': , + 'entity_id': 'button.air_purifier_reset_filter_condition', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.air_purifier_reset_filter_condition_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter condition', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter_condition', + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-ActivatedCarbonFilterMonitoringResetButton-114-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_purifier][button.air_purifier_reset_filter_condition_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Air Purifier Reset filter condition', + }), + 'context': , + 'entity_id': 'button.air_purifier_reset_filter_condition_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[air_quality_sensor][button.lightfi_aq1_air_quality_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 
'button.lightfi_aq1_air_quality_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[air_quality_sensor][button.lightfi_aq1_air_quality_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Identify', + }), + 'context': , + 'entity_id': 'button.lightfi_aq1_air_quality_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[color_temperature_light][button.mock_color_temperature_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_color_temperature_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[color_temperature_light][button.mock_color_temperature_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Color Temperature Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_color_temperature_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[dimmable_plugin_unit][button.dimmable_plugin_unit_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dimmable_plugin_unit_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[dimmable_plugin_unit][button.dimmable_plugin_unit_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Dimmable Plugin Unit Identify', + }), + 'context': , + 'entity_id': 'button.dimmable_plugin_unit_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[door_lock][button.mock_door_lock_identify-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_door_lock_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[door_lock][button.mock_door_lock_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Door Lock Identify', + }), + 'context': , + 'entity_id': 'button.mock_door_lock_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[door_lock_with_unbolt][button.mock_door_lock_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_door_lock_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[door_lock_with_unbolt][button.mock_door_lock_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Door Lock Identify', + }), + 'context': , + 'entity_id': 'button.mock_door_lock_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_contact_sensor][button.eve_door_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_door_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_contact_sensor][button.eve_door_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Door Identify', + }), + 'context': , + 'entity_id': 'button.eve_door_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_buttons[eve_energy_plug][button.eve_energy_plug_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_energy_plug_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_energy_plug][button.eve_energy_plug_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Energy Plug Identify', + }), + 'context': , + 'entity_id': 'button.eve_energy_plug_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_energy_plug_patched][button.eve_energy_plug_patched_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_energy_plug_patched_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_energy_plug_patched][button.eve_energy_plug_patched_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Energy Plug Patched Identify', + }), + 'context': , + 'entity_id': 'button.eve_energy_plug_patched_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_thermo][button.eve_thermo_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_thermo_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_thermo][button.eve_thermo_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Thermo Identify', + }), + 'context': , + 'entity_id': 'button.eve_thermo_identify', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_weather_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Weather Identify (1)', + }), + 'context': , + 'entity_id': 'button.eve_weather_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.eve_weather_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Eve Weather Identify (2)', + }), + 'context': , + 'entity_id': 'button.eve_weather_identify_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[extended_color_light][button.mock_extended_color_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_extended_color_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[extended_color_light][button.mock_extended_color_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Extended Color Light 
Identify', + }), + 'context': , + 'entity_id': 'button.mock_extended_color_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[fan][button.mocked_fan_switch_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mocked_fan_switch_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[fan][button.mocked_fan_switch_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mocked Fan Switch Identify', + }), + 'context': , + 'entity_id': 'button.mocked_fan_switch_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[flow_sensor][button.mock_flow_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_flow_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[flow_sensor][button.mock_flow_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Flow Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_flow_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[generic_switch][button.mock_generic_switch_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_generic_switch_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[generic_switch][button.mock_generic_switch_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'identify', + 'friendly_name': 'Mock Generic Switch Identify', + }), + 'context': , + 'entity_id': 'button.mock_generic_switch_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_fancy_button-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_generic_switch_fancy_button', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fancy Button', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_fancy_button-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Generic Switch Fancy Button', + }), + 'context': , + 'entity_id': 'button.mock_generic_switch_fancy_button', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_generic_switch_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[generic_switch_multi][button.mock_generic_switch_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Generic Switch Identify (1)', + }), + 'context': , + 'entity_id': 'button.mock_generic_switch_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[humidity_sensor][button.mock_humidity_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_humidity_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': 
None, + }) +# --- +# name: test_buttons[humidity_sensor][button.mock_humidity_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Humidity Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_humidity_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[light_sensor][button.mock_light_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_light_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[light_sensor][button.mock_light_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Light Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_light_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.microwave_oven_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Microwave Oven Identify', + }), + 'context': , + 'entity_id': 'button.microwave_oven_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_pause-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_pause', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pause', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pause', + 'unique_id': 
'00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStatePauseButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_pause-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Pause', + }), + 'context': , + 'entity_id': 'button.microwave_oven_pause', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_resume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_resume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Resume', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'resume', + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateResumeButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_resume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Resume', + }), + 'context': , + 'entity_id': 'button.microwave_oven_resume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_start-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_start', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start', + 'unique_id': '00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateStartButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_start-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Start', + }), + 'context': , + 'entity_id': 'button.microwave_oven_start', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_oven_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': 
'00000000000004D2-000000000000009D-MatterNodeDevice-1-OperationalStateStopButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[microwave_oven][button.microwave_oven_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Oven Stop', + }), + 'context': , + 'entity_id': 'button.microwave_oven_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_config-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_config', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Config', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_config-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Config', + }), + 'context': , + 'entity_id': 'button.inovelli_config', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_down-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_down', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Down', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_down-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Down', + }), + 'context': , + 'entity_id': 'button.inovelli_down', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'00000000000004D2-00000000000000C5-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Identify (1)', + }), + 'context': , + 'entity_id': 'button.inovelli_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Identify (2)', + }), + 'context': , + 'entity_id': 'button.inovelli_identify_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_identify_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Identify (6)', + }), + 'context': , + 'entity_id': 'button.inovelli_identify_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_up-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.inovelli_up', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Up', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'00000000000004D2-00000000000000C5-MatterNodeDevice-3-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[multi_endpoint_light][button.inovelli_up-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Inovelli Up', + }), + 'context': , + 'entity_id': 'button.inovelli_up', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[occupancy_sensor][button.mock_occupancy_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_occupancy_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[occupancy_sensor][button.mock_occupancy_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Occupancy Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_occupancy_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[on_off_plugin_unit][button.mock_onoffpluginunit_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_onoffpluginunit_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[on_off_plugin_unit][button.mock_onoffpluginunit_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock OnOffPluginUnit Identify', + }), + 'context': , + 'entity_id': 'button.mock_onoffpluginunit_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light][button.mock_onoff_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_onoff_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light][button.mock_onoff_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock OnOff Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_onoff_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light_alt_name][button.mock_onoff_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_onoff_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light_alt_name][button.mock_onoff_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock OnOff Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_onoff_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light_no_name][button.mock_light_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_light_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light_no_name][button.mock_light_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Light Identify', + }), + 'context': , + 'entity_id': 'button.mock_light_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[onoff_light_with_levelcontrol_present][button.d215s_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.d215s_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 
'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[onoff_light_with_levelcontrol_present][button.d215s_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'D215S Identify', + }), + 'context': , + 'entity_id': 'button.d215s_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[pressure_sensor][button.mock_pressure_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_pressure_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[pressure_sensor][button.mock_pressure_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Pressure Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_pressure_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.room_airconditioner_identify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Room AirConditioner Identify (1)', + }), + 'context': , + 'entity_id': 'button.room_airconditioner_identify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.room_airconditioner_identify_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify (2)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-2-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[room_airconditioner][button.room_airconditioner_identify_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Room AirConditioner Identify (2)', + }), + 'context': , + 'entity_id': 'button.room_airconditioner_identify_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.dishwasher_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Dishwasher Identify', + }), + 'context': , + 'entity_id': 'button.dishwasher_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_pause-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.dishwasher_pause', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pause', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pause', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStatePauseButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_pause-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Pause', + }), + 'context': , + 'entity_id': 'button.dishwasher_pause', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_start-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.dishwasher_start', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStateStartButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_start-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Start', + }), + 'context': , + 'entity_id': 'button.dishwasher_start', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.dishwasher_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-OperationalStateStopButton-96-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[silabs_dishwasher][button.dishwasher_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Stop', + }), + 'context': , + 'entity_id': 'button.dishwasher_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[smoke_detector][button.smoke_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.smoke_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[smoke_detector][button.smoke_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Smoke sensor Identify', + }), + 'context': , + 'entity_id': 'button.smoke_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[switch_unit][button.mock_switchunit_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_switchunit_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[switch_unit][button.mock_switchunit_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock SwitchUnit Identify', + }), + 'context': , + 'entity_id': 'button.mock_switchunit_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[temperature_sensor][button.mock_temperature_sensor_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_temperature_sensor_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[temperature_sensor][button.mock_temperature_sensor_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Temperature Sensor Identify', + }), + 'context': , + 'entity_id': 'button.mock_temperature_sensor_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[thermostat][button.longan_link_hvac_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.longan_link_hvac_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[thermostat][button.longan_link_hvac_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Longan link HVAC Identify', + }), + 'context': , + 'entity_id': 'button.longan_link_hvac_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[valve][button.valve_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.valve_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[valve][button.valve_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Valve Identify', + }), + 'context': , + 'entity_id': 'button.valve_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_full][button.mock_full_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_full_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_full][button.mock_full_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Full Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_full_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_lift][button.mock_lift_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_lift_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_lift][button.mock_lift_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Lift Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_lift_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_pa_lift][button.longan_link_wncv_da01_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 
'entity_category': , + 'entity_id': 'button.longan_link_wncv_da01_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_pa_lift][button.longan_link_wncv_da01_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Longan link WNCV DA01 Identify', + }), + 'context': , + 'entity_id': 'button.longan_link_wncv_da01_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_pa_tilt][button.mock_pa_tilt_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_pa_tilt_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_pa_tilt][button.mock_pa_tilt_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock PA Tilt Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_pa_tilt_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[window_covering_tilt][button.mock_tilt_window_covering_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_tilt_window_covering_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-IdentifyButton-3-65529', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[window_covering_tilt][button.mock_tilt_window_covering_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Mock Tilt Window Covering Identify', + }), + 'context': , + 'entity_id': 'button.mock_tilt_window_covering_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/matter/snapshots/test_climate.ambr 
b/tests/components/matter/snapshots/test_climate.ambr new file mode 100644 index 00000000000000..25f5ca06f621f4 --- /dev/null +++ b/tests/components/matter/snapshots/test_climate.ambr @@ -0,0 +1,263 @@ +# serializer version: 1 +# name: test_climates[air_purifier][climate.air_purifier-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 5.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.air_purifier', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-5-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[air_purifier][climate.air_purifier-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Air Purifier', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 5.0, + 'supported_features': , + 'temperature': 20.0, + }), + 'context': , + 'entity_id': 'climate.air_purifier', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climates[eve_thermo][climate.eve_thermo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 10.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.eve_thermo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-1-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[eve_thermo][climate.eve_thermo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'Eve Thermo', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 30.0, + 'min_temp': 10.0, + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.eve_thermo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climates[room_airconditioner][climate.room_airconditioner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 16.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.room_airconditioner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[room_airconditioner][climate.room_airconditioner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Room AirConditioner', + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 16.0, + 'supported_features': , + 'temperature': 20.0, + }), + 'context': , + 'entity_id': 'climate.room_airconditioner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climates[thermostat][climate.longan_link_hvac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.longan_link_hvac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterThermostat-513-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climates[thermostat][climate.longan_link_hvac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 28.3, + 'friendly_name': 'Longan link HVAC', + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.longan_link_hvac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/matter/snapshots/test_cover.ambr b/tests/components/matter/snapshots/test_cover.ambr new file mode 100644 index 00000000000000..7d036d359837b6 --- /dev/null +++ b/tests/components/matter/snapshots/test_cover.ambr @@ -0,0 +1,245 @@ +# serializer version: 1 +# name: test_covers[window_covering_full][cover.mock_full_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_full_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCoverPositionAwareLiftAndTilt-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_full][cover.mock_full_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'current_tilt_position': 100, + 
'device_class': 'awning', + 'friendly_name': 'Mock Full Window Covering', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.mock_full_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_covers[window_covering_lift][cover.mock_lift_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_lift_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCover-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_lift][cover.mock_lift_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'awning', + 'friendly_name': 'Mock Lift Window Covering', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.mock_lift_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_covers[window_covering_pa_lift][cover.longan_link_wncv_da01-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.longan_link_wncv_da01', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterCoverPositionAwareLift-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_pa_lift][cover.longan_link_wncv_da01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 51, + 'device_class': 'awning', + 'friendly_name': 'Longan link WNCV DA01', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.longan_link_wncv_da01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_covers[window_covering_pa_tilt][cover.mock_pa_tilt_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_pa_tilt_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCoverPositionAwareTilt-258-10', + 
'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_pa_tilt][cover.mock_pa_tilt_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_tilt_position': 100, + 'device_class': 'awning', + 'friendly_name': 'Mock PA Tilt Window Covering', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.mock_pa_tilt_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_covers[window_covering_tilt][cover.mock_tilt_window_covering-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.mock_tilt_window_covering', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000032-MatterNodeDevice-1-MatterCover-258-10', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[window_covering_tilt][cover.mock_tilt_window_covering-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'awning', + 'friendly_name': 'Mock Tilt Window Covering', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.mock_tilt_window_covering', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/matter/snapshots/test_event.ambr b/tests/components/matter/snapshots/test_event.ambr new file mode 100644 index 00000000000000..031e8e9d24f1e0 --- /dev/null +++ b/tests/components/matter/snapshots/test_event.ambr @@ -0,0 +1,385 @@ +# serializer version: 1 +# name: test_events[generic_switch][event.mock_generic_switch_button-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'initial_press', + 'short_release', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.mock_generic_switch_button', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Button', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[generic_switch][event.mock_generic_switch_button-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'initial_press', + 'short_release', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Mock Generic Switch Button', + }), + 'context': , + 'entity_id': 'event.mock_generic_switch_button', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_button_1-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.mock_generic_switch_button_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Button (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_button_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Mock Generic Switch Button (1)', + }), + 'context': , + 'entity_id': 'event.mock_generic_switch_button_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_fancy_button-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.mock_generic_switch_fancy_button', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fancy Button', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-2-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[generic_switch_multi][event.mock_generic_switch_fancy_button-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Mock Generic Switch Fancy Button', + }), + 'context': , + 'entity_id': 'event.mock_generic_switch_fancy_button', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_config-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.inovelli_config', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Config', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_config-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Inovelli Config', + }), + 'context': , + 'entity_id': 'event.inovelli_config', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_down-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.inovelli_down', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Down', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_down-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + 'friendly_name': 'Inovelli Down', + }), + 'context': , + 'entity_id': 'event.inovelli_down', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_up-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 'long_release', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.inovelli_up', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Up', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-GenericSwitch-59-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[multi_endpoint_light][event.inovelli_up-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'multi_press_1', + 'multi_press_2', + 'multi_press_3', + 'multi_press_4', + 'multi_press_5', + 'long_press', + 
'long_release', + ]), + 'friendly_name': 'Inovelli Up', + }), + 'context': , + 'entity_id': 'event.inovelli_up', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/matter/snapshots/test_fan.ambr b/tests/components/matter/snapshots/test_fan.ambr new file mode 100644 index 00000000000000..7f1fe7d42db4a3 --- /dev/null +++ b/tests/components/matter/snapshots/test_fan.ambr @@ -0,0 +1,263 @@ +# serializer version: 1 +# name: test_fans[air_purifier][fan.air_purifier-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.air_purifier', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000008F-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[air_purifier][fan.air_purifier-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'direction': 'forward', + 'friendly_name': 'Air Purifier', + 'oscillating': False, + 'percentage': None, + 'percentage_step': 10.0, + 'preset_mode': 'auto', + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.air_purifier', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_fans[fan][fan.mocked_fan_switch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.mocked_fan_switch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[fan][fan.mocked_fan_switch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mocked Fan Switch', + 'percentage': 0, + 'percentage_step': 33.333333333333336, + 'preset_mode': None, + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'natural_wind', + 'sleep_wind', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.mocked_fan_switch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_fans[room_airconditioner][fan.room_airconditioner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', 
+ 'high', + 'auto', + 'sleep_wind', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.room_airconditioner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[room_airconditioner][fan.room_airconditioner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Room AirConditioner', + 'percentage': 0, + 'percentage_step': 33.333333333333336, + 'preset_mode': None, + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + 'sleep_wind', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.room_airconditioner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_fans[thermostat][fan.longan_link_hvac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.longan_link_hvac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterFan-514-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[thermostat][fan.longan_link_hvac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Longan link HVAC', + 'preset_mode': None, + 'preset_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.longan_link_hvac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/matter/snapshots/test_light.ambr b/tests/components/matter/snapshots/test_light.ambr new file mode 100644 index 00000000000000..68c1b7dca740db --- /dev/null +++ b/tests/components/matter/snapshots/test_light.ambr @@ -0,0 +1,660 @@ +# serializer version: 1 +# name: test_lights[color_temperature_light][light.mock_color_temperature_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_color_temperature_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 
'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[color_temperature_light][light.mock_color_temperature_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 128, + 'color_mode': , + 'color_temp': 284, + 'color_temp_kelvin': 3521, + 'friendly_name': 'Mock Color Temperature Light', + 'hs_color': tuple( + 27.152, + 44.32, + ), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 193, + 141, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.453, + 0.374, + ), + }), + 'context': , + 'entity_id': 'light.mock_color_temperature_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[dimmable_light][light.mock_dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_dimmable_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[dimmable_light][light.mock_dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 51, + 'color_mode': , + 'friendly_name': 'Mock Dimmable Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[dimmable_plugin_unit][light.dimmable_plugin_unit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_plugin_unit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[dimmable_plugin_unit][light.dimmable_plugin_unit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable Plugin Unit', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_plugin_unit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- 
+# name: test_lights[extended_color_light][light.mock_extended_color_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_extended_color_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[extended_color_light][light.mock_extended_color_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 128, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Mock Extended Color Light', + 'hs_color': tuple( + 51.024, + 20.079, + ), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 247, + 203, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.363, + 0.374, + ), + }), + 'context': , + 'entity_id': 'light.mock_extended_color_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.inovelli_light_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'light', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Inovelli Light (1)', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.inovelli_light_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 
'light.inovelli_light_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'light', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[multi_endpoint_light][light.inovelli_light_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Inovelli Light (6)', + 'hs_color': None, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': None, + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': None, + }), + 'context': , + 'entity_id': 'light.inovelli_light_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_lights[onoff_light][light.mock_onoff_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_onoff_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light][light.mock_onoff_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Mock OnOff Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_onoff_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[onoff_light_alt_name][light.mock_onoff_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_onoff_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light_alt_name][light.mock_onoff_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 
None, + 'color_temp_kelvin': None, + 'friendly_name': 'Mock OnOff Light', + 'hs_color': None, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': None, + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': None, + }), + 'context': , + 'entity_id': 'light.mock_onoff_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[onoff_light_no_name][light.mock_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light_no_name][light.mock_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'friendly_name': 'Mock Light', + 'hs_color': None, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': None, + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': None, + }), + 'context': , + 'entity_id': 'light.mock_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[onoff_light_with_levelcontrol_present][light.d215s-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.d215s', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-MatterLight-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[onoff_light_with_levelcontrol_present][light.d215s-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'D215S', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.d215s', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/matter/snapshots/test_lock.ambr b/tests/components/matter/snapshots/test_lock.ambr new file mode 100644 index 00000000000000..bf34ac267d755e --- /dev/null +++ b/tests/components/matter/snapshots/test_lock.ambr 
@@ -0,0 +1,95 @@
+# serializer version: 1
+# name: test_locks[door_lock][lock.mock_door_lock-entry]
+  EntityRegistryEntrySnapshot({
+    'aliases': set({
+    }),
+    'area_id': None,
+    'capabilities': None,
+    'config_entry_id': ,
+    'device_class': None,
+    'device_id': ,
+    'disabled_by': None,
+    'domain': 'lock',
+    'entity_category': None,
+    'entity_id': 'lock.mock_door_lock',
+    'has_entity_name': True,
+    'hidden_by': None,
+    'icon': None,
+    'id': ,
+    'labels': set({
+    }),
+    'name': None,
+    'options': dict({
+    }),
+    'original_device_class': None,
+    'original_icon': None,
+    'original_name': None,
+    'platform': 'matter',
+    'previous_unique_id': None,
+    'supported_features': 0,
+    'translation_key': None,
+    'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLock-257-0',
+    'unit_of_measurement': None,
+  })
+# ---
+# name: test_locks[door_lock][lock.mock_door_lock-state]
+  StateSnapshot({
+    'attributes': ReadOnlyDict({
+      'friendly_name': 'Mock Door Lock',
+      'supported_features': ,
+    }),
+    'context': ,
+    'entity_id': 'lock.mock_door_lock',
+    'last_changed': ,
+    'last_reported': ,
+    'last_updated': ,
+    'state': 'unlocked',
+  })
+# ---
+# name: test_locks[door_lock_with_unbolt][lock.mock_door_lock-entry]
+  EntityRegistryEntrySnapshot({
+    'aliases': set({
+    }),
+    'area_id': None,
+    'capabilities': None,
+    'config_entry_id': ,
+    'device_class': None,
+    'device_id': ,
+    'disabled_by': None,
+    'domain': 'lock',
+    'entity_category': None,
+    'entity_id': 'lock.mock_door_lock',
+    'has_entity_name': True,
+    'hidden_by': None,
+    'icon': None,
+    'id': ,
+    'labels': set({
+    }),
+    'name': None,
+    'options': dict({
+    }),
+    'original_device_class': None,
+    'original_icon': None,
+    'original_name': None,
+    'platform': 'matter',
+    'previous_unique_id': None,
+    'supported_features': ,
+    'translation_key': None,
+    'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterLock-257-0',
+    'unit_of_measurement': None,
+  })
+# ---
+# name: test_locks[door_lock_with_unbolt][lock.mock_door_lock-state]
+  StateSnapshot({
+    'attributes': ReadOnlyDict({
+      'friendly_name': 'Mock Door Lock',
+      'supported_features': ,
+    }),
+    'context': ,
+    'entity_id': 'lock.mock_door_lock',
+    'last_changed': ,
+    'last_reported': ,
+    'last_updated': ,
+    'state': 'locked',
+  })
+# ---
diff --git a/tests/components/matter/snapshots/test_number.ambr b/tests/components/matter/snapshots/test_number.ambr
new file mode 100644
index 00000000000000..9d51bb92e5122c
--- /dev/null
+++ b/tests/components/matter/snapshots/test_number.ambr
@@ -0,0 +1,1560 @@
+# serializer version: 1
+# name: test_numbers[color_temperature_light][number.mock_color_temperature_light_on_level-entry]
+  EntityRegistryEntrySnapshot({
+    'aliases': set({
+    }),
+    'area_id': None,
+    'capabilities': dict({
+      'max': 255,
+      'min': 0,
+      'mode': ,
+      'step': 1,
+    }),
+    'config_entry_id': ,
+    'device_class': None,
+    'device_id': ,
+    'disabled_by': None,
+    'domain': 'number',
+    'entity_category': ,
+    'entity_id': 'number.mock_color_temperature_light_on_level',
+    'has_entity_name': True,
+    'hidden_by': None,
+    'icon': None,
+    'id': ,
+    'labels': set({
+    }),
+    'name': None,
+    'options': dict({
+    }),
+    'original_device_class': None,
+    'original_icon': None,
+    'original_name': 'On level',
+    'platform': 'matter',
+    'previous_unique_id': None,
+    'supported_features': 0,
+    'translation_key': 'on_level',
+    'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17',
+    'unit_of_measurement': None,
+  })
+# ---
+# name: 
test_numbers[color_temperature_light][number.mock_color_temperature_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Color Temperature Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_color_temperature_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_dimmable_light_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_dimmable_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 
'number.mock_dimmable_light_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_dimmable_light_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_light][number.mock_dimmable_light_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_dimmable_light_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.dimmable_plugin_unit_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dimmable Plugin Unit On level', + 'max': 255, + 'min': 
0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.dimmable_plugin_unit_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.dimmable_plugin_unit_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[dimmable_plugin_unit][number.dimmable_plugin_unit_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dimmable Plugin Unit On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.dimmable_plugin_unit_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_numbers[eve_weather_sensor][number.eve_weather_altitude_above_sea_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9000, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.eve_weather_altitude_above_sea_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Altitude above Sea Level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'altitude', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-1-EveWeatherAltitude-319486977-319422483', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[eve_weather_sensor][number.eve_weather_altitude_above_sea_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Eve Weather Altitude above Sea Level', + 'max': 9000, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.eve_weather_altitude_above_sea_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.0', + }) +# --- +# name: test_numbers[extended_color_light][number.mock_extended_color_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_extended_color_light_on_level', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[extended_color_light][number.mock_extended_color_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Extended Color Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_extended_color_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.inovelli_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.5', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_on_level_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On level (1)', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.inovelli_on_level_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '137', + }) +# --- +# name: 
test_numbers[multi_endpoint_light][number.inovelli_on_level_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_on_level_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_level_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On level (6)', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.inovelli_on_level_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '254', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.inovelli_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.5', + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.inovelli_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-on_transition_time-8-18', + 
'unit_of_measurement': , + }) +# --- +# name: test_numbers[multi_endpoint_light][number.inovelli_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.inovelli_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.5', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoffpluginunit_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[on_off_plugin_unit][number.mock_onoffpluginunit_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoffpluginunit_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoff_light_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: 
test_numbers[onoff_light_alt_name][number.mock_onoff_light_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoff_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_onoff_light_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 
'entity_category': , + 'entity_id': 'number.mock_onoff_light_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_alt_name][number.mock_onoff_light_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_onoff_light_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-off_transition_time-8-19', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_light_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 
'number.mock_light_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_light_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.mock_light_on_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_transition_time', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-on_transition_time-8-18', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_no_name][number.mock_light_on_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light On transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_light_on_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.d215s_on_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On level', + 
'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_level', + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-on_level-8-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'D215S On level', + 'max': 255, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.d215s_on_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '255', + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_off_transition_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.d215s_on_off_transition_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'On/Off transition time', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_off_transition_time', + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-on_off_transition_time-8-16', + 'unit_of_measurement': , + }) +# --- +# name: test_numbers[onoff_light_with_levelcontrol_present][number.d215s_on_off_transition_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'D215S On/Off transition time', + 'max': 65534, + 'min': 0, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.d215s_on_off_transition_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- diff --git a/tests/components/matter/snapshots/test_select.ambr b/tests/components/matter/snapshots/test_select.ambr new file mode 100644 index 00000000000000..663b0cdaf51828 --- /dev/null +++ b/tests/components/matter/snapshots/test_select.ambr @@ -0,0 +1,1636 @@ +# serializer version: 1 +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_lighting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_color_temperature_light_lighting', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lighting', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_lighting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Color Temperature Light Lighting', + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'context': , + 
'entity_id': 'select.mock_color_temperature_light_lighting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Dark', + }) +# --- +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_color_temperature_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[color_temperature_light][select.mock_color_temperature_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Color Temperature Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_color_temperature_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[dimmable_light][select.mock_dimmable_light_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_dimmable_light_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED Color', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-6-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[dimmable_light][select.mock_dimmable_light_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light LED Color', + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'context': , + 'entity_id': 'select.mock_dimmable_light_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Aqua', + }) +# --- +# name: test_selects[dimmable_light][select.mock_dimmable_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_dimmable_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[dimmable_light][select.mock_dimmable_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Dimmable Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_dimmable_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[dimmable_plugin_unit][select.dimmable_plugin_unit_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.dimmable_plugin_unit_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[dimmable_plugin_unit][select.dimmable_plugin_unit_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dimmable Plugin Unit Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.dimmable_plugin_unit_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[door_lock][select.mock_door_lock_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': 
'00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[door_lock][select.mock_door_lock_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_selects[door_lock_with_unbolt][select.mock_door_lock_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[door_lock_with_unbolt][select.mock_door_lock_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_door_lock_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_selects[eve_energy_plug][select.eve_energy_plug_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.eve_energy_plug_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[eve_energy_plug][select.eve_energy_plug_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Energy Plug Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.eve_energy_plug_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: 
test_selects[eve_energy_plug_patched][select.eve_energy_plug_patched_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.eve_energy_plug_patched_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[eve_energy_plug_patched][select.eve_energy_plug_patched_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Energy Plug Patched Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.eve_energy_plug_patched_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_lighting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_extended_color_light_lighting', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lighting', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_lighting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Extended Color Light Lighting', + 'options': list([ + 'Dark', + 'Medium', + 'Light', + ]), + }), + 'context': , + 'entity_id': 'select.mock_extended_color_light_lighting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Dark', + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_extended_color_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 
None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[extended_color_light][select.mock_extended_color_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Extended Color Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_extended_color_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_edge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Leading', + 'Trailing', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_dimming_edge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimming Edge', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_edge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Dimming Edge', + 'options': list([ + 'Leading', + 'Trailing', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_dimming_edge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Leading', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Instant', + '500ms', + '800ms', + '1s', + '1.5s', + '2s', + '2.5s', + '3s', + '3.5s', + '4s', + '5s', + '6s', + '7s', + '8s', + '10s', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_dimming_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimming Speed', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_dimming_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Dimming Speed', + 'options': list([ + 'Instant', + '500ms', + '800ms', + '1s', + '1.5s', + '2s', + '2.5s', + '3s', + '3.5s', + '4s', + '5s', + '6s', + '7s', + '8s', + '10s', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_dimming_speed', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '2s', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED Color', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli LED Color', + 'options': list([ + 'Red', + 'Orange', + 'Lemon', + 'Lime', + 'Green', + 'Teal', + 'Cyan', + 'Aqua', + 'Blue', + 'Violet', + 'Magenta', + 'Pink', + 'White', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Lemon', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup (1)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Power-on behavior on startup (1)', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup (6)', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_power_on_behavior_on_startup_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Power-on behavior on startup (6)', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_power_on_behavior_on_startup_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_relay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Relay Click Enable', + 'Relay Click Disable', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_relay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relay', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_relay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Relay', + 'options': list([ + 'Relay Click Enable', + 'Relay Click Disable', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_relay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Relay Click Disable', + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_smart_bulb_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart Bulb Disable', + 'Smart Bulb Enable', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_smart_bulb_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart Bulb Mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-2-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_smart_bulb_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Smart Bulb Mode', + 'options': list([ + 'Smart Bulb Disable', + 'Smart Bulb Enable', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_smart_bulb_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Smart Bulb Disable', + }) +# --- +# 
name: test_selects[multi_endpoint_light][select.inovelli_switch_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'OnOff+Single', + 'OnOff+Dumb', + 'OnOff+AUX', + 'OnOff+Full Wave', + 'Dimmer+Single', + 'Dimmer+Dumb', + 'Dimmer+Aux', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.inovelli_switch_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch Mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-1-MatterModeSelect-80-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[multi_endpoint_light][select.inovelli_switch_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inovelli Switch Mode', + 'options': list([ + 'OnOff+Single', + 'OnOff+Dumb', + 'OnOff+AUX', + 'OnOff+Full Wave', + 'Dimmer+Single', + 'Dimmer+Dumb', + 'Dimmer+Aux', + ]), + }), + 'context': , + 'entity_id': 'select.inovelli_switch_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Dimmer+Single', + }) +# --- +# name: test_selects[on_off_plugin_unit][select.mock_onoffpluginunit_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_onoffpluginunit_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[on_off_plugin_unit][select.mock_onoffpluginunit_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOffPluginUnit Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_onoffpluginunit_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light][select.mock_onoff_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light][select.mock_onoff_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light_alt_name][select.mock_onoff_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light_alt_name][select.mock_onoff_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock OnOff Light Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_onoff_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light_no_name][select.mock_light_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_light_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light_no_name][select.mock_light_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Light Power-on behavior on startup', + 'options': list([ + 'on', + 
'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_light_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[onoff_light_with_levelcontrol_present][select.d215s_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.d215s_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000008-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[onoff_light_with_levelcontrol_present][select.d215s_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'D215S Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.d215s_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[silabs_dishwasher][select.dishwasher_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.dishwasher_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '00000000000004D2-0000000000000036-MatterNodeDevice-1-MatterDishwasherMode-89-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[silabs_dishwasher][select.dishwasher_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Mode', + 'options': list([ + ]), + }), + 'context': , + 'entity_id': 'select.dishwasher_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_selects[switch_unit][select.mock_switchunit_power_on_behavior_on_startup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.mock_switchunit_power_on_behavior_on_startup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Power-on behavior on startup', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'startup_on_off', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterStartUpOnOff-6-16387', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[switch_unit][select.mock_switchunit_power_on_behavior_on_startup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock SwitchUnit Power-on behavior on startup', + 'options': list([ + 'on', + 'off', + 'toggle', + 'previous', + ]), + }), + 'context': , + 'entity_id': 'select.mock_switchunit_power_on_behavior_on_startup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'previous', + }) +# --- +# name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Quick', + 'Auto', + 'Deep Clean', + 'Quiet', + 'Max Vac', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.mock_vacuum_clean_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clean mode', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clean_mode', + 'unique_id': '00000000000004D2-0000000000000042-MatterNodeDevice-1-MatterRvcCleanMode-85-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Vacuum Clean mode', + 'options': list([ + 'Quick', + 'Auto', + 'Deep Clean', + 'Quiet', + 'Max Vac', + ]), + }), + 'context': , + 'entity_id': 'select.mock_vacuum_clean_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Quick', + }) +# --- diff --git a/tests/components/matter/snapshots/test_switch.ambr b/tests/components/matter/snapshots/test_switch.ambr new file mode 100644 index 00000000000000..9396dccd2454dd --- /dev/null +++ b/tests/components/matter/snapshots/test_switch.ambr @@ -0,0 +1,377 @@ +# serializer version: 1 +# name: test_switches[door_lock][switch.mock_door_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_door_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[door_lock][switch.mock_door_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock Door Lock', + }), + 'context': , + 'entity_id': 'switch.mock_door_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# 
name: test_switches[door_lock_with_unbolt][switch.mock_door_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_door_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[door_lock_with_unbolt][switch.mock_door_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock Door Lock', + }), + 'context': , + 'entity_id': 'switch.mock_door_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[eve_energy_plug][switch.eve_energy_plug-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.eve_energy_plug', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-1-MatterPlug-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[eve_energy_plug][switch.eve_energy_plug-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Eve Energy Plug', + }), + 'context': , + 'entity_id': 'switch.eve_energy_plug', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[eve_energy_plug_patched][switch.eve_energy_plug_patched-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.eve_energy_plug_patched', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-00000000000000B7-MatterNodeDevice-1-MatterPlug-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[eve_energy_plug_patched][switch.eve_energy_plug_patched-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Eve Energy Plug Patched', + }), + 'context': , + 'entity_id': 'switch.eve_energy_plug_patched', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_onoffpluginunit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterPlug-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock OnOffPluginUnit', + }), + 'context': , + 'entity_id': 'switch.mock_onoffpluginunit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[room_airconditioner][switch.room_airconditioner_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.room_airconditioner_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power', + 'unique_id': '00000000000004D2-0000000000000024-MatterNodeDevice-1-MatterPowerToggle-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[room_airconditioner][switch.room_airconditioner_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Room AirConditioner Power', + }), + 'context': , + 'entity_id': 'switch.room_airconditioner_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[switch_unit][switch.mock_switchunit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_switchunit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch_unit][switch.mock_switchunit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Mock SwitchUnit', + }), + 'context': , + 'entity_id': 'switch.mock_switchunit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_switches[thermostat][switch.longan_link_hvac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.longan_link_hvac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterSwitch-6-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[thermostat][switch.longan_link_hvac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Longan link HVAC', + }), + 'context': , + 'entity_id': 'switch.longan_link_hvac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/matter/snapshots/test_vacuum.ambr b/tests/components/matter/snapshots/test_vacuum.ambr new file mode 100644 index 00000000000000..9e6b52ed572103 --- /dev/null +++ b/tests/components/matter/snapshots/test_vacuum.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_vacuum[vacuum_cleaner][vacuum.mock_vacuum-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'vacuum', + 'entity_category': None, + 'entity_id': 'vacuum.mock_vacuum', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-0000000000000042-MatterNodeDevice-1-MatterVacuumCleaner-84-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_vacuum[vacuum_cleaner][vacuum.mock_vacuum-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Vacuum', + 'supported_features': , + }), + 'context': , + 'entity_id': 'vacuum.mock_vacuum', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- diff --git a/tests/components/matter/snapshots/test_valve.ambr b/tests/components/matter/snapshots/test_valve.ambr new file mode 100644 index 00000000000000..9863463547675f --- /dev/null +++ b/tests/components/matter/snapshots/test_valve.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_valves[valve][valve.valve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.valve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-MatterValve-129-4', 
+ 'unit_of_measurement': None, + }) +# --- +# name: test_valves[valve][valve.valve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Valve', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.valve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/matter/test_adapter.py b/tests/components/matter/test_adapter.py index 6b1816ec9f45dc..01dff3b7899804 100644 --- a/tests/components/matter/test_adapter.py +++ b/tests/components/matter/test_adapter.py @@ -135,13 +135,13 @@ async def test_node_added_subscription( node_added_callback = matter_client.subscribe_events.call_args.kwargs["callback"] node = create_node_from_fixture("onoff_light") - entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert not entity_state node_added_callback(EventType.NODE_ADDED, node) await hass.async_block_till_done() - entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert entity_state @@ -200,6 +200,6 @@ async def test_bad_node_not_crash_integration( await hass.async_block_till_done() assert matter_client.get_nodes.call_count == 1 - assert hass.states.get("light.mock_onoff_light_light") is not None + assert hass.states.get("light.mock_onoff_light") is not None assert len(hass.states.async_all("light")) == 1 assert "Error setting up node" in caplog.text diff --git a/tests/components/matter/test_button.py b/tests/components/matter/test_button.py index 1d5a6aecf57b76..cbf62dd80c7b71 100644 --- a/tests/components/matter/test_button.py +++ b/tests/components/matter/test_button.py @@ -5,8 +5,23 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import snapshot_matter_entities + + +@pytest.mark.usefixtures("matter_devices") +async def test_buttons( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test buttons.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.BUTTON) @pytest.mark.parametrize("node_fixture", ["eve_energy_plug"]) diff --git a/tests/components/matter/test_climate.py b/tests/components/matter/test_climate.py index 168202637ffd7f..037ec4e76263d4 100644 --- a/tests/components/matter/test_climate.py +++ b/tests/components/matter/test_climate.py @@ -6,11 +6,28 @@ from matter_server.client.models.node import MatterNode from matter_server.common.helpers.util import create_attribute_path_from_attribute import pytest +from syrupy import SnapshotAssertion from homeassistant.components.climate import ClimateEntityFeature, HVACAction, HVACMode +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_climates( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test climates.""" + snapshot_matter_entities(hass, 
entity_registry, snapshot, Platform.CLIMATE) @pytest.mark.parametrize("node_fixture", ["thermostat"]) @@ -21,7 +38,7 @@ async def test_thermostat_base( ) -> None: """Test thermostat base attributes and state updates.""" # test entity attributes - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["min_temp"] == 7 assert state.attributes["max_temp"] == 35 @@ -43,7 +60,7 @@ async def test_thermostat_base( set_node_attribute(matter_node, 1, 513, 5, 1600) set_node_attribute(matter_node, 1, 513, 6, 3000) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["min_temp"] == 16 assert state.attributes["max_temp"] == 30 @@ -57,56 +74,56 @@ async def test_thermostat_base( # test system mode update from device set_node_attribute(matter_node, 1, 513, 28, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.OFF # test running state update from device set_node_attribute(matter_node, 1, 513, 41, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.HEATING set_node_attribute(matter_node, 1, 513, 41, 8) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.HEATING set_node_attribute(matter_node, 1, 513, 41, 2) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.COOLING set_node_attribute(matter_node, 1, 513, 41, 16) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.COOLING set_node_attribute(matter_node, 1, 513, 41, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.FAN set_node_attribute(matter_node, 1, 513, 41, 32) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.FAN set_node_attribute(matter_node, 1, 513, 41, 64) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.FAN set_node_attribute(matter_node, 1, 513, 41, 66) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = 
hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["hvac_action"] == HVACAction.OFF @@ -114,7 +131,7 @@ async def test_thermostat_base( set_node_attribute(matter_node, 1, 513, 28, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.HEAT @@ -122,7 +139,7 @@ async def test_thermostat_base( set_node_attribute(matter_node, 1, 513, 18, 2000) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.attributes["temperature"] == 20 @@ -135,14 +152,14 @@ async def test_thermostat_service_calls( ) -> None: """Test climate platform service calls.""" # test single-setpoint temperature adjustment when cool mode is active - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.COOL await hass.services.async_call( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 25, }, blocking=True, @@ -163,7 +180,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 25, }, blocking=True, @@ -175,7 +192,7 @@ async def test_thermostat_service_calls( # test single-setpoint temperature adjustment when heat mode is active set_node_attribute(matter_node, 1, 513, 28, 4) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.HEAT @@ -183,7 +200,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 20, }, blocking=True, @@ -200,7 +217,7 @@ async def test_thermostat_service_calls( # test dual setpoint temperature adjustments when heat_cool mode is active set_node_attribute(matter_node, 1, 513, 28, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.longan_link_hvac_thermostat") + state = hass.states.get("climate.longan_link_hvac") assert state assert state.state == HVACMode.HEAT_COOL @@ -208,7 +225,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "target_temp_low": 10, "target_temp_high": 30, }, @@ -233,7 +250,7 @@ async def test_thermostat_service_calls( "climate", "set_hvac_mode", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "hvac_mode": HVACMode.HEAT, }, blocking=True, @@ -257,7 +274,7 @@ async def test_thermostat_service_calls( "climate", "set_temperature", { - "entity_id": "climate.longan_link_hvac_thermostat", + "entity_id": "climate.longan_link_hvac", "temperature": 22, "hvac_mode": HVACMode.COOL, }, @@ -287,7 +304,7 @@ async def test_room_airconditioner( matter_node: MatterNode, ) -> None: """Test if a climate entity is created for a Room Airconditioner device.""" - state = hass.states.get("climate.room_airconditioner_thermostat") + 
state = hass.states.get("climate.room_airconditioner") assert state assert state.attributes["current_temperature"] == 20 # room airconditioner has mains power on OnOff cluster with value set to False @@ -301,7 +318,7 @@ async def test_room_airconditioner( # set mains power to ON (OnOff cluster) set_node_attribute(matter_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") # test supported HVAC modes include fan and dry modes assert state.attributes["hvac_modes"] == [ @@ -315,19 +332,19 @@ async def test_room_airconditioner( # test fan-only hvac mode set_node_attribute(matter_node, 1, 513, 28, 7) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") assert state assert state.state == HVACMode.FAN_ONLY # test dry hvac mode set_node_attribute(matter_node, 1, 513, 28, 8) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") assert state assert state.state == HVACMode.DRY # test featuremap update set_node_attribute(matter_node, 1, 513, 65532, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("climate.room_airconditioner_thermostat") + state = hass.states.get("climate.room_airconditioner") assert state.attributes["supported_features"] & ClimateEntityFeature.TURN_ON diff --git a/tests/components/matter/test_config_flow.py b/tests/components/matter/test_config_flow.py index fb132c8972ff32..eed776c132e864 100644 --- a/tests/components/matter/test_config_flow.py +++ b/tests/components/matter/test_config_flow.py @@ -5,17 +5,19 @@ from collections.abc import Generator from ipaddress import ip_address from unittest.mock import AsyncMock, MagicMock, call, patch +from uuid import uuid4 from aiohasupervisor import SupervisorError +from aiohasupervisor.models import Discovery from matter_server.client.exceptions import CannotConnect, InvalidServerVersion import pytest from homeassistant import config_entries -from homeassistant.components.hassio import HassioAPIError, HassioServiceInfo from homeassistant.components.matter.const import ADDON_SLUG, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -290,7 +292,19 @@ async def test_zeroconf_discovery_not_onboarded_not_supervisor( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_already_discovered( hass: HomeAssistant, supervisor: MagicMock, @@ -328,7 +342,19 @@ async def test_zeroconf_not_onboarded_already_discovered( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + 
addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_running( hass: HomeAssistant, supervisor: MagicMock, @@ -360,7 +386,19 @@ async def test_zeroconf_not_onboarded_running( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -394,7 +432,19 @@ async def test_zeroconf_not_onboarded_installed( @pytest.mark.parametrize("zeroconf_info", [ZEROCONF_INFO_TCP, ZEROCONF_INFO_UDP]) -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_zeroconf_not_onboarded_not_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -418,7 +468,7 @@ async def test_zeroconf_not_onboarded_not_installed( assert addon_info.call_count == 0 assert addon_store_info.call_count == 2 - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert start_addon.call_args == call("core_matter_server") assert client_connect.call_count == 1 assert result["type"] is FlowResultType.CREATE_ENTRY @@ -431,7 +481,19 @@ async def test_zeroconf_not_onboarded_not_installed( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_supervisor_discovery( hass: HomeAssistant, supervisor: MagicMock, @@ -469,7 +531,19 @@ async def test_supervisor_discovery( @pytest.mark.parametrize( ("discovery_info", "error"), - [({"config": ADDON_DISCOVERY_INFO}, SupervisorError())], + [ + ( + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), + ) + ], ) async def test_supervisor_discovery_addon_info_failed( hass: HomeAssistant, @@ -502,7 +576,19 @@ async def test_supervisor_discovery_addon_info_failed( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_clean_supervisor_discovery_on_user_create( hass: HomeAssistant, supervisor: MagicMock, @@ -733,7 +819,7 @@ async def test_supervisor_discovery_addon_not_installed( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -793,7 +879,19 @@ async def test_not_addon( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": 
ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running( hass: HomeAssistant, supervisor: MagicMock, @@ -839,8 +937,15 @@ async def test_addon_running( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, None, "addon_get_discovery_info_failed", @@ -848,7 +953,14 @@ async def test_addon_running( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), None, @@ -857,7 +969,7 @@ async def test_addon_running( True, ), ( - None, + [], None, None, None, @@ -866,7 +978,14 @@ async def test_addon_running( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, None, SupervisorError(), @@ -925,8 +1044,15 @@ async def test_addon_running_failures( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, None, "addon_get_discovery_info_failed", @@ -934,7 +1060,14 @@ async def test_addon_running_failures( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), None, @@ -943,7 +1076,7 @@ async def test_addon_running_failures( True, ), ( - None, + [], None, None, None, @@ -952,7 +1085,14 @@ async def test_addon_running_failures( False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, None, SupervisorError(), @@ -996,7 +1136,19 @@ async def test_addon_running_failures_zeroconf( assert result["reason"] == abort_reason -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1034,7 +1186,19 @@ async def test_addon_running_already_configured( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -1084,21 +1248,35 @@ async def test_addon_installed( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], SupervisorError(), None, False, False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), True, True, ), ( - None, 
+ [], None, None, True, @@ -1159,21 +1337,35 @@ async def test_addon_installed_failures( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], SupervisorError(), None, False, False, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, CannotConnect(Exception("Boom")), True, True, ), ( - None, + [], None, None, True, @@ -1213,7 +1405,19 @@ async def test_addon_installed_failures_zeroconf( assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1259,7 +1463,19 @@ async def test_addon_installed_already_configured( assert setup_entry.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_not_installed( hass: HomeAssistant, supervisor: MagicMock, @@ -1291,7 +1507,7 @@ async def test_addon_not_installed( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -1318,7 +1534,7 @@ async def test_addon_not_installed_failures( install_addon: AsyncMock, ) -> None: """Test add-on install failure.""" - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -1338,7 +1554,7 @@ async def test_addon_not_installed_failures( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert addon_info.call_count == 0 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" @@ -1355,20 +1571,32 @@ async def test_addon_not_installed_failures_zeroconf( zeroconf_info: ZeroconfServiceInfo, ) -> None: """Test add-on install failure.""" - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}, data=zeroconf_info ) await hass.async_block_till_done() - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert addon_info.call_count == 0 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_matter_server", + service="matter", + uuid=uuid4(), + 
config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_not_installed_already_configured( hass: HomeAssistant, supervisor: MagicMock, @@ -1410,7 +1638,7 @@ async def test_addon_not_installed_already_configured( await hass.async_block_till_done() result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" diff --git a/tests/components/matter/test_cover.py b/tests/components/matter/test_cover.py index 3a7749e1c243a3..224aabd90825f2 100644 --- a/tests/components/matter/test_cover.py +++ b/tests/components/matter/test_cover.py @@ -6,27 +6,38 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.cover import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - CoverEntityFeature, -) +from homeassistant.components.cover import CoverEntityFeature, CoverState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) -from .common import set_node_attribute, trigger_subscription_callback + +@pytest.mark.usefixtures("matter_devices") +async def test_covers( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test covers.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.COVER) @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("window_covering_lift", "cover.mock_lift_window_covering_cover"), - ("window_covering_pa_lift", "cover.longan_link_wncv_da01_cover"), - ("window_covering_tilt", "cover.mock_tilt_window_covering_cover"), - ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering_cover"), - ("window_covering_full", "cover.mock_full_window_covering_cover"), + ("window_covering_lift", "cover.mock_lift_window_covering"), + ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), + ("window_covering_tilt", "cover.mock_tilt_window_covering"), + ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), + ("window_covering_full", "cover.mock_full_window_covering"), ], ) async def test_cover( @@ -92,9 +103,9 @@ async def test_cover( @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("window_covering_lift", "cover.mock_lift_window_covering_cover"), - ("window_covering_pa_lift", "cover.longan_link_wncv_da01_cover"), - ("window_covering_full", "cover.mock_full_window_covering_cover"), + ("window_covering_lift", "cover.mock_lift_window_covering"), + ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), + ("window_covering_full", "cover.mock_full_window_covering"), ], ) async def test_cover_lift( @@ -127,20 +138,20 @@ async def test_cover_lift( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING set_node_attribute(matter_node, 1, 258, 10, 0b000101) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("window_covering_lift", 
"cover.mock_lift_window_covering_cover"), + ("window_covering_lift", "cover.mock_lift_window_covering"), ], ) async def test_cover_lift_only( @@ -177,7 +188,7 @@ async def test_cover_lift_only( @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("window_covering_pa_lift", "cover.longan_link_wncv_da01_cover"), + ("window_covering_pa_lift", "cover.longan_link_wncv_da01"), ], ) async def test_cover_position_aware_lift( @@ -206,7 +217,7 @@ async def test_cover_position_aware_lift( state = hass.states.get(entity_id) assert state assert state.attributes["current_position"] == 100 - floor(position / 100) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN set_node_attribute(matter_node, 1, 258, 14, 10000) set_node_attribute(matter_node, 1, 258, 10, 0b000000) @@ -215,15 +226,15 @@ async def test_cover_position_aware_lift( state = hass.states.get(entity_id) assert state assert state.attributes["current_position"] == 0 - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("window_covering_tilt", "cover.mock_tilt_window_covering_cover"), - ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering_cover"), - ("window_covering_full", "cover.mock_full_window_covering_cover"), + ("window_covering_tilt", "cover.mock_tilt_window_covering"), + ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), + ("window_covering_full", "cover.mock_full_window_covering"), ], ) async def test_cover_tilt( @@ -258,20 +269,20 @@ async def test_cover_tilt( await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING set_node_attribute(matter_node, 1, 258, 10, 0b010001) await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("window_covering_tilt", "cover.mock_tilt_window_covering_cover"), + ("window_covering_tilt", "cover.mock_tilt_window_covering"), ], ) async def test_cover_tilt_only( @@ -306,7 +317,7 @@ async def test_cover_tilt_only( @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering_cover"), + ("window_covering_pa_tilt", "cover.mock_pa_tilt_window_covering"), ], ) async def test_cover_position_aware_tilt( @@ -346,7 +357,7 @@ async def test_cover_full_features( matter_node: MatterNode, ) -> None: """Test window covering devices with all the features.""" - entity_id = "cover.mock_full_window_covering_cover" + entity_id = "cover.mock_full_window_covering" state = hass.states.get(entity_id) assert state @@ -366,7 +377,7 @@ async def test_cover_full_features( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED set_node_attribute(matter_node, 1, 258, 14, 5000) set_node_attribute(matter_node, 1, 258, 15, 10000) @@ -375,7 +386,7 @@ async def test_cover_full_features( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN set_node_attribute(matter_node, 1, 258, 14, 10000) set_node_attribute(matter_node, 1, 258, 15, 5000) @@ -384,7 +395,7 @@ async def test_cover_full_features( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert 
state.state == CoverState.CLOSED set_node_attribute(matter_node, 1, 258, 14, 5000) set_node_attribute(matter_node, 1, 258, 15, 5000) @@ -393,7 +404,7 @@ async def test_cover_full_features( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN set_node_attribute(matter_node, 1, 258, 14, 5000) set_node_attribute(matter_node, 1, 258, 15, None) @@ -401,7 +412,7 @@ async def test_cover_full_features( await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN set_node_attribute(matter_node, 1, 258, 14, None) set_node_attribute(matter_node, 1, 258, 15, 5000) @@ -417,7 +428,7 @@ async def test_cover_full_features( await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED set_node_attribute(matter_node, 1, 258, 14, None) set_node_attribute(matter_node, 1, 258, 15, 10000) diff --git a/tests/components/matter/test_event.py b/tests/components/matter/test_event.py index 934858e6a3ace5..f3a318c4e8bb2c 100644 --- a/tests/components/matter/test_event.py +++ b/tests/components/matter/test_event.py @@ -5,11 +5,24 @@ from matter_server.client.models.node import MatterNode from matter_server.common.models import EventType, MatterNodeEvent import pytest +from syrupy import SnapshotAssertion from homeassistant.components.event import ATTR_EVENT_TYPE, ATTR_EVENT_TYPES +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import trigger_subscription_callback +from .common import snapshot_matter_entities, trigger_subscription_callback + + +@pytest.mark.usefixtures("matter_devices") +async def test_events( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test events.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.EVENT) @pytest.mark.parametrize("node_fixture", ["generic_switch"]) diff --git a/tests/components/matter/test_fan.py b/tests/components/matter/test_fan.py index 75ea9e39b67b99..6ed95b0ecc2dbe 100644 --- a/tests/components/matter/test_fan.py +++ b/tests/components/matter/test_fan.py @@ -4,6 +4,7 @@ from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion from homeassistant.components.fan import ( ATTR_DIRECTION, @@ -17,10 +18,30 @@ SERVICE_SET_DIRECTION, FanEntityFeature, ) -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) -from .common import set_node_attribute, trigger_subscription_callback + +@pytest.mark.usefixtures("matter_devices") +async def test_fans( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test fans.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.FAN) @pytest.mark.parametrize("node_fixture", ["air_purifier"]) @@ -30,7 +51,7 @@ async def test_fan_base( matter_node: MatterNode, ) -> None: """Test Fan platform.""" - 
entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" state = hass.states.get(entity_id) assert state assert state.attributes["preset_modes"] == [ @@ -98,7 +119,7 @@ async def test_fan_turn_on_with_percentage( matter_node: MatterNode, ) -> None: """Test turning on the fan with a specific percentage.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -136,7 +157,7 @@ async def test_fan_turn_on_with_preset_mode( matter_node: MatterNode, ) -> None: """Test turning on the fan with a specific preset mode.""" - entity_id = "fan.mocked_fan_switch_fan" + entity_id = "fan.mocked_fan_switch" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -212,7 +233,7 @@ async def test_fan_turn_off( matter_node: MatterNode, ) -> None: """Test turning off the fan.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, @@ -255,7 +276,7 @@ async def test_fan_oscillate( matter_node: MatterNode, ) -> None: """Test oscillating the fan.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" for oscillating, value in ((True, 1), (False, 0)): await hass.services.async_call( FAN_DOMAIN, @@ -279,7 +300,7 @@ async def test_fan_set_direction( matter_node: MatterNode, ) -> None: """Test oscillating the fan.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.air_purifier" for direction, value in ((DIRECTION_FORWARD, 0), (DIRECTION_REVERSE, 1)): await hass.services.async_call( FAN_DOMAIN, @@ -302,7 +323,7 @@ async def test_fan_set_direction( [ ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 0, }, @@ -310,7 +331,7 @@ async def test_fan_set_direction( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 1, }, @@ -322,7 +343,7 @@ async def test_fan_set_direction( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 4, }, @@ -334,7 +355,7 @@ async def test_fan_set_direction( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", { "1/514/65532": 36, }, @@ -366,7 +387,7 @@ async def test_fan_supported_features( [ ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 0, "1/514/65532": 0}, [ "low", @@ -376,7 +397,7 @@ async def test_fan_supported_features( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 1, "1/514/65532": 0}, [ "low", @@ -385,25 +406,25 @@ async def test_fan_supported_features( ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 2, "1/514/65532": 0}, ["low", "medium", "high", "auto"], ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 4, "1/514/65532": 0}, ["high", "auto"], ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 5, "1/514/65532": 0}, ["high"], ), ( "fan", - "fan.mocked_fan_switch_fan", + "fan.mocked_fan_switch", {"1/514/1": 5, "1/514/65532": 8, "1/514/9": 3}, ["high", "natural_wind", "sleep_wind"], ), diff --git a/tests/components/matter/test_init.py b/tests/components/matter/test_init.py index 23001aacf23d5e..f65766894133fe 100644 --- a/tests/components/matter/test_init.py +++ b/tests/components/matter/test_init.py @@ -67,7 +67,7 @@ async def test_entry_setup_unload( assert matter_client.connect.call_count == 1 assert matter_client.set_default_fabric_label.call_count == 1 assert entry.state is ConfigEntryState.LOADED - 
entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert entity_state assert entity_state.state != STATE_UNAVAILABLE @@ -75,7 +75,7 @@ async def test_entry_setup_unload( assert matter_client.disconnect.call_count == 1 assert entry.state is ConfigEntryState.NOT_LOADED - entity_state = hass.states.get("light.mock_onoff_light_light") + entity_state = hass.states.get("light.mock_onoff_light") assert entity_state assert entity_state.state == STATE_UNAVAILABLE @@ -246,10 +246,10 @@ async def test_raise_addon_task_in_progress( install_addon_original_side_effect = install_addon.side_effect - async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: + async def install_addon_side_effect(slug: str) -> None: """Mock install add-on.""" await install_event.wait() - await install_addon_original_side_effect(hass, slug) + await install_addon_original_side_effect(slug) install_addon.side_effect = install_addon_side_effect @@ -337,7 +337,7 @@ async def test_install_addon( assert entry.state is ConfigEntryState.SETUP_RETRY assert addon_store_info.call_count == 3 assert install_addon.call_count == 1 - assert install_addon.call_args == call(hass, "core_matter_server") + assert install_addon.call_args == call("core_matter_server") assert start_addon.call_count == 1 assert start_addon.call_args == call("core_matter_server") @@ -389,7 +389,7 @@ async def test_addon_info_failure( True, 1, 1, - HassioAPIError("Boom"), + SupervisorError("Boom"), None, ServerVersionTooOld("Invalid version"), ), @@ -676,7 +676,7 @@ async def test_remove_config_entry_device( device_entry = dr.async_entries_for_config_entry( device_registry, config_entry.entry_id )[0] - entity_id = "light.m5stamp_lighting_app_light" + entity_id = "light.m5stamp_lighting_app" assert device_entry assert entity_registry.async_get(entity_id) diff --git a/tests/components/matter/test_light.py b/tests/components/matter/test_light.py index d843ce6dcfdc28..c49b47c9106c6b 100644 --- a/tests/components/matter/test_light.py +++ b/tests/components/matter/test_light.py @@ -5,11 +5,28 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion from homeassistant.components.light import ColorMode +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_lights( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test lights.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.LIGHT) @pytest.mark.parametrize( @@ -17,17 +34,17 @@ [ ( "extended_color_light", - "light.mock_extended_color_light_light", + "light.mock_extended_color_light", ["color_temp", "hs", "xy"], ), ( "color_temperature_light", - "light.mock_color_temperature_light_light", + "light.mock_color_temperature_light", ["color_temp"], ), - ("dimmable_light", "light.mock_dimmable_light_light", ["brightness"]), - ("onoff_light", "light.mock_onoff_light_light", ["onoff"]), - ("onoff_light_with_levelcontrol_present", "light.d215s_light", ["onoff"]), + ("dimmable_light", "light.mock_dimmable_light", ["brightness"]), + 
("onoff_light", "light.mock_onoff_light", ["onoff"]), + ("onoff_light_with_levelcontrol_present", "light.d215s", ["onoff"]), ], ) async def test_light_turn_on_off( @@ -100,10 +117,10 @@ async def test_light_turn_on_off( @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("extended_color_light", "light.mock_extended_color_light_light"), - ("color_temperature_light", "light.mock_color_temperature_light_light"), - ("dimmable_light", "light.mock_dimmable_light_light"), - ("dimmable_plugin_unit", "light.dimmable_plugin_unit_light"), + ("extended_color_light", "light.mock_extended_color_light"), + ("color_temperature_light", "light.mock_color_temperature_light"), + ("dimmable_light", "light.mock_dimmable_light"), + ("dimmable_plugin_unit", "light.dimmable_plugin_unit"), ], ) async def test_dimmable_light( @@ -168,8 +185,8 @@ async def test_dimmable_light( @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("extended_color_light", "light.mock_extended_color_light_light"), - ("color_temperature_light", "light.mock_color_temperature_light_light"), + ("extended_color_light", "light.mock_extended_color_light"), + ("color_temperature_light", "light.mock_color_temperature_light"), ], ) async def test_color_temperature_light( @@ -257,7 +274,7 @@ async def test_color_temperature_light( @pytest.mark.parametrize( ("node_fixture", "entity_id"), [ - ("extended_color_light", "light.mock_extended_color_light_light"), + ("extended_color_light", "light.mock_extended_color_light"), ], ) async def test_extended_color_light( diff --git a/tests/components/matter/test_lock.py b/tests/components/matter/test_lock.py index 3fbf783f57707d..7bcfd381d6cea3 100644 --- a/tests/components/matter/test_lock.py +++ b/tests/components/matter/test_lock.py @@ -5,14 +5,29 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion from homeassistant.components.lock import LockEntityFeature, LockState -from homeassistant.const import ATTR_CODE, STATE_UNKNOWN +from homeassistant.const import ATTR_CODE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_locks( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test locks.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.LOCK) @pytest.mark.parametrize("node_fixture", ["door_lock"]) @@ -26,7 +41,7 @@ async def test_lock( "lock", "unlock", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": "lock.mock_door_lock", }, blocking=True, ) @@ -44,7 +59,7 @@ async def test_lock( "lock", "lock", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": "lock.mock_door_lock", }, blocking=True, ) @@ -59,42 +74,42 @@ async def test_lock( matter_client.send_device_command.reset_mock() await hass.async_block_till_done() - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.LOCKING set_node_attribute(matter_node, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + 
state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.UNLOCKED set_node_attribute(matter_node, 1, 257, 0, 2) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.UNLOCKED set_node_attribute(matter_node, 1, 257, 0, 1) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.LOCKED set_node_attribute(matter_node, 1, 257, 0, None) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == STATE_UNKNOWN # test featuremap update set_node_attribute(matter_node, 1, 257, 65532, 4096) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state.attributes["supported_features"] & LockEntityFeature.OPEN @@ -120,7 +135,7 @@ async def test_lock_requires_pin( await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock_lock", ATTR_CODE: "1234"}, + {"entity_id": "lock.mock_door_lock", ATTR_CODE: "1234"}, blocking=True, ) @@ -129,7 +144,7 @@ async def test_lock_requires_pin( await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock_lock", ATTR_CODE: code}, + {"entity_id": "lock.mock_door_lock", ATTR_CODE: code}, blocking=True, ) assert matter_client.send_device_command.call_count == 1 @@ -143,13 +158,13 @@ async def test_lock_requires_pin( # Lock door using default code default_code = "7654321" entity_registry.async_update_entity_options( - "lock.mock_door_lock_lock", "lock", {"default_code": default_code} + "lock.mock_door_lock", "lock", {"default_code": default_code} ) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( "lock", "lock", - {"entity_id": "lock.mock_door_lock_lock"}, + {"entity_id": "lock.mock_door_lock"}, blocking=True, ) assert matter_client.send_device_command.call_count == 2 @@ -168,7 +183,7 @@ async def test_lock_with_unbolt( matter_node: MatterNode, ) -> None: """Test door lock.""" - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.LOCKED assert state.attributes["supported_features"] & LockEntityFeature.OPEN @@ -177,7 +192,7 @@ async def test_lock_with_unbolt( "lock", "unlock", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": "lock.mock_door_lock", }, blocking=True, ) @@ -195,7 +210,7 @@ async def test_lock_with_unbolt( "lock", "open", { - "entity_id": "lock.mock_door_lock_lock", + "entity_id": "lock.mock_door_lock", }, blocking=True, ) @@ -208,20 +223,20 @@ async def test_lock_with_unbolt( ) await hass.async_block_till_done() - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.OPENING set_node_attribute(matter_node, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.UNLOCKED set_node_attribute(matter_node, 1, 257, 0, 3) await 
trigger_subscription_callback(hass, matter_client) - state = hass.states.get("lock.mock_door_lock_lock") + state = hass.states.get("lock.mock_door_lock") assert state assert state.state == LockState.OPEN diff --git a/tests/components/matter/test_number.py b/tests/components/matter/test_number.py index 3f111ce64c66bf..86e1fbbf41968d 100644 --- a/tests/components/matter/test_number.py +++ b/tests/components/matter/test_number.py @@ -6,10 +6,27 @@ from matter_server.common import custom_clusters from matter_server.common.helpers.util import create_attribute_path_from_attribute import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_numbers( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test numbers.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.NUMBER) @pytest.mark.parametrize("node_fixture", ["dimmable_light"]) diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py index bc1b65302e0409..ffe996fd840f8c 100644 --- a/tests/components/matter/test_select.py +++ b/tests/components/matter/test_select.py @@ -5,10 +5,27 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_selects( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test selects.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SELECT) @pytest.mark.parametrize("node_fixture", ["dimmable_light"]) diff --git a/tests/components/matter/test_switch.py b/tests/components/matter/test_switch.py index b193fb0e18999c..d7a6a700cdefde 100644 --- a/tests/components/matter/test_switch.py +++ b/tests/components/matter/test_switch.py @@ -5,10 +5,27 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_switches( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test switches.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.SWITCH) @pytest.mark.parametrize("node_fixture", ["on_off_plugin_unit"]) @@ -18,7 +35,7 @@ async def test_turn_on( matter_node: MatterNode, ) -> None: """Test turning on a switch.""" - state = 
hass.states.get("switch.mock_onoffpluginunit_switch") + state = hass.states.get("switch.mock_onoffpluginunit") assert state assert state.state == "off" @@ -26,7 +43,7 @@ async def test_turn_on( "switch", "turn_on", { - "entity_id": "switch.mock_onoffpluginunit_switch", + "entity_id": "switch.mock_onoffpluginunit", }, blocking=True, ) @@ -41,7 +58,7 @@ async def test_turn_on( set_node_attribute(matter_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("switch.mock_onoffpluginunit_switch") + state = hass.states.get("switch.mock_onoffpluginunit") assert state assert state.state == "on" @@ -53,7 +70,7 @@ async def test_turn_off( matter_node: MatterNode, ) -> None: """Test turning off a switch.""" - state = hass.states.get("switch.mock_onoffpluginunit_switch") + state = hass.states.get("switch.mock_onoffpluginunit") assert state assert state.state == "off" @@ -61,7 +78,7 @@ async def test_turn_off( "switch", "turn_off", { - "entity_id": "switch.mock_onoffpluginunit_switch", + "entity_id": "switch.mock_onoffpluginunit", }, blocking=True, ) @@ -80,10 +97,10 @@ async def test_switch_unit(hass: HomeAssistant, matter_node: MatterNode) -> None # A switch entity should be discovered as fallback for ANY Matter device (endpoint) # that has the OnOff cluster and does not fall into an explicit discovery schema # by another platform (e.g. light, lock etc.). - state = hass.states.get("switch.mock_switchunit_switch") + state = hass.states.get("switch.mock_switchunit") assert state assert state.state == "off" - assert state.attributes["friendly_name"] == "Mock SwitchUnit Switch" + assert state.attributes["friendly_name"] == "Mock SwitchUnit" @pytest.mark.parametrize("node_fixture", ["room_airconditioner"]) diff --git a/tests/components/matter/test_update.py b/tests/components/matter/test_update.py index ad73bd38723b18..92576fa69e2eab 100644 --- a/tests/components/matter/test_update.py +++ b/tests/components/matter/test_update.py @@ -202,7 +202,8 @@ async def test_update_install( state = hass.states.get("update.mock_dimmable_light") assert state assert state.state == STATE_ON - assert state.attributes.get("in_progress") + assert state.attributes["in_progress"] is True + assert state.attributes["update_percentage"] is None set_node_attribute_typed( matter_node, @@ -215,7 +216,8 @@ async def test_update_install( state = hass.states.get("update.mock_dimmable_light") assert state assert state.state == STATE_ON - assert state.attributes.get("in_progress") == 50 + assert state.attributes["in_progress"] is True + assert state.attributes["update_percentage"] == 50 set_node_attribute_typed( matter_node, diff --git a/tests/components/matter/test_vacuum.py b/tests/components/matter/test_vacuum.py new file mode 100644 index 00000000000000..86f7542395aa1b --- /dev/null +++ b/tests/components/matter/test_vacuum.py @@ -0,0 +1,209 @@ +"""Test Matter vacuum.""" + +from unittest.mock import MagicMock, call + +from chip.clusters import Objects as clusters +from matter_server.client.models.node import MatterNode +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_vacuum( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + 
snapshot: SnapshotAssertion, +) -> None: + """Test that the correct entities get created for a vacuum device.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.VACUUM) + + +@pytest.mark.parametrize("node_fixture", ["vacuum_cleaner"]) +async def test_vacuum_actions( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test vacuum entity actions.""" + entity_id = "vacuum.mock_vacuum" + state = hass.states.get(entity_id) + assert state + + # test return_to_base action + await hass.services.async_call( + "vacuum", + "return_to_base", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.RvcOperationalState.Commands.GoHome(), + ) + matter_client.send_device_command.reset_mock() + + # test start/resume action + await hass.services.async_call( + "vacuum", + "start", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.RvcOperationalState.Commands.Resume(), + ) + matter_client.send_device_command.reset_mock() + + # test pause action + await hass.services.async_call( + "vacuum", + "pause", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.OperationalState.Commands.Pause(), + ) + matter_client.send_device_command.reset_mock() + + # test stop action + # stop command is not supported by the vacuum fixture + with pytest.raises( + HomeAssistantError, + match="Entity vacuum.mock_vacuum does not support this service.", + ): + await hass.services.async_call( + "vacuum", + "stop", + { + "entity_id": entity_id, + }, + blocking=True, + ) + + # update accepted command list to add support for stop command + set_node_attribute( + matter_node, 1, 97, 65529, [clusters.OperationalState.Commands.Stop.command_id] + ) + await trigger_subscription_callback(hass, matter_client) + await hass.services.async_call( + "vacuum", + "stop", + { + "entity_id": entity_id, + }, + blocking=True, + ) + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=matter_node.node_id, + endpoint_id=1, + command=clusters.OperationalState.Commands.Stop(), + ) + matter_client.send_device_command.reset_mock() + + +@pytest.mark.parametrize("node_fixture", ["vacuum_cleaner"]) +async def test_vacuum_updates( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test vacuum entity updates.""" + entity_id = "vacuum.mock_vacuum" + state = hass.states.get(entity_id) + assert state + # confirm initial state is idle (as stored in the fixture) + assert state.state == "idle" + + # confirm state is 'docked' by setting the operational state to 0x42 + set_node_attribute(matter_node, 1, 97, 4, 0x42) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "docked" + + # confirm state is 'docked' by setting the operational state to 0x41 + set_node_attribute(matter_node, 1, 97, 4, 0x41) + await trigger_subscription_callback(hass, 
matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "docked" + + # confirm state is 'returning' by setting the operational state to 0x40 + set_node_attribute(matter_node, 1, 97, 4, 0x40) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "returning" + + # confirm state is 'error' by setting the operational state to 0x01 + set_node_attribute(matter_node, 1, 97, 4, 0x01) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "error" + + # confirm state is 'error' by setting the operational state to 0x02 + set_node_attribute(matter_node, 1, 97, 4, 0x02) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "error" + + # confirm state is 'cleaning' by setting; + # - the operational state to 0x00 + # - the run mode is set to a mode which has cleaning tag + set_node_attribute(matter_node, 1, 97, 4, 0) + set_node_attribute(matter_node, 1, 84, 1, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "cleaning" + + # confirm state is 'idle' by setting; + # - the operational state to 0x00 + # - the run mode is set to a mode which has idle tag + set_node_attribute(matter_node, 1, 97, 4, 0) + set_node_attribute(matter_node, 1, 84, 1, 0) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "idle" + + # confirm state is 'unknown' by setting; + # - the operational state to 0x00 + # - the run mode is set to a mode which has neither cleaning or idle tag + set_node_attribute(matter_node, 1, 97, 4, 0) + set_node_attribute(matter_node, 1, 84, 1, 2) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state + assert state.state == "unknown" diff --git a/tests/components/matter/test_valve.py b/tests/components/matter/test_valve.py index df9e186f4a92b3..9c4429dda65e8c 100644 --- a/tests/components/matter/test_valve.py +++ b/tests/components/matter/test_valve.py @@ -5,10 +5,27 @@ from chip.clusters import Objects as clusters from matter_server.client.models.node import MatterNode import pytest +from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .common import set_node_attribute, trigger_subscription_callback +from .common import ( + set_node_attribute, + snapshot_matter_entities, + trigger_subscription_callback, +) + + +@pytest.mark.usefixtures("matter_devices") +async def test_valves( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test valves.""" + snapshot_matter_entities(hass, entity_registry, snapshot, Platform.VALVE) @pytest.mark.parametrize("node_fixture", ["valve"]) @@ -18,11 +35,11 @@ async def test_valve( matter_node: MatterNode, ) -> None: """Test valve entity is created for a Matter ValveConfigurationAndControl Cluster.""" - entity_id = "valve.valve_valve" + entity_id = "valve.valve" state = hass.states.get(entity_id) assert state assert state.state == "closed" - assert state.attributes["friendly_name"] == "Valve Valve" + assert state.attributes["friendly_name"] == "Valve" # test close_valve action await 
hass.services.async_call( diff --git a/tests/components/mealie/conftest.py b/tests/components/mealie/conftest.py index ba42d16e56ee57..8e724e4d8ea8ff 100644 --- a/tests/components/mealie/conftest.py +++ b/tests/components/mealie/conftest.py @@ -1,7 +1,7 @@ """Mealie tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from aiomealie import ( About, @@ -20,7 +20,6 @@ from homeassistant.const import CONF_API_TOKEN, CONF_HOST from tests.common import MockConfigEntry, load_fixture -from tests.components.smhi.common import AsyncMock SHOPPING_LIST_ID = "list-id-1" SHOPPING_ITEM_NOTE = "Shopping Item 1" diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index aee2506b8651a4..15c629ec3da33b 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -244,7 +244,7 @@ async def test_reconfigure_flow( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -273,7 +273,7 @@ async def test_reconfigure_flow_wrong_account( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" @@ -308,7 +308,7 @@ async def test_reconfigure_flow_exceptions( result = await mock_config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -316,7 +316,7 @@ async def test_reconfigure_flow_exceptions( ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": error} mock_mealie_client.get_user_info.side_effect = None diff --git a/tests/components/media_player/test_browse_media.py b/tests/components/media_player/test_browse_media.py index 2b7e40923bf623..ea684ea2bc2a56 100644 --- a/tests/components/media_player/test_browse_media.py +++ b/tests/components/media_player/test_browse_media.py @@ -7,8 +7,8 @@ from homeassistant.components.media_player.browse_media import ( async_process_play_media_url, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.network import NoURLAvailableError diff --git a/tests/components/media_source/test_local_source.py b/tests/components/media_source/test_local_source.py index de90f229a85be1..d3ae95736a5f54 100644 --- a/tests/components/media_source/test_local_source.py +++ b/tests/components/media_source/test_local_source.py @@ -11,8 +11,8 @@ from homeassistant.components import media_source, websocket_api from homeassistant.components.media_source import const -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup 
import async_setup_component from tests.common import MockUser diff --git a/tests/components/met/test_config_flow.py b/tests/components/met/test_config_flow.py index c7f0311edefb60..1a2485615d7023 100644 --- a/tests/components/met/test_config_flow.py +++ b/tests/components/met/test_config_flow.py @@ -8,9 +8,9 @@ from homeassistant import config_entries from homeassistant.components.met.const import DOMAIN, HOME_LOCATION_NAME -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from . import init_integration diff --git a/tests/components/met/test_init.py b/tests/components/met/test_init.py index b329e2ff01cb78..54f6930513b952 100644 --- a/tests/components/met/test_init.py +++ b/tests/components/met/test_init.py @@ -7,9 +7,9 @@ DEFAULT_HOME_LONGITUDE, DOMAIN, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr from . import init_integration diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index 0f11501843ed05..e10ec589113859 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -10,8 +10,8 @@ from homeassistant.components import tts from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID from homeassistant.components.microsoft.tts import SUPPORTED_LANGUAGES -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import ServiceNotFound from homeassistant.setup import async_setup_component diff --git a/tests/components/mikrotik/test_config_flow.py b/tests/components/mikrotik/test_config_flow.py index d95a6488fc7b00..f65c7f0dfc5b04 100644 --- a/tests/components/mikrotik/test_config_flow.py +++ b/tests/components/mikrotik/test_config_flow.py @@ -14,6 +14,7 @@ ) from homeassistant.const import ( CONF_HOST, + CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, @@ -179,7 +180,10 @@ async def test_reauth_success(hass: HomeAssistant, api) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {CONF_USERNAME: "username"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "username", + } result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index e1c7ed27cf99b8..a4edbea6ecf8ba 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -226,3 +226,37 @@ async def test_delete_cloud_hook( await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED assert (CONF_CLOUDHOOK_URL in config_entry.data) == should_cloudhook_exist + + +async def test_remove_entry_on_user_remove( + hass: HomeAssistant, + hass_admin_user: MockUser, +) -> None: + """Test removing related config entry, when a user gets removed from HA.""" + + 
config_entry = MockConfigEntry( + data={ + **REGISTER_CLEARTEXT, + CONF_WEBHOOK_ID: "test-webhook-id", + ATTR_DEVICE_NAME: "Test", + ATTR_DEVICE_ID: "Test", + CONF_USER_ID: hass_admin_user.id, + CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", + }, + domain=DOMAIN, + title="Test", + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + + await hass.auth.async_remove_user(hass_admin_user) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 0 diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 5c612f9f8ad914..cdea046ceeab81 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -57,7 +57,7 @@ def check_config_loaded_fixture(): @pytest.fixture(name="register_words") def register_words_fixture(): """Set default for register_words.""" - return [0x00, 0x00] + return [0x00] @pytest.fixture(name="config_addon") diff --git a/tests/components/modbus/test_binary_sensor.py b/tests/components/modbus/test_binary_sensor.py index 6aae0e7feaeac6..24293377174c54 100644 --- a/tests/components/modbus/test_binary_sensor.py +++ b/tests/components/modbus/test_binary_sensor.py @@ -3,6 +3,7 @@ import pytest from homeassistant.components.binary_sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -15,10 +16,12 @@ MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_BINARY_SENSORS, CONF_DEVICE_CLASS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, CONF_UNIQUE_ID, @@ -26,7 +29,7 @@ STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -212,14 +215,20 @@ async def test_service_binary_sensor_update( """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -428,7 +437,7 @@ async def test_no_discovery_info_binary_sensor( assert await async_setup_component( hass, SENSOR_DOMAIN, - {SENSOR_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {SENSOR_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SENSOR_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index 5578234ee6eaa9..d34846639b5bf5 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -20,6 
+20,10 @@ FAN_OFF, FAN_ON, FAN_TOP, + SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_SWING_MODE, + SERVICE_SET_TEMPERATURE, SWING_BOTH, SWING_HORIZONTAL, SWING_OFF, @@ -27,6 +31,7 @@ SWING_VERTICAL, HVACMode, ) +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CONF_CLIMATES, CONF_DATA_TYPE, @@ -66,15 +71,17 @@ DataType, ) from homeassistant.const import ( + ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_ADDRESS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -152,13 +159,13 @@ CONF_HVAC_MODE_REGISTER: { CONF_ADDRESS: 11, CONF_HVAC_MODE_VALUES: { - "state_off": 0, - "state_heat": 1, - "state_cool": 2, - "state_heat_cool": 3, - "state_dry": 4, - "state_fan_only": 5, - "state_auto": 6, + CONF_HVAC_MODE_OFF: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_COOL: 2, + CONF_HVAC_MODE_HEAT_COOL: 3, + CONF_HVAC_MODE_DRY: 4, + CONF_HVAC_MODE_FAN_ONLY: 5, + CONF_HVAC_MODE_AUTO: 6, }, }, } @@ -176,13 +183,13 @@ CONF_ADDRESS: 11, CONF_WRITE_REGISTERS: True, CONF_HVAC_MODE_VALUES: { - "state_off": 0, - "state_heat": 1, - "state_cool": 2, - "state_heat_cool": 3, - "state_dry": 4, - "state_fan_only": 5, - "state_auto": 6, + CONF_HVAC_MODE_OFF: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_COOL: 2, + CONF_HVAC_MODE_HEAT_COOL: 3, + CONF_HVAC_MODE_DRY: 4, + CONF_HVAC_MODE_FAN_ONLY: 5, + CONF_HVAC_MODE_AUTO: 6, }, }, } @@ -501,7 +508,10 @@ async def test_service_climate_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == result @@ -616,7 +626,10 @@ async def test_service_climate_fan_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).attributes[ATTR_FAN_MODE] == result @@ -756,7 +769,10 @@ async def test_service_climate_swing_update( """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).attributes[ATTR_SWING_MODE] == result @@ -850,9 +866,9 @@ async def test_service_climate_set_temperature( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - "set_temperature", + SERVICE_SET_TEMPERATURE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature, }, blocking=True, 
@@ -961,9 +977,9 @@ async def test_service_set_hvac_mode( await hass.services.async_call( CLIMATE_DOMAIN, - "set_hvac_mode", + SERVICE_SET_HVAC_MODE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode, }, blocking=True, @@ -1024,9 +1040,9 @@ async def test_service_set_fan_mode( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - "set_fan_mode", + SERVICE_SET_FAN_MODE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode, }, blocking=True, @@ -1087,9 +1103,9 @@ async def test_service_set_swing_mode( mock_modbus_ha.read_holding_registers.return_value = ReadResult(result) await hass.services.async_call( CLIMATE_DOMAIN, - "set_swing_mode", + SERVICE_SET_SWING_MODE, { - "entity_id": ENTITY_ID, + ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode, }, blocking=True, @@ -1174,7 +1190,7 @@ async def test_no_discovery_info_climate( assert await async_setup_component( hass, CLIMATE_DOMAIN, - {CLIMATE_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {CLIMATE_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert CLIMATE_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_cover.py b/tests/components/modbus/test_cover.py index 0860b3136ba838..ae709f483e1114 100644 --- a/tests/components/modbus/test_cover.py +++ b/tests/components/modbus/test_cover.py @@ -3,7 +3,8 @@ from pymodbus.exceptions import ModbusException import pytest -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_REGISTER_HOLDING, @@ -18,18 +19,18 @@ MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COVERS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -99,23 +100,23 @@ async def test_config_cover(hass: HomeAssistant, mock_modbus) -> None: [ ( [0x00], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0x80], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0xFE], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0xFF], - STATE_OPEN, + CoverState.OPEN, ), ( [0x01], - STATE_OPEN, + CoverState.OPEN, ), ], ) @@ -143,23 +144,23 @@ async def test_coil_cover(hass: HomeAssistant, expected, mock_do_cycle) -> None: [ ( [0x00], - STATE_CLOSED, + CoverState.CLOSED, ), ( [0x80], - STATE_OPEN, + CoverState.OPEN, ), ( [0xFE], - STATE_OPEN, + CoverState.OPEN, ), ( [0xFF], - STATE_OPEN, + CoverState.OPEN, ), ( [0x01], - STATE_OPEN, + CoverState.OPEN, ), ], ) @@ -185,23 +186,29 @@ async def test_register_cover(hass: HomeAssistant, expected, mock_do_cycle) -> N async def test_service_cover_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + "update_entity", + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) - assert hass.states.get(ENTITY_ID).state == 
STATE_CLOSED + assert hass.states.get(ENTITY_ID).state == CoverState.CLOSED mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) - assert hass.states.get(ENTITY_ID).state == STATE_OPEN + assert hass.states.get(ENTITY_ID).state == CoverState.OPEN @pytest.mark.parametrize( "mock_test_state", [ - (State(ENTITY_ID, STATE_CLOSED),), - (State(ENTITY_ID, STATE_CLOSING),), - (State(ENTITY_ID, STATE_OPENING),), - (State(ENTITY_ID, STATE_OPEN),), + (State(ENTITY_ID, CoverState.CLOSED),), + (State(ENTITY_ID, CoverState.CLOSING),), + (State(ENTITY_ID, CoverState.OPENING),), + (State(ENTITY_ID, CoverState.OPEN),), ], indirect=True, ) @@ -260,27 +267,27 @@ async def test_service_cover_move(hass: HomeAssistant, mock_modbus_ha) -> None: mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x01]) await hass.services.async_call( - "cover", "open_cover", {"entity_id": ENTITY_ID}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == STATE_OPEN + assert hass.states.get(ENTITY_ID).state == CoverState.OPEN mock_modbus_ha.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "cover", "close_cover", {"entity_id": ENTITY_ID}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True ) - assert hass.states.get(ENTITY_ID).state == STATE_CLOSED + assert hass.states.get(ENTITY_ID).state == CoverState.CLOSED await mock_modbus_ha.reset() mock_modbus_ha.read_holding_registers.side_effect = ModbusException("fail write_") await hass.services.async_call( - "cover", "close_cover", {"entity_id": ENTITY_ID}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True ) assert mock_modbus_ha.read_holding_registers.called assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE mock_modbus_ha.read_coils.side_effect = ModbusException("fail write_") await hass.services.async_call( - "cover", "close_cover", {"entity_id": ENTITY_ID2}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID2}, blocking=True ) assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE @@ -293,7 +300,7 @@ async def test_no_discovery_info_cover( assert await async_setup_component( hass, COVER_DOMAIN, - {COVER_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {COVER_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert COVER_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_fan.py b/tests/components/modbus/test_fan.py index d52b9dc309a82b..2afc6314048be2 100644 --- a/tests/components/modbus/test_fan.py +++ b/tests/components/modbus/test_fan.py @@ -4,6 +4,7 @@ import pytest from homeassistant.components.fan import DOMAIN as FAN_DOMAIN +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -19,17 +20,21 @@ MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN 
as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -269,12 +274,12 @@ async def test_fan_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - "fan", "turn_on", service_data={"entity_id": ENTITY_ID} + FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - "fan", "turn_off", service_data={"entity_id": ENTITY_ID} + FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -282,26 +287,26 @@ async def test_fan_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - "fan", "turn_on", service_data={"entity_id": ENTITY_ID2} + FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "fan", "turn_off", service_data={"entity_id": ENTITY_ID2} + FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - "fan", "turn_on", service_data={"entity_id": ENTITY_ID2} + FAN_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE mock_modbus.write_coil.side_effect = ModbusException("fail write_") await hass.services.async_call( - "fan", "turn_off", service_data={"entity_id": ENTITY_ID} + FAN_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE @@ -325,12 +330,18 @@ async def test_fan_service_turn( async def test_service_fan_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -343,7 +354,7 @@ async def test_no_discovery_info_fan( assert await async_setup_component( hass, FAN_DOMAIN, - {FAN_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {FAN_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert FAN_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 70230e7d326195..3b8a76f560680a 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -52,7 +52,6 @@ CONF_INPUT_TYPE, CONF_MSG_WAIT, CONF_PARITY, - CONF_RETRIES, 
CONF_SLAVE_COUNT, CONF_STOPBITS, CONF_SWAP, @@ -68,7 +67,6 @@ MODBUS_DOMAIN as DOMAIN, RTUOVERTCP, SERIAL, - SERVICE_RESTART, SERVICE_STOP, SERVICE_WRITE_COIL, SERVICE_WRITE_REGISTER, @@ -573,18 +571,6 @@ async def test_no_duplicate_names(hass: HomeAssistant, do_config) -> None: } ], }, - { - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_RETRIES: 3, - CONF_SENSORS: [ - { - CONF_NAME: "dummy", - CONF_ADDRESS: 9999, - } - ], - }, { CONF_TYPE: TCP, CONF_HOST: TEST_MODBUS_HOST, @@ -1149,61 +1135,6 @@ async def test_shutdown( assert caplog.text == "" -@pytest.mark.parametrize( - "do_config", - [ - { - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 51, - CONF_SLAVE: 0, - } - ] - }, - ], -) -async def test_stop_restart( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus -) -> None: - """Run test for service stop.""" - - caplog.set_level(logging.WARNING) - entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") - assert hass.states.get(entity_id).state in (STATE_UNKNOWN, STATE_UNAVAILABLE) - hass.states.async_set(entity_id, 17) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "17" - - mock_modbus.reset_mock() - caplog.clear() - data = { - ATTR_HUB: TEST_MODBUS_NAME, - } - await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - assert mock_modbus.close.called - assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text - - mock_modbus.reset_mock() - caplog.clear() - await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) - await hass.async_block_till_done() - assert not mock_modbus.close.called - assert mock_modbus.connect.called - assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text - - mock_modbus.reset_mock() - caplog.clear() - await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True) - await hass.async_block_till_done() - assert mock_modbus.close.called - assert mock_modbus.connect.called - assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text - assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text - - @pytest.mark.parametrize("do_config", [{}]) async def test_write_no_client(hass: HomeAssistant, mock_modbus) -> None: """Run test for service stop and write without client.""" diff --git a/tests/components/modbus/test_light.py b/tests/components/modbus/test_light.py index e74da085180e12..745249ff866f3f 100644 --- a/tests/components/modbus/test_light.py +++ b/tests/components/modbus/test_light.py @@ -3,6 +3,7 @@ from pymodbus.exceptions import ModbusException import pytest +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, @@ -18,18 +19,22 @@ MODBUS_DOMAIN, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_LIGHTS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY_NAME, ReadResult @@ -269,12 +274,12 @@ async def test_light_service_turn( 
assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": ENTITY_ID} + LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - "light", "turn_off", service_data={"entity_id": ENTITY_ID} + LIGHT_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -282,20 +287,20 @@ async def test_light_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": ENTITY_ID2} + LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "light", "turn_off", service_data={"entity_id": ENTITY_ID2} + LIGHT_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": ENTITY_ID2} + LIGHT_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE @@ -319,12 +324,18 @@ async def test_light_service_turn( async def test_service_light_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -337,7 +348,7 @@ async def test_no_discovery_info_light( assert await async_setup_component( hass, LIGHT_DOMAIN, - {LIGHT_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {LIGHT_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert LIGHT_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_sensor.py b/tests/components/modbus/test_sensor.py index 87015fa634ced4..fc63a300c5c623 100644 --- a/tests/components/modbus/test_sensor.py +++ b/tests/components/modbus/test_sensor.py @@ -4,13 +4,13 @@ import pytest +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_REGISTER_INPUT, CONF_DATA_TYPE, CONF_DEVICE_ADDRESS, CONF_INPUT_TYPE, - CONF_LAZY_ERROR, CONF_MAX_VALUE, CONF_MIN_VALUE, CONF_NAN_VALUE, @@ -32,11 +32,13 @@ SensorStateClass, ) from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COUNT, CONF_DEVICE_CLASS, CONF_NAME, CONF_OFFSET, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SENSORS, CONF_SLAVE, @@ -45,7 +47,7 @@ 
STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -166,17 +168,6 @@ } ] }, - { - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 51, - CONF_DATA_TYPE: DataType.INT32, - CONF_VIRTUAL_COUNT: 5, - CONF_LAZY_ERROR: 3, - } - ] - }, { CONF_SENSORS: [ { @@ -1395,12 +1386,18 @@ async def test_service_sensor_update(hass: HomeAssistant, mock_modbus_ha) -> Non """Run test for service homeassistant.update_entity.""" mock_modbus_ha.read_input_registers.return_value = ReadResult([27]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == "27" mock_modbus_ha.read_input_registers.return_value = ReadResult([32]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == "32" @@ -1413,7 +1410,7 @@ async def test_no_discovery_info_sensor( assert await async_setup_component( hass, SENSOR_DOMAIN, - {SENSOR_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {SENSOR_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SENSOR_DOMAIN in hass.config.components diff --git a/tests/components/modbus/test_switch.py b/tests/components/modbus/test_switch.py index bdb95c667c765f..4e0ad0841eaf5b 100644 --- a/tests/components/modbus/test_switch.py +++ b/tests/components/modbus/test_switch.py @@ -6,6 +6,7 @@ from pymodbus.exceptions import ModbusException import pytest +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.components.modbus.const import ( CALL_TYPE_COIL, CALL_TYPE_DISCRETE, @@ -21,20 +22,24 @@ ) from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_ADDRESS, CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_DELAY, CONF_DEVICE_CLASS, CONF_NAME, + CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_SLAVE, CONF_SWITCHES, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant, State +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -44,6 +49,7 @@ ENTITY_ID = f"{SWITCH_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") ENTITY_ID2 = f"{ENTITY_ID}_2" +ENTITY_ID3 = f"{ENTITY_ID}_3" @pytest.mark.parametrize( @@ -74,7 +80,7 @@ CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, CONF_ADDRESS: 1235, @@ -92,7 +98,7 @@ CONF_DEVICE_ADDRESS: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, CONF_ADDRESS: 1235, @@ -110,7 +116,7 @@ CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_REGISTER_INPUT, CONF_ADDRESS: 1235, 
@@ -129,7 +135,7 @@ CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_VERIFY: { CONF_INPUT_TYPE: CALL_TYPE_DISCRETE, CONF_ADDRESS: 1235, @@ -147,12 +153,48 @@ CONF_SLAVE: 1, CONF_COMMAND_OFF: 0x00, CONF_COMMAND_ON: 0x01, - CONF_DEVICE_CLASS: "switch", + CONF_DEVICE_CLASS: SWITCH_DOMAIN, CONF_SCAN_INTERVAL: 0, CONF_VERIFY: None, } ] }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1234, + CONF_DEVICE_ADDRESS: 10, + CONF_COMMAND_OFF: 0x00, + CONF_COMMAND_ON: 0x01, + CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_VERIFY: { + CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, + CONF_ADDRESS: 1235, + CONF_STATE_OFF: 0, + CONF_STATE_ON: [1, 2, 3], + }, + } + ] + }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1236, + CONF_DEVICE_ADDRESS: 10, + CONF_COMMAND_OFF: 0x00, + CONF_COMMAND_ON: 0x01, + CONF_DEVICE_CLASS: SWITCH_DOMAIN, + CONF_VERIFY: { + CONF_INPUT_TYPE: CALL_TYPE_REGISTER_HOLDING, + CONF_ADDRESS: 1235, + CONF_STATE_OFF: [0, 5, 6], + CONF_STATE_ON: 1, + }, + } + ] + }, ], ) async def test_config_switch(hass: HomeAssistant, mock_modbus) -> None: @@ -218,6 +260,18 @@ async def test_config_switch(hass: HomeAssistant, mock_modbus) -> None: None, STATE_OFF, ), + ( + [0x03], + False, + {CONF_VERIFY: {CONF_STATE_ON: [1, 3]}}, + STATE_ON, + ), + ( + [0x04], + False, + {CONF_VERIFY: {CONF_STATE_OFF: [0, 4]}}, + STATE_OFF, + ), ], ) async def test_all_switch(hass: HomeAssistant, mock_do_cycle, expected) -> None: @@ -269,6 +323,13 @@ async def test_restore_state_switch( CONF_SCAN_INTERVAL: 0, CONF_VERIFY: {}, }, + { + CONF_NAME: f"{TEST_ENTITY_NAME} 3", + CONF_ADDRESS: 18, + CONF_WRITE_TYPE: CALL_TYPE_REGISTER_HOLDING, + CONF_SCAN_INTERVAL: 0, + CONF_VERIFY: {CONF_STATE_ON: [1, 3]}, + }, ], }, ], @@ -283,12 +344,12 @@ async def test_switch_service_turn( assert hass.states.get(ENTITY_ID).state == STATE_OFF await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_ON await hass.services.async_call( - "switch", "turn_off", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -296,29 +357,48 @@ async def test_switch_service_turn( mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) assert hass.states.get(ENTITY_ID2).state == STATE_OFF await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID2} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_ON mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) await hass.services.async_call( - "switch", "turn_off", service_data={"entity_id": ENTITY_ID2} + SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_OFF + mock_modbus.read_holding_registers.return_value = ReadResult([0x03]) + assert hass.states.get(ENTITY_ID3).state == STATE_OFF + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID3} + ) + await hass.async_block_till_done() + assert 
hass.states.get(ENTITY_ID3).state == STATE_ON + mock_modbus.read_holding_registers.return_value = ReadResult([0x00]) + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID3} + ) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID3).state == STATE_OFF mock_modbus.write_register.side_effect = ModbusException("fail write_") await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID2} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID2} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID2).state == STATE_UNAVAILABLE mock_modbus.write_coil.side_effect = ModbusException("fail write_") await hass.services.async_call( - "switch", "turn_off", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_OFF, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_UNAVAILABLE + mock_modbus.write_register.side_effect = ModbusException("fail write_") + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID3} + ) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID3).state == STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -334,17 +414,43 @@ async def test_switch_service_turn( } ] }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1236, + CONF_WRITE_TYPE: CALL_TYPE_COIL, + CONF_VERIFY: {CONF_STATE_ON: [1, 3]}, + } + ] + }, + { + CONF_SWITCHES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_ADDRESS: 1235, + CONF_WRITE_TYPE: CALL_TYPE_COIL, + CONF_VERIFY: {CONF_STATE_OFF: [0, 5]}, + } + ] + }, ], ) async def test_service_switch_update(hass: HomeAssistant, mock_modbus_ha) -> None: """Run test for service homeassistant.update_entity.""" await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_OFF mock_modbus_ha.read_coils.return_value = ReadResult([0x01]) await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": ENTITY_ID}, blocking=True + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, ) assert hass.states.get(ENTITY_ID).state == STATE_ON @@ -372,7 +478,7 @@ async def test_delay_switch(hass: HomeAssistant, mock_modbus) -> None: mock_modbus.read_holding_registers.return_value = ReadResult([0x01]) now = dt_util.utcnow() await hass.services.async_call( - "switch", "turn_on", service_data={"entity_id": ENTITY_ID} + SWITCH_DOMAIN, SERVICE_TURN_ON, service_data={ATTR_ENTITY_ID: ENTITY_ID} ) await hass.async_block_till_done() assert hass.states.get(ENTITY_ID).state == STATE_OFF @@ -391,7 +497,7 @@ async def test_no_discovery_info_switch( assert await async_setup_component( hass, SWITCH_DOMAIN, - {SWITCH_DOMAIN: {"platform": MODBUS_DOMAIN}}, + {SWITCH_DOMAIN: {CONF_PLATFORM: MODBUS_DOMAIN}}, ) await hass.async_block_till_done() assert SWITCH_DOMAIN in hass.config.components diff --git a/tests/components/motionblinds_ble/test_cover.py b/tests/components/motionblinds_ble/test_cover.py index 2f6b33b3017cf1..009bd1d0fa3a63 100644 --- a/tests/components/motionblinds_ble/test_cover.py +++ b/tests/components/motionblinds_ble/test_cover.py @@ -18,10 +18,7 @@ SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - 
STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -74,8 +71,8 @@ async def test_cover_service( [ (None, "unknown"), (MotionRunningType.STILL, "unknown"), - (MotionRunningType.OPENING, STATE_OPENING), - (MotionRunningType.CLOSING, STATE_CLOSING), + (MotionRunningType.OPENING, CoverState.OPENING), + (MotionRunningType.CLOSING, CoverState.CLOSING), ], ) async def test_cover_update_running( @@ -101,9 +98,9 @@ async def test_cover_update_running( ("position", "tilt", "state"), [ (None, None, "unknown"), - (0, 0, STATE_OPEN), - (50, 90, STATE_OPEN), - (100, 180, STATE_CLOSED), + (0, 0, CoverState.OPEN), + (50, 90, CoverState.OPEN), + (100, 180, CoverState.CLOSED), ], ) async def test_cover_update_position( diff --git a/tests/components/motioneye/__init__.py b/tests/components/motioneye/__init__.py index 3a80e6dc63d355..842d862a222f5a 100644 --- a/tests/components/motioneye/__init__.py +++ b/tests/components/motioneye/__init__.py @@ -9,10 +9,10 @@ from homeassistant.components.motioneye.const import DOMAIN from homeassistant.components.motioneye.entity import get_motioneye_entity_unique_id -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry diff --git a/tests/components/motioneye/test_config_flow.py b/tests/components/motioneye/test_config_flow.py index d2ec91b08e38f4..8d942e7a2a1f89 100644 --- a/tests/components/motioneye/test_config_flow.py +++ b/tests/components/motioneye/test_config_flow.py @@ -9,7 +9,6 @@ ) from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.motioneye.const import ( CONF_ADMIN_PASSWORD, CONF_ADMIN_USERNAME, @@ -23,6 +22,7 @@ from homeassistant.const import CONF_URL, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . 
import TEST_URL, create_mock_motioneye_client, create_mock_motioneye_config_entry diff --git a/tests/components/mqtt/conftest.py b/tests/components/mqtt/conftest.py index 7395767aeae3ef..22f0416a2c61a0 100644 --- a/tests/components/mqtt/conftest.py +++ b/tests/components/mqtt/conftest.py @@ -4,7 +4,7 @@ from collections.abc import AsyncGenerator, Generator from random import getrandbits from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -87,7 +87,8 @@ async def setup_with_birth_msg_client_mock( patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0), ): entry = MockConfigEntry( - domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} + domain=mqtt.DOMAIN, + data=mqtt_config_entry_data or {mqtt.CONF_BROKER: "test-broker"}, ) entry.add_to_hass(hass) hass.config.components.add(mqtt.DOMAIN) @@ -121,3 +122,10 @@ def record_calls(msg: ReceiveMessage) -> None: recorded_calls.append(msg) return record_calls + + +@pytest.fixture +def tag_mock() -> Generator[AsyncMock]: + """Fixture to mock tag.""" + with patch("homeassistant.components.tag.async_scan_tag") as mock_tag: + yield mock_tag diff --git a/tests/components/mqtt/test_alarm_control_panel.py b/tests/components/mqtt/test_alarm_control_panel.py index 07ebb671e37282..b46829650f6bce 100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -9,7 +9,10 @@ import pytest from homeassistant.components import alarm_control_panel, mqtt -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) from homeassistant.components.mqtt.alarm_control_panel import ( MQTT_ALARM_ATTRIBUTES_BLOCKED, ) @@ -25,16 +28,6 @@ SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, SERVICE_RELOAD, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -57,6 +50,7 @@ help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -213,23 +207,23 @@ async def test_update_state_via_state_topic( assert hass.states.get(entity_id).state == STATE_UNKNOWN for state in ( - STATE_ALARM_DISARMED, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_PENDING, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.ARMING, + AlarmControlPanelState.DISARMING, + AlarmControlPanelState.TRIGGERED, ): async_fire_mqtt_message(hass, "alarm/state", state) assert hass.states.get(entity_id).state == state - # Ignore empty payload (last state is STATE_ALARM_TRIGGERED) + # Ignore empty payload (last state is 
AlarmControlPanelState.TRIGGERED) async_fire_mqtt_message(hass, "alarm/state", "") - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # Reset state on `None` payload async_fire_mqtt_message(hass, "alarm/state", "None") @@ -769,7 +763,7 @@ async def test_update_state_via_state_topic_template( async_fire_mqtt_message(hass, "test-topic", "100") state = hass.states.get("alarm_control_panel.test") - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY @pytest.mark.parametrize( @@ -1287,6 +1281,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = alarm_control_panel.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ @@ -1306,7 +1312,11 @@ async def test_entity_name( @pytest.mark.parametrize( ("topic", "payload1", "payload2"), [ - ("test-topic", STATE_ALARM_DISARMED, STATE_ALARM_ARMED_HOME), + ( + "test-topic", + AlarmControlPanelState.DISARMED, + AlarmControlPanelState.ARMED_HOME, + ), ("availability-topic", "online", "offline"), ("json-attributes-topic", '{"attr1": "val1"}', '{"attr1": "val2"}'), ], diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index e2c168bd46e5b8..d27163c34234f8 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ b/tests/components/mqtt/test_binary_sensor.py @@ -40,6 +40,7 @@ help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -1133,7 +1134,7 @@ async def test_skip_restoring_state_with_over_due_expire_trigger( freezer.move_to("2022-02-02 12:02:00+01:00") domain = binary_sensor.DOMAIN - config3 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][domain]) + config3: ConfigType = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][domain]) config3["name"] = "test3" config3["expire_after"] = 10 config3["state_topic"] = "test-topic3" @@ -1193,6 +1194,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = binary_sensor.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_button.py b/tests/components/mqtt/test_button.py index d85ead6ecee300..f147b33c88bd09 100644 --- a/tests/components/mqtt/test_button.py +++ b/tests/components/mqtt/test_button.py @@ -25,6 +25,7 @@ help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_name, help_test_publishing_with_custom_encoding, @@ -534,3 +535,15 @@ async def test_entity_name( await help_test_entity_name( hass, mqtt_mock_entry, domain, config, expected_friendly_name, device_class ) + + +async def test_entity_icon_and_entity_picture( + hass: 
HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = button.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 31c062b1abda06..164c164cdfc97b 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1,9 +1,10 @@ """The tests for the MQTT client.""" import asyncio -from datetime import datetime, timedelta +from datetime import timedelta import socket import ssl +import time from typing import Any from unittest.mock import MagicMock, Mock, call, patch @@ -296,10 +297,13 @@ async def test_subscribe_mqtt_config_entry_disabled( mqtt_mock.connected = True mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - assert mqtt_config_entry.state is ConfigEntryState.LOADED + + mqtt_config_entry_state = mqtt_config_entry.state + assert mqtt_config_entry_state is ConfigEntryState.LOADED assert await hass.config_entries.async_unload(mqtt_config_entry.entry_id) - assert mqtt_config_entry.state is ConfigEntryState.NOT_LOADED + mqtt_config_entry_state = mqtt_config_entry.state + assert mqtt_config_entry_state is ConfigEntryState.NOT_LOADED await hass.config_entries.async_set_disabled_by( mqtt_config_entry.entry_id, ConfigEntryDisabler.USER @@ -1279,7 +1283,7 @@ def _callback(args) -> None: callbacks.append(args) msg = ReceiveMessage( - "some-topic", b"test-payload", 1, False, "some-topic", datetime.now() + "some-topic", b"test-payload", 1, False, "some-topic", time.monotonic() ) mock_debouncer.clear() await mqtt.async_subscribe(hass, "some-topic", _callback) @@ -1712,6 +1716,97 @@ async def test_mqtt_subscribes_topics_on_connect( assert ("still/pending", 1) in subscribe_calls +@pytest.mark.parametrize("mqtt_config_entry_data", [ENTRY_DEFAULT_BIRTH_MESSAGE]) +async def test_mqtt_subscribes_wildcard_topics_in_correct_order( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test subscription to wildcard topics on connect in the order of subscription.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "integration/test#", record_calls) + await mqtt.async_subscribe(hass, "integration/kitchen_sink#", record_calls) + await mock_debouncer.wait() + + def _assert_subscription_order(): + discovery_subscribes = [ + f"homeassistant/{platform}/+/config" for platform in SUPPORTED_COMPONENTS + ] + discovery_subscribes.extend( + [ + f"homeassistant/{platform}/+/+/config" + for platform in SUPPORTED_COMPONENTS + ] + ) + discovery_subscribes.extend( + ["homeassistant/device/+/config", "homeassistant/device/+/+/config"] + ) + discovery_subscribes.extend(["integration/test#", "integration/kitchen_sink#"]) + + expected_discovery_subscribes = discovery_subscribes.copy() + + # Assert we see the expected subscribes and in the correct order + actual_subscribes = [ + discovery_subscribes.pop(0) + for call in help_all_subscribe_calls(mqtt_client_mock) + if discovery_subscribes and discovery_subscribes[0] == call[0] + ] + + # Assert we have processed all items and that they are in the correct order + assert len(discovery_subscribes) == 0 + assert actual_subscribes == expected_discovery_subscribes + + # Assert the initial wildcard topic subscription order + 
_assert_subscription_order() + + mqtt_client_mock.on_disconnect(Mock(), None, 0) + + mqtt_client_mock.reset_mock() + + mock_debouncer.clear() + mqtt_client_mock.on_connect(Mock(), None, 0, 0) + await mock_debouncer.wait() + + # Assert the wildcard topic subscription order after a reconnect + _assert_subscription_order() + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ENTRY_DEFAULT_BIRTH_MESSAGE | {mqtt.CONF_DISCOVERY: False}], +) +async def test_mqtt_discovery_not_subscribes_when_disabled( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test discovery subscriptions not performed when discovery is disabled.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + + await mock_debouncer.wait() + + subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) + for component in SUPPORTED_COMPONENTS: + assert (f"homeassistant/{component}/+/config", 0) not in subscribe_calls + assert (f"homeassistant/{component}/+/+/config", 0) not in subscribe_calls + + mqtt_client_mock.on_disconnect(Mock(), None, 0) + + mqtt_client_mock.reset_mock() + + mock_debouncer.clear() + mqtt_client_mock.on_connect(Mock(), None, 0, 0) + await mock_debouncer.wait() + + subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) + for component in SUPPORTED_COMPONENTS: + assert (f"homeassistant/{component}/+/config", 0) not in subscribe_calls + assert (f"homeassistant/{component}/+/+/config", 0) not in subscribe_calls + + @pytest.mark.parametrize( "mqtt_config_entry_data", [ENTRY_DEFAULT_BIRTH_MESSAGE], diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index 13bd6b5feda555..5edd73e3f5ae03 100644 --- a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -53,6 +53,7 @@ help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_publishing_with_custom_encoding, @@ -202,7 +203,7 @@ async def test_set_operation_bad_attr_and_state( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_hvac_mode(hass, None, ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] assert ( "expected HVACMode or one of 'off', 'heat', 'cool', 'heat_cool', 'auto', 'dry'," " 'fan_only' for dictionary value @ data['hvac_mode']" in str(excinfo.value) @@ -220,10 +221,9 @@ async def test_set_operation( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" - assert state.state == "cool" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "cool", 0, False) @@ -245,7 +245,7 @@ async def test_set_operation_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.state == STATE_UNKNOWN - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == STATE_UNKNOWN @@ -287,7 +287,7 @@ async def test_set_operation_optimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - 
await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" @@ -316,13 +316,13 @@ async def test_set_operation_with_power_command( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "cool", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "off", 0, False)]) @@ -358,12 +358,12 @@ async def test_turn_on_and_off_optimistic_with_power_command( state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.assert_has_calls([call("mode-topic", "cool", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "off" @@ -374,7 +374,7 @@ async def test_turn_on_and_off_optimistic_with_power_command( mqtt_mock.async_publish.assert_has_calls([call("power-command", "ON", 0, False)]) mqtt_mock.async_publish.reset_mock() - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" await common.async_turn_off(hass, ENTITY_CLIMATE) @@ -433,7 +433,7 @@ async def test_turn_on_and_off_without_power_command( else: mqtt_mock.async_publish.assert_has_calls([]) - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" mqtt_mock.async_publish.reset_mock() @@ -460,7 +460,7 @@ async def test_set_fan_mode_bad_attr( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("fan_mode") == "low" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_fan_mode(hass, None, ENTITY_CLIMATE) + await common.async_set_fan_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] assert "string value is None for dictionary value @ data['fan_mode']" in str( excinfo.value ) @@ -555,7 +555,7 @@ async def test_set_swing_mode_bad_attr( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("swing_mode") == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_swing_mode(hass, None, ENTITY_CLIMATE) + await common.async_set_swing_mode(hass, None, ENTITY_CLIMATE) # type:ignore[arg-type] assert "string value is None for dictionary value @ data['swing_mode']" in str( excinfo.value ) @@ -649,7 +649,7 @@ async def test_set_target_temperature( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 21 - await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) + await 
common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.state == "heat" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "heat", 0, False) @@ -712,7 +712,7 @@ async def test_set_target_temperature_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None - await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None @@ -744,7 +744,7 @@ async def test_set_target_temperature_optimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 21 - await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.HEAT, ENTITY_CLIMATE) await common.async_set_temperature(hass, temperature=17, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") == 17 @@ -1547,14 +1547,14 @@ async def test_set_and_templates( assert state.attributes.get("preset_mode") == PRESET_ECO # Mode - await common.async_set_hvac_mode(hass, "cool", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.COOL, ENTITY_CLIMATE) mqtt_mock.async_publish.assert_any_call("mode-topic", "mode: cool", 0, False) assert mqtt_mock.async_publish.call_count == 1 mqtt_mock.async_publish.reset_mock() state = hass.states.get(ENTITY_CLIMATE) assert state.state == "cool" - await common.async_set_hvac_mode(hass, "off", ENTITY_CLIMATE) + await common.async_set_hvac_mode(hass, HVACMode.OFF, ENTITY_CLIMATE) mqtt_mock.async_publish.assert_any_call("mode-topic", "mode: off", 0, False) assert mqtt_mock.async_publish.call_count == 1 mqtt_mock.async_publish.reset_mock() @@ -2449,3 +2449,15 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = climate.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index b89baf06254dac..95a26daf562272 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -69,10 +69,14 @@ _SENTINEL = object() DISCOVERY_COUNT = len(MQTT) +DEVICE_DISCOVERY_COUNT = 2 type _MqttMessageType = list[tuple[str, str]] type _AttributesType = list[tuple[str, Any]] -type _StateDataType = list[tuple[_MqttMessageType, str | None, _AttributesType | None]] +type _StateDataType = ( + list[tuple[_MqttMessageType, str, _AttributesType | None]] + | list[tuple[_MqttMessageType, str, None]] +) def help_all_subscribe_calls(mqtt_client_mock: MqttMockPahoClient) -> list[Any]: @@ -106,7 +110,7 @@ def help_custom_config( ) base.update(instance) entity_instances.append(base) - config[mqtt.DOMAIN][mqtt_entity_domain]: list[ConfigType] = entity_instances + config[mqtt.DOMAIN][mqtt_entity_domain] = entity_instances return config @@ -1186,7 +1190,10 @@ async def help_test_entity_id_update_subscriptions( assert state is not None assert ( mqtt_mock.async_subscribe.call_count - == len(topics) + 2 * 
len(SUPPORTED_COMPONENTS) + DISCOVERY_COUNT + == len(topics) + + 2 * len(SUPPORTED_COMPONENTS) + + DISCOVERY_COUNT + + DEVICE_DISCOVERY_COUNT ) for topic in topics: mqtt_mock.async_subscribe.assert_any_call( @@ -1360,11 +1367,11 @@ async def help_test_entity_debug_info_message( mqtt_mock_entry: MqttMockHAClientGenerator, domain: str, config: ConfigType, - service: str, + service: str | None, command_topic: str | None = None, command_payload: str | None = None, state_topic: str | object | None = _SENTINEL, - state_payload: str | None = None, + state_payload: bytes | str | None = None, service_parameters: dict[str, Any] | None = None, ) -> None: """Test debug_info. @@ -1665,6 +1672,61 @@ async def help_test_entity_category( assert not ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) +async def help_test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + domain: str, + config: ConfigType, + default_entity_picture: str | None = None, +) -> None: + """Test entity picture and icon.""" + await mqtt_mock_entry() + # Add device settings to config + config = copy.deepcopy(config[mqtt.DOMAIN][domain]) + config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) + + ent_registry = er.async_get(hass) + + # Discover an entity without entity icon or picture + unique_id = "veryunique1" + config["unique_id"] = unique_id + data = json.dumps(config) + async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) + await hass.async_block_till_done() + entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) + state = hass.states.get(entity_id) + assert entity_id is not None and state + assert state.attributes.get("icon") is None + assert state.attributes.get("entity_picture") == default_entity_picture + + # Discover an entity with an entity picture set + unique_id = "veryunique2" + config["entity_picture"] = "https://example.com/mypicture.png" + config["unique_id"] = unique_id + data = json.dumps(config) + async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) + await hass.async_block_till_done() + entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) + state = hass.states.get(entity_id) + assert entity_id is not None and state + assert state.attributes.get("icon") is None + assert state.attributes.get("entity_picture") == "https://example.com/mypicture.png" + config.pop("entity_picture") + + # Discover an entity with an entity icon set + unique_id = "veryunique3" + config["icon"] = "mdi:emoji-happy-outline" + config["unique_id"] = unique_id + data = json.dumps(config) + async_fire_mqtt_message(hass, f"homeassistant/{domain}/{unique_id}/config", data) + await hass.async_block_till_done() + entity_id = ent_registry.async_get_entity_id(domain, mqtt.DOMAIN, unique_id) + state = hass.states.get(entity_id) + assert entity_id is not None and state + assert state.attributes.get("icon") == "mdi:emoji-happy-outline" + assert state.attributes.get("entity_picture") == default_entity_picture + + async def help_test_publishing_with_custom_encoding( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 6812ab39247a3b..e99063b088b998 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -9,16 +9,13 @@ from uuid import uuid4 from aiohasupervisor import SupervisorError +from aiohasupervisor.models 
import Discovery import pytest import voluptuous as vol from homeassistant import config_entries from homeassistant.components import mqtt -from homeassistant.components.hassio import ( - AddonError, - HassioAPIError, - HassioServiceInfo, -) +from homeassistant.components.hassio import AddonError from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED from homeassistant.const import ( CONF_CLIENT_ID, @@ -29,6 +26,7 @@ ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient @@ -253,7 +251,7 @@ async def test_user_connection_works( assert len(mock_finish_setup.mock_calls) == 1 -@pytest.mark.usefixtures("mqtt_client_mock", "supervisor") +@pytest.mark.usefixtures("mqtt_client_mock", "supervisor", "supervisor_client") async def test_user_connection_works_with_supervisor( hass: HomeAssistant, mock_try_connection: MagicMock, @@ -420,7 +418,7 @@ async def test_hassio_already_configured(hass: HomeAssistant) -> None: "mqtt", context={"source": config_entries.SOURCE_HASSIO} ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["reason"] == "single_instance_allowed" async def test_hassio_ignored(hass: HomeAssistant) -> None: @@ -446,7 +444,7 @@ async def test_hassio_ignored(hass: HomeAssistant) -> None: ) assert result assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result.get("reason") == "single_instance_allowed" async def test_hassio_confirm( @@ -455,8 +453,6 @@ async def test_hassio_confirm( mock_finish_setup: MagicMock, ) -> None: """Test we can finish a config flow.""" - mock_try_connection.return_value = True - result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( @@ -534,7 +530,19 @@ async def test_hassio_cannot_connect( @pytest.mark.usefixtures( "mqtt_client_mock", "supervisor", "addon_info", "addon_running" ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async def test_addon_flow_with_supervisor_addon_running( hass: HomeAssistant, mock_try_connection_success: MagicMock, @@ -576,7 +584,19 @@ async def test_addon_flow_with_supervisor_addon_running( @pytest.mark.usefixtures( "mqtt_client_mock", "supervisor", "addon_info", "addon_installed", "start_addon" ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async def test_addon_flow_with_supervisor_addon_installed( hass: HomeAssistant, mock_try_connection_success: MagicMock, @@ -631,7 +651,19 @@ async def test_addon_flow_with_supervisor_addon_installed( @pytest.mark.usefixtures( "mqtt_client_mock", "supervisor", "addon_info", "addon_running" ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async 
def test_addon_flow_with_supervisor_addon_running_connection_fails( hass: HomeAssistant, mock_try_connection: MagicMock, @@ -786,7 +818,19 @@ async def test_addon_info_error( "install_addon", "start_addon", ) -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) async def test_addon_flow_with_supervisor_addon_not_installed( hass: HomeAssistant, mock_try_connection_success: MagicMock, @@ -858,7 +902,7 @@ async def test_addon_not_installed_failures( Case: The Mosquitto add-on install fails. """ - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( "mqtt", context={"source": config_entries.SOURCE_USER} @@ -1027,7 +1071,6 @@ async def test_bad_certificate( test_input.pop(mqtt.CONF_CLIENT_KEY) mqtt_mock = await mqtt_mock_entry() - mock_try_connection.return_value = True config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] # Add at least one advanced option to get the full form hass.config_entries.async_update_entry( @@ -1276,7 +1319,7 @@ async def test_invalid_discovery_prefix( def get_default(schema: vol.Schema, key: str) -> Any | None: """Get default value for key in voluptuous schema.""" - for schema_key in schema: + for schema_key in schema: # type:ignore[attr-defined] if schema_key == key: if schema_key.default == vol.UNDEFINED: return None @@ -1286,7 +1329,7 @@ def get_default(schema: vol.Schema, key: str) -> Any | None: def get_suggested(schema: vol.Schema, key: str) -> Any | None: """Get suggested value for key in voluptuous schema.""" - for schema_key in schema: + for schema_key in schema: # type:ignore[attr-defined] if schema_key == key: if ( schema_key.description is None @@ -1583,7 +1626,19 @@ async def test_step_reauth( await hass.async_block_till_done() -@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ] + ], +) @pytest.mark.usefixtures( "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" ) @@ -1632,8 +1687,30 @@ async def test_step_hassio_reauth( @pytest.mark.parametrize( ("discovery_info", "discovery_info_side_effect", "broker"), [ - ({"config": ADD_ON_DISCOVERY_INFO.copy()}, AddonError, "core-mosquitto"), - ({"config": ADD_ON_DISCOVERY_INFO.copy()}, None, "broker-not-addon"), + ( + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ], + AddonError, + "core-mosquitto", + ), + ( + [ + Discovery( + addon="core_mosquitto", + service="mqtt", + uuid=uuid4(), + config=ADD_ON_DISCOVERY_INFO.copy(), + ) + ], + None, + "broker-not-addon", + ), ], ) @pytest.mark.usefixtures( diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index 451665de96a6da..ee74b78be81cca 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -12,6 +12,7 @@ ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, + CoverState, ) from homeassistant.components.mqtt.const import CONF_STATE_TOPIC from homeassistant.components.mqtt.cover import ( @@ -39,9 +40,7 @@ SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, STATE_CLOSED, 
- STATE_CLOSING, STATE_OPEN, - STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -63,6 +62,7 @@ help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_publishing_with_custom_encoding, @@ -116,12 +116,12 @@ async def test_state_via_state_topic( async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", STATE_OPEN) state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "None") @@ -162,17 +162,17 @@ async def test_opening_and_closing_state_via_custom_state_payload( async_fire_mqtt_message(hass, "state-topic", "34") state = hass.states.get("cover.test") - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async_fire_mqtt_message(hass, "state-topic", "--43") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -197,11 +197,11 @@ async def test_opening_and_closing_state_via_custom_state_payload( @pytest.mark.parametrize( ("position", "assert_state"), [ - (0, STATE_CLOSED), - (1, STATE_OPEN), - (30, STATE_OPEN), - (99, STATE_OPEN), - (100, STATE_OPEN), + (0, CoverState.CLOSED), + (1, CoverState.OPEN), + (30, CoverState.OPEN), + (99, CoverState.OPEN), + (100, CoverState.OPEN), ], ) async def test_open_closed_state_from_position_optimistic( @@ -253,13 +253,13 @@ async def test_open_closed_state_from_position_optimistic( @pytest.mark.parametrize( ("position", "assert_state"), [ - (0, STATE_CLOSED), - (1, STATE_CLOSED), - (10, STATE_CLOSED), - (11, STATE_OPEN), - (30, STATE_OPEN), - (99, STATE_OPEN), - (100, STATE_OPEN), + (0, CoverState.CLOSED), + (1, CoverState.CLOSED), + (10, CoverState.CLOSED), + (11, CoverState.OPEN), + (30, CoverState.OPEN), + (99, CoverState.OPEN), + (100, CoverState.OPEN), ], ) async def test_open_closed_state_from_position_optimistic_alt_positions( @@ -449,12 +449,12 @@ async def test_position_via_position_topic( async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -490,12 +490,12 @@ async def test_state_via_template( async_fire_mqtt_message(hass, "state-topic", "10000") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "99") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -532,13 +532,13 @@ async def test_state_via_template_and_entity_id( async_fire_mqtt_message(hass, "state-topic", "invalid") state = hass.states.get("cover.test") - assert state.state == 
STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "closed") async_fire_mqtt_message(hass, "state-topic", "invalid") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -571,14 +571,14 @@ async def test_state_via_template_with_json_value( async_fire_mqtt_message(hass, "state-topic", '{ "Var1": "open", "Var2": "other" }') state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message( hass, "state-topic", '{ "Var1": "closed", "Var2": "other" }' ) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", '{ "Var2": "other" }') assert ( @@ -741,7 +741,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( cover.DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -750,7 +750,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED await hass.services.async_call( cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -759,7 +759,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( cover.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "cover.test"}, blocking=True @@ -767,7 +767,7 @@ async def test_optimistic_state_change( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -804,7 +804,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 await hass.services.async_call( @@ -814,7 +814,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 await hass.services.async_call( @@ -824,7 +824,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "OPEN", 0, False) mqtt_mock.async_publish.reset_mock() state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 await hass.services.async_call( @@ 
-833,7 +833,7 @@ async def test_optimistic_state_change_with_position( mqtt_mock.async_publish.assert_called_once_with("command-topic", "CLOSE", 0, False) state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 @@ -1026,35 +1026,35 @@ async def test_current_cover_position_inverted( ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 0 - assert hass.states.get("cover.test").state == STATE_CLOSED + assert hass.states.get("cover.test").state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "0") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 100 - assert hass.states.get("cover.test").state == STATE_OPEN + assert hass.states.get("cover.test").state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "50") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 50 - assert hass.states.get("cover.test").state == STATE_OPEN + assert hass.states.get("cover.test").state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "non-numeric") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 50 - assert hass.states.get("cover.test").state == STATE_OPEN + assert hass.states.get("cover.test").state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "101") current_percentage_cover_position = hass.states.get("cover.test").attributes[ ATTR_CURRENT_POSITION ] assert current_percentage_cover_position == 0 - assert hass.states.get("cover.test").state == STATE_CLOSED + assert hass.states.get("cover.test").state == CoverState.CLOSED @pytest.mark.parametrize( @@ -2738,32 +2738,32 @@ async def test_state_and_position_topics_state_not_set_via_position_topic( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "CLOSE") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -2800,27 +2800,27 @@ async def test_set_state_via_position_using_stopped_state( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "get-position-topic", "0") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN 
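# The cover-test assertions above and below switch from the STATE_OPEN /
# STATE_CLOSED / STATE_OPENING / STATE_CLOSING string constants to members of
# the CoverState enum.  CoverState is a str-based enum, so comparing it against
# the plain string state held by the state machine behaves the same; a quick
# sketch (assumes a homeassistant checkout on the path):
from homeassistant.components.cover import CoverState

assert CoverState.OPEN == "open"
assert CoverState.CLOSED == "closed"
assert CoverState.OPENING == "opening"
assert CoverState.CLOSING == "closing"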
async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "get-position-topic", "100") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize( @@ -3136,32 +3136,32 @@ async def test_set_state_via_stopped_state_no_position_topic( async_fire_mqtt_message(hass, "state-topic", "OPEN") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "OPENING") state = hass.states.get("cover.test") - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async_fire_mqtt_message(hass, "state-topic", "CLOSING") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async_fire_mqtt_message(hass, "state-topic", "STOPPED") state = hass.states.get("cover.test") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED @pytest.mark.parametrize( @@ -3549,3 +3549,15 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity name setup.""" + domain = cover.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 1acfe8dd9f5a96..009a0315029914 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -26,26 +26,46 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" +@pytest.mark.parametrize( + ("discovery_topic", "data"), + [ + ( + "homeassistant/device_automation/0AFFD2/bla/config", + '{ "automation_type":"trigger",' + ' "device":{"identifiers":["0AFFD2"]},' + ' "payload": "short_press",' + ' "topic": "foobar/triggers/button1",' + ' "type": "button_short_press",' + ' "subtype": "button_1" }', + ), + ( + "homeassistant/device/0AFFD2/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"}, "cmps": ' + '{ "bla": {' + ' "automation_type":"trigger", ' + ' "payload": "short_press",' + ' "topic": "foobar/triggers/button1",' + ' "type": "button_short_press",' + ' "subtype": "button_1",' + ' "platform":"device_automation"}}}', + ), + ], +) async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + data: str, ) -> None: """Test we get the expected triggers from a discovered mqtt device.""" await mqtt_mock_entry() - data1 = 
( - '{ "automation_type":"trigger",' - ' "device":{"identifiers":["0AFFD2"]},' - ' "payload": "short_press",' - ' "topic": "foobar/triggers/button1",' - ' "type": "button_short_press",' - ' "subtype": "button_1" }' - ) - async_fire_mqtt_message(hass, "homeassistant/device_automation/bla/config", data1) + async_fire_mqtt_message(hass, discovery_topic, data) await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - expected_triggers = [ + expected_triggers: list[dict[str, Any]] = [ { "platform": "device", "domain": DOMAIN, @@ -165,7 +185,7 @@ async def test_discover_bad_triggers( await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - expected_triggers = [ + expected_triggers: list[dict[str, Any]] = [ { "platform": "device", "domain": DOMAIN, @@ -226,7 +246,7 @@ async def test_update_remove_triggers( device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry.name == "milk" - expected_triggers1 = [ + expected_triggers1: list[dict[str, Any]] = [ { "platform": "device", "domain": DOMAIN, @@ -1263,7 +1283,7 @@ async def test_entity_device_info_update( """Test device registry update.""" await mqtt_mock_entry() - config = { + config: dict[str, Any] = { "automation_type": "trigger", "topic": "test-topic", "type": "foo", diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index cc7142236d0b0e..e49e7a27c8d46a 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -6,12 +6,14 @@ import logging from pathlib import Path import re -from unittest.mock import AsyncMock, call, patch +from typing import Any +from unittest.mock import ANY, AsyncMock, call, patch import pytest from homeassistant import config_entries from homeassistant.components import mqtt +from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.mqtt.abbreviations import ( ABBREVIATIONS, DEVICE_ABBREVIATIONS, @@ -34,7 +36,7 @@ Platform, ) from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResult +from homeassistant.data_entry_flow import AbortFlow, FlowResult from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, @@ -46,12 +48,14 @@ from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE from .test_common import help_all_subscribe_calls, help_test_unload_config_entry +from .test_tag import DEFAULT_TAG_ID, DEFAULT_TAG_SCAN from tests.common import ( MockConfigEntry, MockModule, async_capture_events, async_fire_mqtt_message, + async_get_device_automations, mock_config_flow, mock_integration, mock_platform, @@ -62,6 +66,133 @@ WebSocketGenerator, ) +TEST_SINGLE_CONFIGS = [ + ( + "homeassistant/device_automation/0AFFD2/bla1/config", + { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, + "automation_type": "trigger", + "payload": "short_press", + "topic": "foobar/triggers/button1", + "type": "button_short_press", + "subtype": "button_1", + }, + ), + ( + "homeassistant/sensor/0AFFD2/bla2/config", + { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, + "state_topic": "foobar/sensors/bla2/state", + "unique_id": 
"bla002", + }, + ), + ( + "homeassistant/tag/0AFFD2/bla3/config", + { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.40.2", "url": "https://www.foo2mqtt.io"}, + "topic": "foobar/tags/bla3/see", + }, + ), +] +TEST_DEVICE_CONFIG = { + "device": {"identifiers": ["0AFFD2"], "name": "test_device"}, + "o": {"name": "Foo2Mqtt", "sw": "1.50.0", "url": "https://www.foo2mqtt.io"}, + "cmps": { + "bla1": { + "platform": "device_automation", + "automation_type": "trigger", + "payload": "short_press", + "topic": "foobar/triggers/button1", + "type": "button_short_press", + "subtype": "button_1", + }, + "bla2": { + "platform": "sensor", + "state_topic": "foobar/sensors/bla2/state", + "unique_id": "bla002", + "name": "mqtt_sensor", + }, + "bla3": { + "platform": "tag", + "topic": "foobar/tags/bla3/see", + }, + }, +} +TEST_DEVICE_DISCOVERY_TOPIC = "homeassistant/device/0AFFD2/config" + + +async def help_check_discovered_items( + hass: HomeAssistant, device_registry: dr.DeviceRegistry, tag_mock: AsyncMock +) -> None: + """Help checking discovered test items are still available.""" + + # Check the device_trigger was discovered + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device_entry.id + ) + assert len(triggers) == 1 + # Check the sensor was discovered + state = hass.states.get("sensor.test_device_mqtt_sensor") + assert state is not None + + # Check the tag works + async_fire_mqtt_message(hass, "foobar/tags/bla3/see", DEFAULT_TAG_SCAN) + await hass.async_block_till_done() + tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id) + tag_mock.reset_mock() + + +@pytest.fixture +def mqtt_data_flow_calls() -> list[MqttServiceInfo]: + """Return list to capture MQTT data data flow calls.""" + return [] + + +@pytest.fixture +async def mock_mqtt_flow( + hass: HomeAssistant, mqtt_data_flow_calls: list[MqttServiceInfo] +) -> config_entries.ConfigFlow: + """Test fixure for mqtt integration flow. + + The topic is used as a unique ID. + The component test domain used is: `comp`. + + Creates an entry if does not exist. + Updates an entry if it exists, and there is an updated payload. 
+ """ + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: + """Test mqtt step.""" + await asyncio.sleep(0) + mqtt_data_flow_calls.append(discovery_info) + # Abort a flow if there is an update for the existing entry + if entry := self.hass.config_entries.async_entry_for_domain_unique_id( + "comp", discovery_info.topic + ): + hass.config_entries.async_update_entry( + entry, + data={ + "name": discovery_info.topic, + "payload": discovery_info.payload, + }, + ) + raise AbortFlow("already_configured") + await self.async_set_unique_id(discovery_info.topic) + return self.async_create_entry( + title="Test", + data={"name": discovery_info.topic, "payload": discovery_info.payload}, + ) + + return TestFlow + @pytest.mark.parametrize( "mqtt_config_entry_data", @@ -88,6 +219,8 @@ async def test_subscribing_config_topic( [ ("homeassistant/binary_sensor/bla/not_config", False), ("homeassistant/binary_sensor/rörkrökare/config", True), + ("homeassistant/device/bla/not_config", False), + ("homeassistant/device/rörkrökare/config", True), ], ) async def test_invalid_topic( @@ -116,10 +249,15 @@ async def test_invalid_topic( caplog.clear() +@pytest.mark.parametrize( + "discovery_topic", + ["homeassistant/binary_sensor/bla/config", "homeassistant/device/bla/config"], +) async def test_invalid_json( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, + discovery_topic: str, ) -> None: """Test sending in invalid JSON.""" await mqtt_mock_entry() @@ -128,9 +266,7 @@ async def test_invalid_json( ) as mock_dispatcher_send: mock_dispatcher_send = AsyncMock(return_value=None) - async_fire_mqtt_message( - hass, "homeassistant/binary_sensor/bla/config", "not json" - ) + async_fire_mqtt_message(hass, discovery_topic, "not json") await hass.async_block_till_done() assert "Unable to parse JSON" in caplog.text assert not mock_dispatcher_send.called @@ -179,6 +315,56 @@ async def test_invalid_config( assert "Error 'expected int for dictionary value @ data['qos']'" in caplog.text +async def test_invalid_device_discovery_config( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sending in JSON that violates the discovery schema if device or platform key is missing.""" + await mqtt_mock_entry() + async_fire_mqtt_message( + hass, + "homeassistant/device/bla/config", + '{ "o": {"name": "foobar"}, "cmps": ' + '{ "acp1": {"name": "abc", "state_topic": "home/alarm", ' + '"unique_id": "very_unique",' + '"command_topic": "home/alarm/set", ' + '"platform":"alarm_control_panel"}}}', + ) + await hass.async_block_till_done() + assert ( + "Invalid MQTT device discovery payload for bla, " + "required key not provided @ data['device']" in caplog.text + ) + + caplog.clear() + async_fire_mqtt_message( + hass, + "homeassistant/device/bla/config", + '{ "o": {"name": "foobar"}, "dev": {"identifiers": ["ABDE03"]}, ' + '"cmps": { "acp1": {"name": "abc", "state_topic": "home/alarm", ' + '"command_topic": "home/alarm/set" }}}', + ) + await hass.async_block_till_done() + assert ( + "Invalid MQTT device discovery payload for bla, " + "required key not provided @ data['components']['acp1']['platform']" + in caplog.text + ) + + caplog.clear() + async_fire_mqtt_message( + hass, + "homeassistant/device/bla/config", + '{ "o": {"name": "foobar"}, "dev": {"identifiers": ["ABDE03"]}, ' '"cmps": ""}', + ) + await hass.async_block_till_done() + 
assert ( + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['components']" in caplog.text + ) + + async def test_only_valid_components( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -191,105 +377,736 @@ async def test_only_valid_components( ) as mock_dispatcher_send: invalid_component = "timer" - mock_dispatcher_send = AsyncMock(return_value=None) + mock_dispatcher_send = AsyncMock(return_value=None) + + async_fire_mqtt_message( + hass, f"homeassistant/{invalid_component}/bla/config", "{}" + ) + + await hass.async_block_till_done() + + assert not mock_dispatcher_send.called + + +async def test_correct_config_discovery( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator +) -> None: + """Test sending in correct JSON.""" + await mqtt_mock_entry() + async_fire_mqtt_message( + hass, + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "state_topic": "test-topic" }', + ) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.beer") + + assert state is not None + assert state.name == "Beer" + assert ("binary_sensor", "bla") in hass.data["mqtt"].discovery_already_discovered + + +@pytest.mark.parametrize( + ("discovery_topic", "payloads", "discovery_id"), + [ + ( + "homeassistant/binary_sensor/bla/config", + ( + '{"name":"Beer","state_topic": "test-topic",' + '"unique_id": "very_unique1",' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + '{"name":"Milk","state_topic": "test-topic",' + '"unique_id": "very_unique1",' + '"o":{"name":"bla2mqtt","sw":"1.1",' + '"url":"https://bla2mqtt.example.com/support"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + ), + "bla", + ), + ( + "homeassistant/device/bla/config", + ( + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id": "very_unique1",' + '"name":"Beer","state_topic": "test-topic"}},' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id": "very_unique1",' + '"name":"Milk","state_topic": "test-topic"}},' + '"o":{"name":"bla2mqtt","sw":"1.1",' + '"url":"https://bla2mqtt.example.com/support"},' + '"dev":{"identifiers":["bla"],"name": "bla"}}', + ), + "bla bin_sens1", + ), + ], +) +async def test_discovery_integration_info( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, + discovery_topic: str, + payloads: tuple[str, str], + discovery_id: str, +) -> None: + """Test discovery of integration info.""" + await mqtt_mock_entry() + async_fire_mqtt_message( + hass, + discovery_topic, + payloads[0], + ) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.bla_beer") + + assert state is not None + assert state.name == "bla Beer" + + assert ( + "Processing device discovery for 'bla' from external " + "application bla2mqtt, version: 1.0" + in caplog.text + or f"Found new component: binary_sensor {discovery_id} from external application bla2mqtt, version: 1.0" + in caplog.text + ) + caplog.clear() + + # Send an update and add support url + async_fire_mqtt_message( + hass, + discovery_topic, + payloads[1], + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.bla_beer") + + assert state is not None + assert state.name == "bla Milk" + + assert ( + f"Component has already been discovered: binary_sensor {discovery_id}" + in caplog.text + ) + + +@pytest.mark.parametrize( + 
("single_configs", "device_discovery_topic", "device_config"), + [(TEST_SINGLE_CONFIGS, TEST_DEVICE_DISCOVERY_TOPIC, TEST_DEVICE_CONFIG)], +) +async def test_discovery_migration_to_device_base( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + caplog: pytest.LogCaptureFixture, + single_configs: list[tuple[str, dict[str, Any]]], + device_discovery_topic: str, + device_config: dict[str, Any], +) -> None: + """Test the migration of single discovery to device discovery.""" + await mqtt_mock_entry() + + # Discovery single config schema + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Try to migrate to device based discovery without migrate_discovery flag + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + assert ( + "Received a conflicting MQTT discovery message for device_automation " + "'0AFFD2 bla1' which was previously discovered on topic homeassistant/" + "device_automation/0AFFD2/bla1/config from external application Foo2Mqtt, " + "version: 1.40.2; the conflicting discovery message was received on topic " + "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " + "version: 1.50.0; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for entity sensor." + "test_device_mqtt_sensor; the entity was previously discovered on topic " + "homeassistant/sensor/0AFFD2/bla2/config from external application Foo2Mqtt, " + "version: 1.40.2; the conflicting discovery message was received on topic " + "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " + "version: 1.50.0; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for tag '0AFFD2 bla3' which " + "was previously discovered on topic homeassistant/tag/0AFFD2/bla3/config " + "from external application Foo2Mqtt, version: 1.40.2; the conflicting " + "discovery message was received on topic homeassistant/device/0AFFD2/config " + "from external application Foo2Mqtt, version: 1.50.0; for support visit " + "https://www.foo2mqtt.io" in caplog.text + ) + + # Check we still have our mqtt items + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Test Enable discovery migration + # Discovery single config schema + caplog.clear() + for discovery_topic, _ in single_configs: + # migr_discvry is abbreviation for migrate_discovery + payload = json.dumps({"migr_discvry": True}) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Assert we still have our device entry + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + # Check our trigger was unloaden + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device_entry.id + ) + assert len(triggers) == 0 + # Check the sensor was unloaded + state = hass.states.get("sensor.test_device_mqtt_sensor") + assert state is None + # Check the entity registry entry is retained 
+ assert entity_registry.async_is_registered("sensor.test_device_mqtt_sensor") + + assert ( + "Migration to MQTT device discovery schema started for device_automation " + "'0AFFD2 bla1' from external application Foo2Mqtt, version: 1.40.2 on topic " + "homeassistant/device_automation/0AFFD2/bla1/config. To complete migration, " + "publish a device discovery message with device_automation '0AFFD2 bla1'. " + "After completed migration, publish an empty (retained) payload to " + "homeassistant/device_automation/0AFFD2/bla1/config" in caplog.text + ) + assert ( + "Migration to MQTT device discovery schema started for entity sensor." + "test_device_mqtt_sensor from external application Foo2Mqtt, version: 1.40.2 " + "on topic homeassistant/sensor/0AFFD2/bla2/config. To complete migration, " + "publish a device discovery message with sensor entity '0AFFD2 bla2'. After " + "completed migration, publish an empty (retained) payload to " + "homeassistant/sensor/0AFFD2/bla2/config" in caplog.text + ) + + # Migrate to device based discovery + caplog.clear() + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + + caplog.clear() + for _ in range(2): + # Test publishing an empty payload twice to the migrated discovery topics + # does not remove the migrated items + for discovery_topic, _ in single_configs: + async_fire_mqtt_message( + hass, + discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Check we cannot accidentally migrate back and remove the items + caplog.clear() + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert ( + "Received a conflicting MQTT discovery message for device_automation " + "'0AFFD2 bla1' which was previously discovered on topic homeassistant/device" + "/0AFFD2/config from external application Foo2Mqtt, version: 1.50.0; the " + "conflicting discovery message was received on topic homeassistant/" + "device_automation/0AFFD2/bla1/config from external application Foo2Mqtt, " + "version: 1.40.2; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for entity sensor." 
+ "test_device_mqtt_sensor; the entity was previously discovered on topic " + "homeassistant/device/0AFFD2/config from external application Foo2Mqtt, " + "version: 1.50.0; the conflicting discovery message was received on topic " + "homeassistant/sensor/0AFFD2/bla2/config from external application Foo2Mqtt, " + "version: 1.40.2; for support visit https://www.foo2mqtt.io" in caplog.text + ) + assert ( + "Received a conflicting MQTT discovery message for tag '0AFFD2 bla3' which was " + "previously discovered on topic homeassistant/device/0AFFD2/config from " + "external application Foo2Mqtt, version: 1.50.0; the conflicting discovery " + "message was received on topic homeassistant/tag/0AFFD2/bla3/config from " + "external application Foo2Mqtt, version: 1.40.2; for support visit " + "https://www.foo2mqtt.io" in caplog.text + ) + + caplog.clear() + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Check we can remove the config using the new discovery topic + async_fire_mqtt_message( + hass, + device_discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + # Check the device was removed as all device components were removed + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is None + await hass.async_block_till_done(wait_background_tasks=True) + + +@pytest.mark.parametrize( + "config", + [ + {"state_topic": "foobar/sensors/bla2/state", "name": "none_test"}, + { + "state_topic": "foobar/sensors/bla2/state", + "name": "none_test", + "unique_id": "very_unique", + }, + { + "state_topic": "foobar/sensors/bla2/state", + "device": {"identifiers": ["0AFFD2"], "name": "none_test"}, + }, + ], +) +async def test_discovery_migration_unique_id( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, + config: dict[str, Any], +) -> None: + """Test entity has a unique_id and device context when migrating.""" + await mqtt_mock_entry() + + discovery_topic = "homeassistant/sensor/0AFFD2/bla2/config" + + # Discovery with single config schema + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Try discovery migration + payload = json.dumps({"migr_discvry": True}) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Assert the migration attempt fails + assert "Discovery migration is not possible" in caplog.text + + +@pytest.mark.parametrize( + ("single_configs", "device_discovery_topic", "device_config"), + [(TEST_SINGLE_CONFIGS, TEST_DEVICE_DISCOVERY_TOPIC, TEST_DEVICE_CONFIG)], +) +async def test_discovery_rollback_to_single_base( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + caplog: pytest.LogCaptureFixture, + single_configs: list[tuple[str, dict[str, Any]]], + device_discovery_topic: str, + device_config: dict[str, Any], +) -> None: + """Test the rollback of device discovery to a 
single component discovery.""" + await mqtt_mock_entry() + + # Start device based discovery + # any single component discovery will be migrated + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Migrate to single component discovery + # Test the schema + caplog.clear() + payload = json.dumps({"migrate_discovery": "invalid"}) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + assert "Invalid MQTT device discovery payload for 0AFFD2" in caplog.text + + # Set the correct migrate_discovery flag in the device payload + # to allow rollback + payload = json.dumps({"migrate_discovery": True}) + async_fire_mqtt_message( + hass, + device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + + # Check the log messages + assert ( + "Rollback to MQTT platform discovery schema started for entity sensor." + "test_device_mqtt_sensor from external application Foo2Mqtt, version: 1.50.0 " + "on topic homeassistant/device/0AFFD2/config. To complete rollback, publish a " + "platform discovery message with sensor entity '0AFFD2 bla2'. After completed " + "rollback, publish an empty (retained) payload to " + "homeassistant/device/0AFFD2/config" in caplog.text + ) + assert ( + "Rollback to MQTT platform discovery schema started for device_automation " + "'0AFFD2 bla1' from external application Foo2Mqtt, version: 1.50.0 on topic " + "homeassistant/device/0AFFD2/config. To complete rollback, publish a platform " + "discovery message with device_automation '0AFFD2 bla1'. After completed " + "rollback, publish an empty (retained) payload to " + "homeassistant/device/0AFFD2/config" in caplog.text + ) + + # Assert we still have our device entry + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + # Check our trigger was unloaded + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device_entry.id + ) + assert len(triggers) == 0 + # Check the sensor was unloaded + state = hass.states.get("sensor.test_device_mqtt_sensor") + assert state is None + # Check the entity registry entry is retained + assert entity_registry.async_is_registered("sensor.test_device_mqtt_sensor") + + # Publish the new component based payloads + # to switch back to component based discovery + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + # Check we still have our mqtt items + # await help_check_discovered_items(hass, device_registry, tag_mock) + + for _ in range(2): + # Test publishing an empty payload twice to the migrated discovery topic + # does not remove the migrated items async_fire_mqtt_message( - hass, f"homeassistant/{invalid_component}/bla/config", "{}" + hass, + device_discovery_topic, + "", ) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + # Check we cannot accidentally migrate back and remove the items + payload = json.dumps(device_config) + async_fire_mqtt_message( + hass, 
+ device_discovery_topic, + payload, + ) + await hass.async_block_till_done() await hass.async_block_till_done() - assert not mock_dispatcher_send.called + # Check we still have our mqtt items after publishing an + # empty payload to the old discovery topics + await help_check_discovered_items(hass, device_registry, tag_mock) + + # Check we can remove the the config using the new discovery topics + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + "", + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + # Check the device was removed as all device components were removed + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is None -async def test_correct_config_discovery( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator +@pytest.mark.parametrize( + ("discovery_topic", "payload"), + [ + ( + "homeassistant/binary_sensor/bla/config", + '{"state_topic": "test-topic",' + '"name":"bla","unique_id":"very_unique1",' + '"avty": {"topic": "avty-topic"},' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + '"dev":{"identifiers":["bla"],"name":"Beer"}}', + ), + ( + "homeassistant/device/bla/config", + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"name":"bla","unique_id":"very_unique1",' + '"state_topic": "test-topic"}},' + '"avty": {"topic": "avty-topic"},' + '"o":{"name":"bla2mqtt","sw":"1.0"},' + '"dev":{"identifiers":["bla"],"name":"Beer"}}', + ), + ], + ids=["component", "device"], +) +async def test_discovery_availability( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + payload: str, ) -> None: - """Test sending in correct JSON.""" + """Test device discovery with shared availability mapping.""" await mqtt_mock_entry() async_fire_mqtt_message( hass, - "homeassistant/binary_sensor/bla/config", - '{ "name": "Beer", "state_topic": "test-topic" }', + discovery_topic, + payload, ) await hass.async_block_till_done() + state = hass.states.get("binary_sensor.beer_bla") + assert state is not None + assert state.name == "Beer bla" + assert state.state == STATE_UNAVAILABLE - state = hass.states.get("binary_sensor.beer") + async_fire_mqtt_message( + hass, + "avty-topic", + "online", + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.beer_bla") + assert state is not None + assert state.state == STATE_UNKNOWN + async_fire_mqtt_message( + hass, + "test-topic", + "ON", + ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.beer_bla") assert state is not None - assert state.name == "Beer" - assert ("binary_sensor", "bla") in hass.data["mqtt"].discovery_already_discovered + assert state.state == STATE_ON -async def test_discovery_integration_info( +@pytest.mark.parametrize( + ("discovery_topic", "payload"), + [ + ( + "homeassistant/device/bla/config", + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id":"very_unique",' + '"avty": {"topic": "avty-topic-component"},' + '"name":"Beer","state_topic": "test-topic"}},' + '"avty": {"topic": "avty-topic-device"},' + '"o":{"name":"bla2mqtt","sw":"1.0"},"dev":{"identifiers":["bla"]}}', + ), + ( + "homeassistant/device/bla/config", + '{"cmps":{"bin_sens1":{"platform":"binary_sensor",' + '"unique_id":"very_unique",' + '"availability_topic": "avty-topic-component",' + '"name":"Beer","state_topic": "test-topic"}},' + '"availability_topic": "avty-topic-device",' + 
'"o":{"name":"bla2mqtt","sw":"1.0"},"dev":{"identifiers":["bla"]}}', + ), + ], + ids=["test1", "test2"], +) +async def test_discovery_component_availability_overridden( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + discovery_topic: str, + payload: str, ) -> None: - """Test logging discovery of new and updated items.""" + """Test device discovery with overridden shared availability mapping.""" await mqtt_mock_entry() async_fire_mqtt_message( hass, - "homeassistant/binary_sensor/bla/config", - '{ "name": "Beer", "state_topic": "test-topic", "o": {"name": "bla2mqtt", "sw": "1.0" } }', + discovery_topic, + payload, ) await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.beer") - + state = hass.states.get("binary_sensor.none_beer") assert state is not None assert state.name == "Beer" + assert state.state == STATE_UNAVAILABLE - assert ( - "Found new component: binary_sensor bla from external application bla2mqtt, version: 1.0" - in caplog.text + async_fire_mqtt_message( + hass, + "avty-topic-device", + "online", ) - caplog.clear() + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.none_beer") + assert state is not None + assert state.state == STATE_UNAVAILABLE - # Send an update and add support url async_fire_mqtt_message( hass, - "homeassistant/binary_sensor/bla/config", - '{ "name": "Milk", "state_topic": "test-topic", "o": {"name": "bla2mqtt", "sw": "1.1", "url": "https://bla2mqtt.example.com/support" } }', + "avty-topic-component", + "online", ) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.beer") - + state = hass.states.get("binary_sensor.none_beer") assert state is not None - assert state.name == "Milk" + assert state.state == STATE_UNKNOWN - assert ( - "Component has already been discovered: binary_sensor bla, sending update from external application bla2mqtt, version: 1.1, support URL: https://bla2mqtt.example.com/support" - in caplog.text + async_fire_mqtt_message( + hass, + "test-topic", + "ON", ) + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.none_beer") + assert state is not None + assert state.state == STATE_ON @pytest.mark.parametrize( - "config_message", + ("discovery_topic", "config_message", "error_message"), [ - '{ "name": "Beer", "state_topic": "test-topic", "o": "bla2mqtt" }', - '{ "name": "Beer", "state_topic": "test-topic", "o": 2.0 }', - '{ "name": "Beer", "state_topic": "test-topic", "o": null }', - '{ "name": "Beer", "state_topic": "test-topic", "o": {"sw": "bla2mqtt"} }', + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": "bla2mqtt" }', + "Unable to parse origin information from discovery message", + ), + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": 2.0 }', + "Unable to parse origin information from discovery message", + ), + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": null }', + "Unable to parse origin information from discovery message", + ), + ( + "homeassistant/binary_sensor/bla/config", + '{ "name": "Beer", "unique_id": "very_unique", ' + '"state_topic": "test-topic", "o": {"sw": "bla2mqtt"} }', + "Unable to parse origin information from discovery message", + ), + ( + "homeassistant/device/bla/config", + 
'{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": "bla2mqtt"}', + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['origin']", + ), + ( + "homeassistant/device/bla/config", + '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": 2.0}', + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['origin']", + ), + ( + "homeassistant/device/bla/config", + '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": null}', + "Invalid MQTT device discovery payload for bla, " + "expected a dictionary for dictionary value @ data['origin']", + ), + ( + "homeassistant/device/bla/config", + '{"dev":{"identifiers":["bs1"]},"cmps":{"bs1":' + '{"platform":"binary_sensor","name":"Beer","unique_id": "very_unique",' + '"state_topic":"test-topic"}},"o": {"sw": "bla2mqtt"}}', + "Invalid MQTT device discovery payload for bla, " + "required key not provided @ data['origin']['name']", + ), ], ) async def test_discovery_with_invalid_integration_info( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, + discovery_topic: str, config_message: str, + error_message: str, ) -> None: """Test sending in correct JSON.""" await mqtt_mock_entry() - async_fire_mqtt_message( - hass, "homeassistant/binary_sensor/bla/config", config_message - ) + async_fire_mqtt_message(hass, discovery_topic, config_message) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.beer") + state = hass.states.get("binary_sensor.none_beer") assert state is None - assert "Unable to parse origin information from discovery message" in caplog.text + assert error_message in caplog.text async def test_discover_fan( @@ -808,43 +1625,86 @@ async def test_duplicate_removal( assert "Component has already been discovered: binary_sensor bla" not in caplog.text +@pytest.mark.parametrize( + ("discovery_payloads", "entity_ids"), + [ + ( + { + "homeassistant/sensor/sens1/config": "{" + '"device":{"identifiers":["0AFFD2"]},' + '"state_topic": "foobar/sensor1",' + '"unique_id": "unique1",' + '"name": "sensor1"' + "}", + "homeassistant/sensor/sens2/config": "{" + '"device":{"identifiers":["0AFFD2"]},' + '"state_topic": "foobar/sensor2",' + '"unique_id": "unique2",' + '"name": "sensor2"' + "}", + }, + ["sensor.none_sensor1", "sensor.none_sensor2"], + ), + ( + { + "homeassistant/device/bla/config": "{" + '"device":{"identifiers":["0AFFD2"]},' + '"o": {"name": "foobar"},' + '"cmps": {"sens1": {' + '"platform": "sensor",' + '"name": "sensor1",' + '"state_topic": "foobar/sensor1",' + '"unique_id": "unique1"' + '},"sens2": {' + '"platform": "sensor",' + '"name": "sensor2",' + '"state_topic": "foobar/sensor2",' + '"unique_id": "unique2"' + "}}}" + }, + ["sensor.none_sensor1", "sensor.none_sensor2"], + ), + ], +) async def test_cleanup_device_manual( hass: HomeAssistant, + mock_debouncer: asyncio.Event, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_payloads: dict[str, str], + entity_ids: list[str], ) -> None: """Test discovered device is cleaned up when entry removed from device.""" 
mqtt_mock = await mqtt_mock_entry() assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - data = ( - '{ "device":{"identifiers":["0AFFD2"]},' - ' "state_topic": "foobar/sensor",' - ' "unique_id": "unique" }' - ) - - async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) - await hass.async_block_till_done() + mock_debouncer.clear() + for discovery_topic, discovery_payload in discovery_payloads.items(): + async_fire_mqtt_message(hass, discovery_topic, discovery_payload) + await mock_debouncer.wait() # Verify device and registry entries are created device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is not None - entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert entity_entry is not None - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + + state = hass.states.get(entity_id) + assert state is not None # Remove MQTT from the device mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + mock_debouncer.clear() response = await ws_client.remove_device( device_entry.id, mqtt_config_entry.entry_id ) assert response["success"] - await hass.async_block_till_done() + await mock_debouncer.wait() await hass.async_block_till_done() # Verify device and registry entries are cleared @@ -854,60 +1714,224 @@ async def test_cleanup_device_manual( assert entity_entry is None # Verify state is removed - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is None - await hass.async_block_till_done() + for entity_id in entity_ids: + state = hass.states.get(entity_id) + assert state is None - # Verify retained discovery topic has been cleared - mqtt_mock.async_publish.assert_called_once_with( - "homeassistant/sensor/bla/config", None, 0, True + # Verify retained discovery topics have been cleared + mqtt_mock.async_publish.assert_has_calls( + [call(discovery_topic, None, 0, True) for discovery_topic in discovery_payloads] ) + await hass.async_block_till_done(wait_background_tasks=True) + +@pytest.mark.parametrize( + ("discovery_topic", "discovery_payload", "entity_ids"), + [ + ( + "homeassistant/sensor/bla/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "state_topic": "foobar/sensor",' + ' "unique_id": "unique" }', + ["sensor.none_mqtt_sensor"], + ), + ( + "homeassistant/device/bla/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "platform": "sensor",' + ' "name": "sensor1",' + ' "state_topic": "foobar/sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "platform": "sensor",' + ' "name": "sensor2",' + ' "state_topic": "foobar/sensor2",' + ' "unique_id": "unique2"' + "}}}", + ["sensor.none_sensor1", "sensor.none_sensor2"], + ), + ], +) async def test_cleanup_device_mqtt( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + discovery_payload: str, + entity_ids: list[str], ) -> None: - """Test discvered device is cleaned up when removed through MQTT.""" + """Test discovered device is cleaned up when removed through MQTT.""" mqtt_mock = await mqtt_mock_entry() + + # set up an existing sensor first data = ( - '{ "device":{"identifiers":["0AFFD2"]},' + '{ "device":{"identifiers":["0AFFD3"]},' + ' "name": "sensor_base",' ' 
"state_topic": "foobar/sensor",' - ' "unique_id": "unique" }' + ' "unique_id": "unique_base" }' + ) + base_discovery_topic = "homeassistant/sensor/bla_base/config" + base_entity_id = "sensor.none_sensor_base" + async_fire_mqtt_message(hass, base_discovery_topic, data) + await hass.async_block_till_done() + + # Verify the base entity has been created and it has a state + base_device_entry = device_registry.async_get_device( + identifiers={("mqtt", "0AFFD3")} ) + assert base_device_entry is not None + entity_entry = entity_registry.async_get(base_entity_id) + assert entity_entry is not None + state = hass.states.get(base_entity_id) + assert state is not None - async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) + async_fire_mqtt_message(hass, discovery_topic, discovery_payload) await hass.async_block_till_done() # Verify device and registry entries are created device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is not None - entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert entity_entry is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is not None + state = hass.states.get(entity_id) + assert state is not None - async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", "") + async_fire_mqtt_message(hass, discovery_topic, "") await hass.async_block_till_done() await hass.async_block_till_done() # Verify device and registry entries are cleared device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) assert device_entry is None - entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert entity_entry is None - # Verify state is removed - state = hass.states.get("sensor.none_mqtt_sensor") - assert state is None - await hass.async_block_till_done() + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is None + + # Verify state is removed + state = hass.states.get(entity_id) + assert state is None + await hass.async_block_till_done() # Verify retained discovery topics have not been cleared again mqtt_mock.async_publish.assert_not_called() + # Verify the base entity still exists and it has a state + base_device_entry = device_registry.async_get_device( + identifiers={("mqtt", "0AFFD3")} + ) + assert base_device_entry is not None + entity_entry = entity_registry.async_get(base_entity_id) + assert entity_entry is not None + state = hass.states.get(base_entity_id) + assert state is not None + + +async def test_cleanup_device_mqtt_device_discovery( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test discovered device is cleaned up partly when removed through MQTT.""" + await mqtt_mock_entry() + + discovery_topic = "homeassistant/device/bla/config" + discovery_payload = ( + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "p": "sensor",' + ' "name": "sensor1",' + ' "state_topic": "foobar/sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "p": "sensor",' + ' "name": "sensor2",' + ' "state_topic": "foobar/sensor2",' + ' "unique_id": "unique2"' + "}}}" + ) + entity_ids = ["sensor.none_sensor1", "sensor.none_sensor2"] + async_fire_mqtt_message(hass, 
discovery_topic, discovery_payload) + await hass.async_block_till_done() + + # Verify device and registry entries are created + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + + state = hass.states.get(entity_id) + assert state is not None + + # Do update and remove sensor 2 from device + discovery_payload_update1 = ( + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "p": "sensor",' + ' "name": "sensor1",' + ' "state_topic": "foobar/sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "p": "sensor"' + "}}}" + ) + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update1) + await hass.async_block_till_done() + state = hass.states.get(entity_ids[0]) + assert state is not None + state = hass.states.get(entity_ids[1]) + assert state is None + + # Repeating the update + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update1) + await hass.async_block_till_done() + state = hass.states.get(entity_ids[0]) + assert state is not None + state = hass.states.get(entity_ids[1]) + assert state is None + + # Removing last sensor + discovery_payload_update2 = ( + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "cmps": {"sens1": {' + ' "p": "sensor"' + ' },"sens2": {' + ' "p": "sensor"' + "}}}" + ) + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update2) + await hass.async_block_till_done() + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + # Verify the device entry was removed with the last sensor + assert device_entry is None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is None + + state = hass.states.get(entity_id) + assert state is None + + # Repeating the update + async_fire_mqtt_message(hass, discovery_topic, discovery_payload_update2) + await hass.async_block_till_done() + + # Clear the empty discovery payload and verify there was nothing to cleanup + async_fire_mqtt_message(hass, discovery_topic, "") + await hass.async_block_till_done() + assert "No device components to cleanup" in caplog.text + async def test_cleanup_device_multiple_config_entries( hass: HomeAssistant, @@ -1518,20 +2542,14 @@ async def test_mqtt_discovery_flow_starts_once( hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient, caplog: pytest.LogCaptureFixture, + mock_mqtt_flow: config_entries.ConfigFlow, + mqtt_data_flow_calls: list[MqttServiceInfo], ) -> None: - """Check MQTT integration discovery starts a flow once.""" - - flow_calls: list[MqttServiceInfo] = [] - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: - """Test mqtt step.""" - await asyncio.sleep(0) - flow_calls.append(discovery_info) - return self.async_create_entry(title="Test", data={}) + """Check MQTT integration discovery starts a flow once. + A flow should be started once after discovery, + and after an entry was removed, to trigger re-discovery. 
+ """ mock_integration( hass, MockModule(domain="comp", async_setup_entry=AsyncMock(return_value=True)) ) @@ -1552,7 +2570,7 @@ def wait_birth(msg: ReceiveMessage) -> None: "homeassistant.components.mqtt.discovery.async_get_mqtt", return_value={"comp": ["comp/discovery/#"]}, ), - mock_config_flow("comp", TestFlow), + mock_config_flow("comp", mock_mqtt_flow), ): assert await hass.config_entries.async_setup(entry.entry_id) await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) @@ -1561,41 +2579,82 @@ def wait_birth(msg: ReceiveMessage) -> None: assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) + # Test the initial flow async_fire_mqtt_message(hass, "comp/discovery/bla/config1", "initial message") await hass.async_block_till_done(wait_background_tasks=True) - assert len(flow_calls) == 1 - assert flow_calls[0].topic == "comp/discovery/bla/config1" - assert flow_calls[0].payload == "initial message" + assert len(mqtt_data_flow_calls) == 1 + assert mqtt_data_flow_calls[0].topic == "comp/discovery/bla/config1" + assert mqtt_data_flow_calls[0].payload == "initial message" + # Test we can ignore updates if they are the same with caplog.at_level(logging.DEBUG): async_fire_mqtt_message( hass, "comp/discovery/bla/config1", "initial message" ) await hass.async_block_till_done(wait_background_tasks=True) assert "Ignoring already processed discovery message" in caplog.text - assert len(flow_calls) == 1 + assert len(mqtt_data_flow_calls) == 1 + + # Test we can apply updates + async_fire_mqtt_message(hass, "comp/discovery/bla/config1", "update message") + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(mqtt_data_flow_calls) == 2 + assert mqtt_data_flow_calls[1].topic == "comp/discovery/bla/config1" + assert mqtt_data_flow_calls[1].payload == "update message" + # Test we set up multiple entries async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "initial message") await hass.async_block_till_done(wait_background_tasks=True) - assert len(flow_calls) == 2 - assert flow_calls[1].topic == "comp/discovery/bla/config2" - assert flow_calls[1].payload == "initial message" + assert len(mqtt_data_flow_calls) == 3 + assert mqtt_data_flow_calls[2].topic == "comp/discovery/bla/config2" + assert mqtt_data_flow_calls[2].payload == "initial message" + # Test we update multiple entries async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "update message") await hass.async_block_till_done(wait_background_tasks=True) - assert len(flow_calls) == 3 - assert flow_calls[2].topic == "comp/discovery/bla/config2" - assert flow_calls[2].payload == "update message" + assert len(mqtt_data_flow_calls) == 4 + assert mqtt_data_flow_calls[3].topic == "comp/discovery/bla/config2" + assert mqtt_data_flow_calls[3].payload == "update message" - # An empty message triggers a flow to allow cleanup + # Test an empty message triggers a flow to allow cleanup (if needed) async_fire_mqtt_message(hass, "comp/discovery/bla/config2", "") await hass.async_block_till_done(wait_background_tasks=True) - assert len(flow_calls) == 4 - assert flow_calls[3].topic == "comp/discovery/bla/config2" - assert flow_calls[3].payload == "" + assert len(mqtt_data_flow_calls) == 5 + assert mqtt_data_flow_calls[4].topic == "comp/discovery/bla/config2" + assert mqtt_data_flow_calls[4].payload == "" + + # Cleanup the the second entry + assert ( + entry := hass.config_entries.async_entry_for_domain_unique_id( + "comp", "comp/discovery/bla/config2" + ) + ) is not None + await 
hass.config_entries.async_remove(entry.entry_id) + assert len(hass.config_entries.async_entries(domain="comp")) == 1 + + # Remove remaining entry1 and assert this triggers an + # automatic re-discovery flow with latest config + assert ( + entry := hass.config_entries.async_entry_for_domain_unique_id( + "comp", "comp/discovery/bla/config1" + ) + ) is not None + assert entry.unique_id == "comp/discovery/bla/config1" + await hass.config_entries.async_remove(entry.entry_id) + assert len(hass.config_entries.async_entries(domain="comp")) == 0 + + # Wait for re-discovery flow to complete + await hass.async_block_till_done(wait_background_tasks=True) + assert len(mqtt_data_flow_calls) == 6 + assert mqtt_data_flow_calls[5].topic == "comp/discovery/bla/config1" + assert mqtt_data_flow_calls[5].payload == "update message" + + # Re-discovery triggered the config flow + assert len(hass.config_entries.async_entries(domain="comp")) == 1 assert not mqtt_client_mock.unsubscribe.called @@ -1854,3 +2913,77 @@ def _callback(*args) -> None: assert len(calls) == 1 assert calls[0] == test_data unsub() + + +@pytest.mark.parametrize( + ("discovery_topic", "discovery_payload", "entity_ids"), + [ + ( + "homeassistant/device/bla/config", + '{ "device":{"identifiers":["0AFFD2"]},' + ' "o": {"name": "foobar"},' + ' "state_topic": "foobar/sensor-shared",' + ' "cmps": {"sens1": {' + ' "platform": "sensor",' + ' "name": "sensor1",' + ' "unique_id": "unique1"' + ' },"sens2": {' + ' "platform": "sensor",' + ' "name": "sensor2",' + ' "unique_id": "unique2"' + ' },"sens3": {' + ' "platform": "sensor",' + ' "name": "sensor3",' + ' "state_topic": "foobar/sensor3",' + ' "unique_id": "unique3"' + "}}}", + ["sensor.none_sensor1", "sensor.none_sensor2", "sensor.none_sensor3"], + ), + ], +) +async def test_shared_state_topic( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + discovery_topic: str, + discovery_payload: str, + entity_ids: list[str], +) -> None: + """Test a shared state_topic can be used.""" + await mqtt_mock_entry() + + async_fire_mqtt_message(hass, discovery_topic, discovery_payload) + await hass.async_block_till_done() + + # Verify device and registry entries are created + device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) + assert device_entry is not None + for entity_id in entity_ids: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_UNKNOWN + + async_fire_mqtt_message(hass, "foobar/sensor-shared", "New state") + + entity_id = entity_ids[0] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "New state" + entity_id = entity_ids[1] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "New state" + entity_id = entity_ids[2] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_UNKNOWN + + async_fire_mqtt_message(hass, "foobar/sensor3", "New state3") + entity_id = entity_ids[2] + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "New state3" diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index ea46f514d3da90..41049ed0887558 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -37,6 +37,7 @@ help_test_entity_device_info_with_connection, 
help_test_entity_device_info_with_identifier, help_test_entity_disabled_by_default, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -705,6 +706,18 @@ async def test_entity_name( ) +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = event.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 1d0cc809fd6833..6c8afe8c1b4768 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -1486,7 +1486,7 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][fan.DOMAIN]) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][fan.DOMAIN]) config[ATTR_PRESET_MODES] = ["eco", "auto"] config[CONF_PRESET_MODE_COMMAND_TOPIC] = "fan/some_preset_mode_command_topic" config[CONF_PERCENTAGE_COMMAND_TOPIC] = "fan/some_percentage_command_topic" @@ -2201,7 +2201,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = fan.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "preset_mode_command_topic": config[mqtt.DOMAIN][domain]["preset_modes"] = ["auto", "eco"] diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index f5bdf52c8aae5e..20ca89181ebc40 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -862,7 +862,9 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][humidifier.DOMAIN]) + config: dict[str, Any] = copy.deepcopy( + DEFAULT_CONFIG[mqtt.DOMAIN][humidifier.DOMAIN] + ) config["modes"] = ["eco", "auto"] config[CONF_MODE_COMMAND_TOPIC] = "humidifier/some_mode_command_topic" await help_test_encoding_subscribable_topics( @@ -1473,7 +1475,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = humidifier.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "mode_command_topic": config[mqtt.DOMAIN][domain]["modes"] = ["auto", "eco"] diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 562e74bfd1d884..145016751e723c 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -230,7 +230,7 @@ async def test_value_template_fails(hass: HomeAssistant) -> None: ) with pytest.raises(MqttValueTemplateException) as exc: val_tpl.async_render_with_possible_json_value( - '{"some_var": null }', default=100 + '{"some_var": null }', default="100" ) assert str(exc.value) == ( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' " @@ -835,7 +835,7 @@ async def test_receiving_message_with_non_utf8_topic_gets_logged( msg.payload = b"Payload" msg.qos = 2 msg.retain = True - msg.timestamp = time.monotonic() + msg.timestamp = 
time.monotonic() # type:ignore[assignment] mqtt_data: MqttData = hass.data["mqtt"] assert mqtt_data.client @@ -1197,7 +1197,6 @@ async def test_mqtt_ws_get_device_debug_info( } data_sensor = json.dumps(config_sensor) data_trigger = json.dumps(config_trigger) - config_sensor["platform"] = config_trigger["platform"] = mqtt.DOMAIN async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data_sensor) async_fire_mqtt_message( @@ -1254,7 +1253,6 @@ async def test_mqtt_ws_get_device_debug_info_binary( "unique_id": "unique", } data = json.dumps(config) - config["platform"] = mqtt.DOMAIN async_fire_mqtt_message(hass, "homeassistant/camera/bla/config", data) await hass.async_block_till_done() @@ -1489,7 +1487,7 @@ async def test_debug_info_non_mqtt( """Test we get empty debug_info for a device with non MQTT entities.""" await mqtt_mock_entry() domain = "sensor" - setup_test_component_platform(hass, domain, mock_sensor_entities) + setup_test_component_platform(hass, domain, mock_sensor_entities.values()) config_entry = MockConfigEntry(domain="test", data={}) config_entry.add_to_hass(hass) diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 101a45787ef4ec..0bef4196ef2cde 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -802,7 +802,9 @@ async def test_encoding_subscribable_topics( attribute_value: Any, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][lawn_mower.DOMAIN]) + config: dict[str, Any] = copy.deepcopy( + DEFAULT_CONFIG[mqtt.DOMAIN][lawn_mower.DOMAIN] + ) config["actions"] = ["milk", "beer"] await help_test_encoding_subscribable_topics( hass, diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index 18815281f633ac..0ef7cda2a7d1ac 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -1053,7 +1053,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=10, rgb_color=[80, 40, 20] + hass, "light.test", brightness=10, rgb_color=(80, 40, 20) ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1073,7 +1073,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=20, rgbw_color=[80, 40, 20, 10] + hass, "light.test", brightness=20, rgbw_color=(80, 40, 20, 10) ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1093,7 +1093,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes await common.async_turn_on( - hass, "light.test", brightness=40, rgbww_color=[80, 40, 20, 10, 8] + hass, "light.test", brightness=40, rgbww_color=(80, 40, 20, 10, 8) ) mqtt_mock.async_publish.assert_has_calls( [ @@ -1112,7 +1112,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "rgbww" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/set", "on", 2, False), @@ -1130,7 +1130,7 @@ async def 
test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", brightness=60, xy_color=[0.2, 0.3]) + await common.async_turn_on(hass, "light.test", brightness=60, xy_color=(0.2, 0.3)) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/set", "on", 2, False), @@ -1193,7 +1193,7 @@ async def test_sending_mqtt_rgb_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 64]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 64)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1236,7 +1236,7 @@ async def test_sending_mqtt_rgbw_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 64, 32]) + await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 64, 32)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1279,7 +1279,7 @@ async def test_sending_mqtt_rgbww_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 64, 32, 16]) + await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 64, 32, 16)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1469,7 +1469,7 @@ async def test_on_command_brightness( # Turn on w/ just a color to ensure brightness gets # added and sent. - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1545,7 +1545,7 @@ async def test_on_command_brightness_scaled( # Turn on w/ just a color to ensure brightness gets # added and sent. - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1626,7 +1626,7 @@ async def test_on_command_rgb( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1722,7 +1722,7 @@ async def test_on_command_rgbw( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. - await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 0, 16]) + await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 0, 16)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1818,7 +1818,7 @@ async def test_on_command_rgbww( mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) # Ensure color gets scaled with brightness. 
- await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 0, 16, 32]) + await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 0, 16, 32)) mqtt_mock.async_publish.assert_has_calls( [ @@ -3262,7 +3262,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] elif topic == "white_command_topic": @@ -3333,7 +3333,7 @@ async def test_encoding_subscribable_topics( init_payload: tuple[str, str] | None, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) config[CONF_EFFECT_COMMAND_TOPIC] = "light/CONF_EFFECT_COMMAND_TOPIC" config[CONF_RGB_COMMAND_TOPIC] = "light/CONF_RGB_COMMAND_TOPIC" config[CONF_BRIGHTNESS_COMMAND_TOPIC] = "light/CONF_BRIGHTNESS_COMMAND_TOPIC" diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 829222e03043c8..31573ad88c6103 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -99,7 +99,7 @@ ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import json_dumps -from homeassistant.util.json import JsonValueType, json_loads +from homeassistant.util.json import json_loads from .test_common import ( help_custom_config, @@ -172,11 +172,11 @@ class JsonValidator: """Helper to compare JSON.""" - def __init__(self, jsondata: JsonValueType) -> None: + def __init__(self, jsondata: bytes | str) -> None: """Initialize JSON validator.""" self.jsondata = jsondata - def __eq__(self, other: JsonValueType) -> bool: + def __eq__(self, other: bytes | str) -> bool: # type:ignore[override] """Compare JSON data.""" return json_loads(self.jsondata) == json_loads(other) @@ -1108,7 +1108,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.reset_mock() await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", @@ -1128,7 +1128,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes["rgb_color"] == (0, 123, 255) assert state.attributes["xy_color"] == (0.14, 0.131) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( @@ -1148,7 +1148,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes["rgb_color"] == (255, 56, 59) assert state.attributes["xy_color"] == (0.654, 0.301) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( @@ -1265,7 +1265,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.state == STATE_OFF # Set hs color - await common.async_turn_on(hass, "light.test", brightness=75, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=75, 
hs_color=(359, 78)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1286,7 +1286,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgb color - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1305,7 +1305,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgbw color - await common.async_turn_on(hass, "light.test", rgbw_color=[255, 128, 0, 123]) + await common.async_turn_on(hass, "light.test", rgbw_color=(255, 128, 0, 123)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1326,7 +1326,7 @@ async def test_sending_mqtt_commands_and_optimistic2( mqtt_mock.async_publish.reset_mock() # Set rgbww color - await common.async_turn_on(hass, "light.test", rgbww_color=[255, 128, 0, 45, 32]) + await common.async_turn_on(hass, "light.test", rgbww_color=(255, 128, 0, 45, 32)) state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes["brightness"] == 75 @@ -1348,7 +1348,7 @@ async def test_sending_mqtt_commands_and_optimistic2( # Set xy color await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.223] + hass, "light.test", brightness=50, xy_color=(0.123, 0.223) ) state = hass.states.get("light.test") assert state.state == STATE_ON @@ -1435,10 +1435,10 @@ async def test_sending_hs_color( mqtt_mock.reset_mock() await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1497,11 +1497,11 @@ async def test_sending_rgb_color_no_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) await common.async_turn_on( - hass, "light.test", rgb_color=[255, 128, 0], brightness=255 + hass, "light.test", rgb_color=(255, 128, 0), brightness=255 ) mqtt_mock.async_publish.assert_has_calls( @@ -1555,17 +1555,17 @@ async def test_sending_rgb_color_no_brightness2( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) await common.async_turn_on( - hass, "light.test", rgb_color=[255, 128, 0], brightness=255 + hass, "light.test", rgb_color=(255, 128, 0), brightness=255 ) await common.async_turn_on( - hass, "light.test", rgbw_color=[128, 64, 32, 16], brightness=128 + hass, 
"light.test", rgbw_color=(128, 64, 32, 16), brightness=128 ) await common.async_turn_on( - hass, "light.test", rgbww_color=[128, 64, 32, 16, 8], brightness=64 + hass, "light.test", rgbww_color=(128, 64, 32, 16, 8), brightness=64 ) mqtt_mock.async_publish.assert_has_calls( @@ -1635,11 +1635,11 @@ async def test_sending_rgb_color_with_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=255, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=255, hs_color=(359, 78)) await common.async_turn_on(hass, "light.test", brightness=1) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1705,11 +1705,11 @@ async def test_sending_rgb_color_with_scaled_brightness( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=255, hs_color=[359, 78]) + await common.async_turn_on(hass, "light.test", brightness=255, hs_color=(359, 78)) await common.async_turn_on(hass, "light.test", brightness=1) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -1820,10 +1820,10 @@ async def test_sending_xy_color( assert state.state == STATE_UNKNOWN await common.async_turn_on( - hass, "light.test", brightness=50, xy_color=[0.123, 0.123] + hass, "light.test", brightness=50, xy_color=(0.123, 0.123) ) - await common.async_turn_on(hass, "light.test", brightness=50, hs_color=[359, 78]) - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_has_calls( [ @@ -2629,7 +2629,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] @@ -2680,7 +2680,7 @@ async def test_encoding_subscribable_topics( init_payload: tuple[str, str] | None, ) -> None: """Test handling of incoming encoded payload.""" - config = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][light.DOMAIN]) config["color_mode"] = True config["supported_color_modes"] = [ "color_temp", diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index d570454a6bfe69..63e110ba7c0a42 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -482,7 +482,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Full brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", 
rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-128-0,30.118-100.0", 2, False ) @@ -492,7 +492,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("rgb_color") == (255, 128, 0) # Full brightness - normalization of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 2, False ) @@ -511,7 +511,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Half brightness - scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 255, 128]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 255, 128)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-128-64,150.118-100.0", 2, False ) @@ -521,7 +521,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("rgb_color") == (0, 255, 128) # Half brightness - normalization+scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 32, 16]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 32, 16)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-128-64,150.0-100.0", 2, False ) @@ -614,7 +614,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert not state.attributes.get("brightness") # Full brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[255, 128, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(255, 128, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-128-0,30.118-100.0", 0, False ) @@ -624,7 +624,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert not state.attributes.get("rgb_color") # Full brightness - normalization of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 0]) + await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 0, False ) @@ -638,7 +638,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( mqtt_mock.async_publish.reset_mock() # Half brightness - no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 255, 128]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 255, 128)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-255-128,150.118-100.0", 0, False ) @@ -646,7 +646,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( state = hass.states.get("light.test") # Half brightness - normalization but no scaling of RGB values sent over MQTT - await common.async_turn_on(hass, "light.test", rgb_color=[0, 32, 16]) + await common.async_turn_on(hass, "light.test", rgb_color=(0, 32, 16)) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,,0-255-127,150.0-100.0", 0, False ) @@ -1259,7 +1259,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with different encoding.""" domain = light.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = 
copy.deepcopy(DEFAULT_CONFIG) if topic == "effect_command_topic": config[mqtt.DOMAIN][domain]["effect_list"] = ["random", "color_loop"] diff --git a/tests/components/mqtt/test_number.py b/tests/components/mqtt/test_number.py index 44652681fc3710..48aaa11f672178 100644 --- a/tests/components/mqtt/test_number.py +++ b/tests/components/mqtt/test_number.py @@ -47,6 +47,7 @@ help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -1100,6 +1101,18 @@ async def test_entity_name( ) + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = number.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( + "hass_config", + [ diff --git a/tests/components/mqtt/test_select.py b/tests/components/mqtt/test_select.py index 60eb4893760da2..8d79a3ce609c61 100644 --- a/tests/components/mqtt/test_select.py +++ b/tests/components/mqtt/test_select.py @@ -610,7 +610,7 @@ def _test_options_attributes_options_config( @pytest.mark.parametrize( ("hass_config", "options"), - _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), + _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), # type:ignore[arg-type] ) async def test_options_attributes( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, options: list[str] diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 7b63afbc60373a..7f418864872802 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -53,6 +53,7 @@ help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, help_test_entity_disabled_by_default, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_entity_id_update_subscriptions, help_test_entity_name, @@ -713,7 +714,7 @@ async def test_force_update_disabled( def test_callback(event: Event) -> None: events.append(event) - hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) + hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) # type:ignore[arg-type] async_fire_mqtt_message(hass, "test-topic", "100") await hass.async_block_till_done() @@ -751,7 +752,7 @@ async def test_force_update_enabled( def test_callback(event: Event) -> None: events.append(event) - hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) + hass.bus.async_listen(EVENT_STATE_CHANGED, test_callback) # type:ignore[arg-type] async_fire_mqtt_message(hass, "test-topic", "100") await hass.async_block_till_done() @@ -1583,6 +1584,18 @@ async def test_entity_name( ) + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = sensor.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, mqtt_mock_entry, domain, config + ) + + @pytest.mark.parametrize( + "hass_config", + [ diff --git a/tests/components/mqtt/test_siren.py b/tests/components/mqtt/test_siren.py index 3f720e3ee3c8b5..58a5cb735f9ce1 100644 --- a/tests/components/mqtt/test_siren.py +++
b/tests/components/mqtt/test_siren.py @@ -594,7 +594,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, siren.DOMAIN, DEFAULT_CONFIG, {} + hass, mqtt_mock_entry, siren.DOMAIN, DEFAULT_CONFIG, None ) @@ -974,7 +974,7 @@ async def test_publishing_with_custom_encoding( ) -> None: """Test publishing MQTT payload with command templates and different encoding.""" domain = siren.DOMAIN - config = copy.deepcopy(DEFAULT_CONFIG) + config: dict[str, Any] = copy.deepcopy(DEFAULT_CONFIG) config[mqtt.DOMAIN][domain][siren.ATTR_AVAILABLE_TONES] = ["siren", "xylophone"] await help_test_publishing_with_custom_encoding( diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index fddbfd8fbe2373..dceeff07377b30 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -403,7 +403,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, switch.DOMAIN, DEFAULT_CONFIG, {} + hass, mqtt_mock_entry, switch.DOMAIN, DEFAULT_CONFIG, None ) diff --git a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index adebd15758809a..41c417fe3e9ebb 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -1,9 +1,9 @@ """The tests for MQTT tag scanner.""" -from collections.abc import Generator import copy import json -from unittest.mock import ANY, AsyncMock, patch +from typing import Any +from unittest.mock import ANY, AsyncMock import pytest @@ -46,13 +46,6 @@ ) -@pytest.fixture -def tag_mock() -> Generator[AsyncMock]: - """Fixture to mock tag.""" - with patch("homeassistant.components.tag.async_scan_tag") as mock_tag: - yield mock_tag - - @pytest.mark.no_fail_on_log_exception async def test_discover_bad_tag( hass: HomeAssistant, @@ -504,7 +497,7 @@ async def test_entity_device_info_update( """Test device registry update.""" await mqtt_mock_entry() - config = { + config: dict[str, Any] = { "topic": "test-topic", "device": { "identifiers": ["helloworld"], diff --git a/tests/components/mqtt/test_text.py b/tests/components/mqtt/test_text.py index ebcb835844d258..96924030279c8c 100644 --- a/tests/components/mqtt/test_text.py +++ b/tests/components/mqtt/test_text.py @@ -469,7 +469,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, mqtt_mock_entry, text.DOMAIN, DEFAULT_CONFIG, {} + hass, mqtt_mock_entry, text.DOMAIN, DEFAULT_CONFIG, None ) diff --git a/tests/components/mqtt/test_update.py b/tests/components/mqtt/test_update.py index 937b8cdebd0344..4ca10cbe8b2d59 100644 --- a/tests/components/mqtt/test_update.py +++ b/tests/components/mqtt/test_update.py @@ -25,6 +25,7 @@ help_test_entity_device_info_update, help_test_entity_device_info_with_connection, help_test_entity_device_info_with_identifier, + help_test_entity_icon_and_entity_picture, help_test_entity_id_update_discovery_update, help_test_reloadable, help_test_setting_attribute_via_mqtt_json_message, @@ -313,6 +314,60 @@ async def test_empty_json_state_message( } ], ) +async def test_invalid_json_state_message( + hass: HomeAssistant, + 
mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test an invalid JSON state payload.""" + state_topic = "test/state-topic" + await mqtt_mock_entry() + + async_fire_mqtt_message( + hass, + state_topic, + '{"installed_version":"1.9.0","latest_version":"1.9.0",' + '"title":"Test Update 1 Title","release_url":"https://example.com/release1",' + '"release_summary":"Test release summary 1",' + '"entity_picture": "https://example.com/icon1.png"}', + ) + + await hass.async_block_till_done() + + state = hass.states.get("update.test_update") + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "1.9.0" + assert state.attributes.get("latest_version") == "1.9.0" + assert state.attributes.get("release_summary") == "Test release summary 1" + assert state.attributes.get("release_url") == "https://example.com/release1" + assert state.attributes.get("title") == "Test Update 1 Title" + assert state.attributes.get("entity_picture") == "https://example.com/icon1.png" + + # Test update schema validation with invalid value in JSON update + async_fire_mqtt_message(hass, state_topic, '{"update_percentage":101}') + + await hass.async_block_till_done() + assert ( + "Schema violation after processing payload '{\"update_percentage\":101}' on " + "topic 'test/state-topic' for entity 'update.test_update': value must be at " + "most 100 for dictionary value @ data['update_percentage']" in caplog.text + ) + + +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + update.DOMAIN: { + "state_topic": "test/state-topic", + "name": "Test Update", + "display_precision": 1, + } + } + } + ], +) async def test_json_state_message( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: @@ -354,6 +409,45 @@ async def test_json_state_message( assert state.attributes.get("installed_version") == "1.9.0" assert state.attributes.get("latest_version") == "2.0.0" assert state.attributes.get("entity_picture") == "https://example.com/icon2.png" + assert state.attributes.get("in_progress") is False + assert state.attributes.get("update_percentage") is None + + # Test in_progress status + async_fire_mqtt_message(hass, state_topic, '{"in_progress":true}') + await hass.async_block_till_done() + + state = hass.states.get("update.test_update") + assert state.state == STATE_ON + assert state.attributes.get("installed_version") == "1.9.0" + assert state.attributes.get("latest_version") == "2.0.0" + assert state.attributes.get("entity_picture") == "https://example.com/icon2.png" + assert state.attributes.get("in_progress") is True + assert state.attributes.get("update_percentage") is None + + async_fire_mqtt_message(hass, state_topic, '{"in_progress":false}') + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is False + + # Test update_percentage status + async_fire_mqtt_message(hass, state_topic, '{"update_percentage":51.75}') + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is True + assert state.attributes.get("update_percentage") == 51.75 + assert state.attributes.get("display_precision") == 1 + + async_fire_mqtt_message(hass, state_topic, '{"update_percentage":100}') + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is True + assert state.attributes.get("update_percentage") == 100 + +
async_fire_mqtt_message(hass, state_topic, '{"update_percentage":null}') + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state.attributes.get("in_progress") is False + assert state.attributes.get("update_percentage") is None @pytest.mark.parametrize( @@ -724,6 +818,10 @@ async def test_reloadable( '{"entity_picture": "https://example.com/icon1.png"}', '{"entity_picture": "https://example.com/icon2.png"}', ), + ("test-topic", '{"in_progress": true}', '{"in_progress": false}'), + ("test-topic", '{"update_percentage": 0}', '{"update_percentage": 50}'), + ("test-topic", '{"update_percentage": 50}', '{"update_percentage": 100}'), + ("test-topic", '{"update_percentage": 100}', '{"update_percentage": null}'), ("availability-topic", "online", "offline"), ("json-attributes-topic", '{"attr1": "val1"}', '{"attr1": "val2"}'), ], @@ -775,3 +873,19 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +async def test_entity_icon_and_entity_picture( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the entity icon or picture setup.""" + domain = update.DOMAIN + config = DEFAULT_CONFIG + await help_test_entity_icon_and_entity_picture( + hass, + mqtt_mock_entry, + domain, + config, + default_entity_picture="https://brands.home-assistant.io/_/mqtt/icon.png", + ) diff --git a/tests/components/mqtt/test_util.py b/tests/components/mqtt/test_util.py index a3802de69dad27..37bf6982b7a091 100644 --- a/tests/components/mqtt/test_util.py +++ b/tests/components/mqtt/test_util.py @@ -236,8 +236,7 @@ async def test_waiting_for_client_not_loaded( unsubs: list[Callable[[], None]] = [] - async def _async_just_in_time_subscribe() -> Callable[[], None]: - nonlocal unsub + async def _async_just_in_time_subscribe() -> None: assert await mqtt.async_wait_for_mqtt_client(hass) # Awaiting a second time should work too and return True assert await mqtt.async_wait_for_mqtt_client(hass) @@ -261,12 +260,12 @@ async def test_waiting_for_client_loaded( """Test waiting for client where mqtt entry is loaded.""" unsub: Callable[[], None] | None = None - async def _async_just_in_time_subscribe() -> Callable[[], None]: + async def _async_just_in_time_subscribe() -> None: nonlocal unsub assert await mqtt.async_wait_for_mqtt_client(hass) unsub = await mqtt.async_subscribe(hass, "test_topic", lambda msg: None) - entry = hass.config_entries.async_entries(mqtt.DATA_MQTT)[0] + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] assert entry.state is ConfigEntryState.LOADED await _async_just_in_time_subscribe() @@ -290,7 +289,7 @@ async def test_waiting_for_client_entry_fails( ) entry.add_to_hass(hass) - async def _async_just_in_time_subscribe() -> Callable[[], None]: + async def _async_just_in_time_subscribe() -> None: assert not await mqtt.async_wait_for_mqtt_client(hass) hass.async_create_task(_async_just_in_time_subscribe()) @@ -300,7 +299,7 @@ async def _async_just_in_time_subscribe() -> Callable[[], None]: side_effect=Exception, ): await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_ERROR + assert entry.state is ConfigEntryState.SETUP_ERROR # type:ignore[comparison-overlap] async def test_waiting_for_client_setup_fails( @@ -318,7 +317,7 @@ async def test_waiting_for_client_setup_fails( ) entry.add_to_hass(hass) - async def _async_just_in_time_subscribe() -> Callable[[], None]: + async def 
_async_just_in_time_subscribe() -> None: assert not await mqtt.async_wait_for_mqtt_client(hass) hass.async_create_task(_async_just_in_time_subscribe()) @@ -327,7 +326,7 @@ async def _async_just_in_time_subscribe() -> Callable[[], None]: # Simulate MQTT setup fails before the client would become available mqtt_client_mock.connect.side_effect = Exception assert not await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_ERROR + assert entry.state is ConfigEntryState.SETUP_ERROR # type:ignore[comparison-overlap] @patch("homeassistant.components.mqtt.util.AVAILABILITY_TIMEOUT", 0.01) diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index 9b80d3814579bb..fef62c33a93f12 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -292,7 +292,7 @@ async def test_command_without_command_topic( mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() - await common.async_send_command(hass, "some command", "vacuum.test") + await common.async_send_command(hass, "some command", entity_id="vacuum.test") mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() diff --git a/tests/components/mqtt/test_water_heater.py b/tests/components/mqtt/test_water_heater.py index 7bab4a5e23358a..02ae54c1a85887 100644 --- a/tests/components/mqtt/test_water_heater.py +++ b/tests/components/mqtt/test_water_heater.py @@ -162,7 +162,7 @@ async def test_set_operation_mode_bad_attr_and_state( state = hass.states.get(ENTITY_WATER_HEATER) assert state.state == "off" with pytest.raises(vol.Invalid) as excinfo: - await common.async_set_operation_mode(hass, None, ENTITY_WATER_HEATER) + await common.async_set_operation_mode(hass, None, ENTITY_WATER_HEATER) # type:ignore[arg-type] assert "string value is None for dictionary value @ data['operation_mode']" in str( excinfo.value ) diff --git a/tests/components/music_assistant/__init__.py b/tests/components/music_assistant/__init__.py new file mode 100644 index 00000000000000..6893b862e2d08d --- /dev/null +++ b/tests/components/music_assistant/__init__.py @@ -0,0 +1 @@ +"""The tests for the Music Assistant component.""" diff --git a/tests/components/music_assistant/conftest.py b/tests/components/music_assistant/conftest.py new file mode 100644 index 00000000000000..b03a56ab4a6ea7 --- /dev/null +++ b/tests/components/music_assistant/conftest.py @@ -0,0 +1,35 @@ +"""Music Assistant test fixtures.""" + +from collections.abc import Generator +from unittest.mock import patch + +from music_assistant_models.api import ServerInfoMessage +import pytest + +from homeassistant.components.music_assistant.config_flow import CONF_URL +from homeassistant.components.music_assistant.const import DOMAIN + +from tests.common import AsyncMock, MockConfigEntry, load_fixture + + +@pytest.fixture +def mock_get_server_info() -> Generator[AsyncMock]: + """Mock the function to get server info.""" + with patch( + "homeassistant.components.music_assistant.config_flow.get_server_info" + ) as mock_get_server_info: + mock_get_server_info.return_value = ServerInfoMessage.from_json( + load_fixture("server_info_message.json", DOMAIN) + ) + yield mock_get_server_info + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Music Assistant", + data={CONF_URL: "http://localhost:8095"}, + unique_id="1234", + ) diff --git 
a/tests/components/music_assistant/fixtures/server_info_message.json b/tests/components/music_assistant/fixtures/server_info_message.json new file mode 100644 index 00000000000000..907ec8af820ae2 --- /dev/null +++ b/tests/components/music_assistant/fixtures/server_info_message.json @@ -0,0 +1,9 @@ +{ + "server_id": "1234", + "server_version": "0.0.0", + "schema_version": 23, + "min_supported_schema_version": 23, + "base_url": "http://localhost:8095", + "homeassistant_addon": false, + "onboard_done": false +} diff --git a/tests/components/music_assistant/test_config_flow.py b/tests/components/music_assistant/test_config_flow.py new file mode 100644 index 00000000000000..c700060889c217 --- /dev/null +++ b/tests/components/music_assistant/test_config_flow.py @@ -0,0 +1,217 @@ +"""Define tests for the Music Assistant Integration config flow.""" + +from copy import deepcopy +from ipaddress import ip_address +from unittest import mock +from unittest.mock import AsyncMock + +from music_assistant_client.exceptions import ( + CannotConnect, + InvalidServerVersion, + MusicAssistantClientException, +) +from music_assistant_models.api import ServerInfoMessage +import pytest + +from homeassistant.components.music_assistant.config_flow import CONF_URL +from homeassistant.components.music_assistant.const import DEFAULT_NAME, DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry, load_fixture + +SERVER_INFO = { + "server_id": "1234", + "base_url": "http://localhost:8095", + "server_version": "0.0.0", + "schema_version": 23, + "min_supported_schema_version": 23, + "homeassistant_addon": True, +} + +ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + hostname="mock_hostname", + port=None, + type=mock.ANY, + name=mock.ANY, + properties=SERVER_INFO, +) + + +async def test_full_flow( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_URL: "http://localhost:8095", + } + assert result["result"].unique_id == "1234" + + +async def test_zero_conf_flow( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_URL: "http://localhost:8095", + } + assert result["result"].unique_id == "1234" + + +async def test_zero_conf_missing_server_id( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow with missing server id.""" + 
bad_zero_conf_data = deepcopy(ZEROCONF_DATA) + bad_zero_conf_data.properties.pop("server_id") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=bad_zero_conf_data, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "missing_server_id" + + +async def test_duplicate_user( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate user flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_duplicate_zeroconf( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate zeroconf flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + (InvalidServerVersion("invalid_server_version"), "invalid_server_version"), + (CannotConnect("cannot_connect"), "cannot_connect"), + (MusicAssistantClientException("unknown"), "unknown"), + ], +) +async def test_flow_user_server_version_invalid( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + exception: MusicAssistantClientException, + error_message: str, +) -> None: + """Test the user flow when the server URL is invalid.""" + mock_get_server_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + assert result["errors"] == {"base": error_message} + + mock_get_server_info.side_effect = None + mock_get_server_info.return_value = ServerInfoMessage.from_json( + load_fixture("server_info_message.json", DOMAIN) + ) + + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_flow_zeroconf_connect_issue( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test the zeroconf flow when the server cannot be reached.""" + mock_get_server_info.side_effect = CannotConnect("cannot_connect") + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/mysensors/test_cover.py
b/tests/components/mysensors/test_cover.py index e056bff80fa7b6..a063aa8f8d8305 100644 --- a/tests/components/mysensors/test_cover.py +++ b/tests/components/mysensors/test_cover.py @@ -15,10 +15,7 @@ SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -36,7 +33,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 assert state.attributes[ATTR_BATTERY_LEVEL] == 0 @@ -57,7 +54,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 transport_write.reset_mock() @@ -79,7 +76,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 50 transport_write.reset_mock() @@ -102,7 +99,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 75 receive_message("1;1;1;0;29;0\n") @@ -112,7 +109,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 transport_write.reset_mock() @@ -134,7 +131,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING assert state.attributes[ATTR_CURRENT_POSITION] == 50 receive_message("1;1;1;0;30;0\n") @@ -144,7 +141,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 transport_write.reset_mock() @@ -165,7 +162,7 @@ async def test_cover_node_percentage( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 25 @@ -181,7 +178,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -200,7 +197,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING transport_write.reset_mock() @@ -220,7 +217,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN transport_write.reset_mock() @@ -241,7 +238,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING receive_message("1;1;1;0;29;0\n") receive_message("1;1;1;0;2;1\n") @@ -250,7 +247,7 @@ async def test_cover_node_binary( state = 
hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN transport_write.reset_mock() @@ -270,7 +267,7 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING receive_message("1;1;1;0;30;0\n") receive_message("1;1;1;0;2;0\n") @@ -279,4 +276,4 @@ async def test_cover_node_binary( state = hass.states.get(entity_id) assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index 9ec5db0ea3bd30..99dd9c857e6d82 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -989,5 +989,56 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "147641", + "parameterName": "Start Wednesday", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:52:01+00:00", + "value": 0, + "strVal": "0", + "smartHomeCategories": [], + "minValue": 0, + "maxValue": 86400, + "stepValue": 900, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072", + "parameterName": "start diff additional heat", + "parameterUnit": "DM", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47011", + "parameterName": "Heating offset climate system 1", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 9160fd3b365d8d..1b3502c1f0460e 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1050,6 +1050,57 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "147641", + "parameterName": "Start Wednesday", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:52:01+00:00", + "value": 0, + "strVal": "0", + "smartHomeCategories": [], + "minValue": 0, + "maxValue": 86400, + "stepValue": 900, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072", + "parameterName": "start diff additional heat", + "parameterUnit": "DM", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47011", + "parameterName": "Heating offset climate system 1", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + 
"maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null } ] @@ -2093,6 +2144,57 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "147641", + "parameterName": "Start Wednesday", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:52:01+00:00", + "value": 0, + "strVal": "0", + "smartHomeCategories": [], + "minValue": 0, + "maxValue": 86400, + "stepValue": 900, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "148072", + "parameterName": "start diff additional heat", + "parameterUnit": "DM", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 700, + "strVal": "700DM", + "smartHomeCategories": [], + "minValue": 100, + "maxValue": 2000, + "stepValue": 10, + "enumValues": [], + "scaleValue": "1", + "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47011", + "parameterName": "Heating offset climate system 1", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-10-18T09:51:39+00:00", + "value": 1, + "strVal": "1", + "smartHomeCategories": ["sh-indoorSpOffsHeat"], + "minValue": -10, + "maxValue": 10, + "stepValue": 1, + "enumValues": [], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/test_number.py b/tests/components/myuplink/test_number.py index 273c35ab749c0b..4106af1b5b9987 100644 --- a/tests/components/myuplink/test_number.py +++ b/tests/components/myuplink/test_number.py @@ -14,9 +14,9 @@ TEST_PLATFORM = Platform.NUMBER pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) -ENTITY_ID = "number.gotham_city_degree_minutes" -ENTITY_FRIENDLY_NAME = "Gotham City Degree minutes" -ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940" +ENTITY_ID = "number.gotham_city_heating_offset_climate_system_1" +ENTITY_FRIENDLY_NAME = "Gotham City Heating offset climate system 1" +ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011" async def test_entity_registry( @@ -36,17 +36,16 @@ async def test_attributes( mock_myuplink_client: MagicMock, setup_platform: None, ) -> None: - """Test the switch attributes are correct.""" + """Test the entity attributes are correct.""" state = hass.states.get(ENTITY_ID) - assert state.state == "-875.0" + assert state.state == "1.0" assert state.attributes == { "friendly_name": ENTITY_FRIENDLY_NAME, - "min": -3000, - "max": 3000, + "min": -10.0, + "max": 10.0, "mode": "auto", "step": 1.0, - "unit_of_measurement": "DM", } @@ -60,7 +59,7 @@ async def test_set_value( await hass.services.async_call( TEST_PLATFORM, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: ENTITY_ID, "value": -125}, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, blocking=True, ) await hass.async_block_till_done() @@ -79,7 +78,7 @@ async def test_api_failure( await hass.services.async_call( TEST_PLATFORM, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: ENTITY_ID, "value": -125}, + {ATTR_ENTITY_ID: ENTITY_ID, "value": 1}, blocking=True, ) mock_myuplink_client.async_set_device_points.assert_called_once() diff --git a/tests/components/nam/test_config_flow.py b/tests/components/nam/test_config_flow.py index 1d2376945785a9..6c11399c888e39 100644 --- a/tests/components/nam/test_config_flow.py +++ b/tests/components/nam/test_config_flow.py @@ -445,7 +445,7 @@ async def test_reconfigure_successful(hass: HomeAssistant) -> None: result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert 
result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with ( patch( @@ -488,7 +488,7 @@ async def test_reconfigure_not_successful(hass: HomeAssistant) -> None: result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch( "homeassistant.components.nam.NettigoAirMonitor.async_check_credentials", @@ -500,7 +500,7 @@ async def test_reconfigure_not_successful(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["errors"] == {"base": "cannot_connect"} with ( @@ -544,7 +544,7 @@ async def test_reconfigure_not_the_same_device(hass: HomeAssistant) -> None: result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with ( patch( diff --git a/tests/components/nasweb/__init__.py b/tests/components/nasweb/__init__.py new file mode 100644 index 00000000000000..d4906d710d5f69 --- /dev/null +++ b/tests/components/nasweb/__init__.py @@ -0,0 +1 @@ +"""Tests for the NASweb integration.""" diff --git a/tests/components/nasweb/conftest.py b/tests/components/nasweb/conftest.py new file mode 100644 index 00000000000000..7757f40ee44dfc --- /dev/null +++ b/tests/components/nasweb/conftest.py @@ -0,0 +1,61 @@ +"""Common fixtures for the NASweb tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.nasweb.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +BASE_CONFIG_FLOW = "homeassistant.components.nasweb.config_flow." +BASE_NASWEB_DATA = "homeassistant.components.nasweb.nasweb_data." +BASE_COORDINATOR = "homeassistant.components.nasweb.coordinator." 
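+# Serial number returned by the mocked WebioAPI.get_serial_number below; the tests assert it becomes the config entry unique_id.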
+TEST_SERIAL_NUMBER = "0011223344556677" + + +@pytest.fixture +def validate_input_all_ok() -> Generator[dict[str, AsyncMock | MagicMock]]: + """Yield dictionary of mocked functions required for successful test_form execution.""" + with ( + patch( + BASE_CONFIG_FLOW + "WebioAPI.check_connection", + return_value=True, + ) as check_connection, + patch( + BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", + return_value=True, + ) as refresh_device_info, + patch( + BASE_NASWEB_DATA + "NASwebData.get_webhook_url", + return_value="http://127.0.0.1:8123/api/webhook/de705e77291402afa0dd961426e9f19bb53631a9f2a106c52cfd2d2266913c04", + ) as get_webhook_url, + patch( + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", + return_value=TEST_SERIAL_NUMBER, + ) as get_serial, + patch( + BASE_CONFIG_FLOW + "WebioAPI.status_subscription", + return_value=True, + ) as status_subscription, + patch( + BASE_NASWEB_DATA + "NotificationCoordinator.check_connection", + return_value=True, + ) as check_status_confirmation, + ): + yield { + BASE_CONFIG_FLOW + "WebioAPI.check_connection": check_connection, + BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info": refresh_device_info, + BASE_NASWEB_DATA + "NASwebData.get_webhook_url": get_webhook_url, + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number": get_serial, + BASE_CONFIG_FLOW + "WebioAPI.status_subscription": status_subscription, + BASE_NASWEB_DATA + + "NotificationCoordinator.check_connection": check_status_confirmation, + } diff --git a/tests/components/nasweb/test_config_flow.py b/tests/components/nasweb/test_config_flow.py new file mode 100644 index 00000000000000..a5f2dca680d697 --- /dev/null +++ b/tests/components/nasweb/test_config_flow.py @@ -0,0 +1,208 @@ +"""Test the NASweb config flow.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from webio_api.api_client import AuthError + +from homeassistant import config_entries +from homeassistant.components.nasweb.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.network import NoURLAvailableError + +from .conftest import ( + BASE_CONFIG_FLOW, + BASE_COORDINATOR, + BASE_NASWEB_DATA, + TEST_SERIAL_NUMBER, +) + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + + +TEST_USER_INPUT = { + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", +} + + +async def _add_test_config_entry(hass: HomeAssistant) -> ConfigFlowResult: + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result.get("type") == FlowResultType.FORM + assert not result.get("errors") + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + await hass.async_block_till_done() + return result2 + + +async def test_form( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test the form.""" + result = await _add_test_config_entry(hass) + + assert result.get("type") == FlowResultType.CREATE_ENTRY + assert result.get("title") == "1.1.1.1" + assert result.get("data") == TEST_USER_INPUT + + config_entry = result.get("result") + assert config_entry is not None + assert config_entry.unique_id == TEST_SERIAL_NUMBER + assert len(mock_setup_entry.mock_calls) == 1 + + +async 
def test_form_cannot_connect( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch(BASE_CONFIG_FLOW + "WebioAPI.check_connection", return_value=False): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "cannot_connect"} + + +async def test_form_invalid_auth( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", + side_effect=AuthError, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "invalid_auth"} + + +async def test_form_missing_internal_url( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test missing internal url.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + BASE_NASWEB_DATA + "NASwebData.get_webhook_url", side_effect=NoURLAvailableError + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_internal_url"} + + +async def test_form_missing_nasweb_data( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test missing NASweb data.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", + return_value=None, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_nasweb_data"} + with patch(BASE_CONFIG_FLOW + "WebioAPI.status_subscription", return_value=False): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_nasweb_data"} + + +async def test_missing_status( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test missing status update.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + BASE_COORDINATOR + "NotificationCoordinator.check_connection", + return_value=False, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_status"} + + +async def test_form_exception( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test other exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + )
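+ # The flow should report an "unknown" error when the patched validate_input helper raises an unexpected exception.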
+ + with patch( + "homeassistant.components.nasweb.config_flow.validate_input", + side_effect=Exception, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "unknown"} + + +async def test_form_already_configured( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test already configured device.""" + result = await _add_test_config_entry(hass) + config_entry = result.get("result") + assert config_entry is not None + assert config_entry.unique_id == TEST_SERIAL_NUMBER + + result2_1 = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + result2_2 = await hass.config_entries.flow.async_configure( + result2_1["flow_id"], TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result2_2.get("type") == FlowResultType.ABORT + assert result2_2.get("reason") == "already_configured" diff --git a/tests/components/ness_alarm/test_init.py b/tests/components/ness_alarm/test_init.py index fb003d253dee22..48821d3e68deaa 100644 --- a/tests/components/ness_alarm/test_init.py +++ b/tests/components/ness_alarm/test_init.py @@ -6,6 +6,7 @@ import pytest from homeassistant.components import alarm_control_panel +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.ness_alarm import ( ATTR_CODE, ATTR_OUTPUT_ID, @@ -24,13 +25,6 @@ SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -90,7 +84,9 @@ async def test_dispatch_state_change(hass: HomeAssistant, mock_nessclient) -> No on_state_change(ArmingState.ARMING, None) await hass.async_block_till_done() - assert hass.states.is_state("alarm_control_panel.alarm_panel", STATE_ALARM_ARMING) + assert hass.states.is_state( + "alarm_control_panel.alarm_panel", AlarmControlPanelState.ARMING + ) async def test_alarm_disarm(hass: HomeAssistant, mock_nessclient) -> None: @@ -178,15 +174,27 @@ async def test_arming_state_change(hass: HomeAssistant, mock_nessclient) -> None """Test arming state change handing.""" states = [ (ArmingState.UNKNOWN, None, STATE_UNKNOWN), - (ArmingState.DISARMED, None, STATE_ALARM_DISARMED), - (ArmingState.ARMING, None, STATE_ALARM_ARMING), - (ArmingState.EXIT_DELAY, None, STATE_ALARM_ARMING), - (ArmingState.ARMED, None, STATE_ALARM_ARMED_AWAY), - (ArmingState.ARMED, ArmingMode.ARMED_AWAY, STATE_ALARM_ARMED_AWAY), - (ArmingState.ARMED, ArmingMode.ARMED_HOME, STATE_ALARM_ARMED_HOME), - (ArmingState.ARMED, ArmingMode.ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), - (ArmingState.ENTRY_DELAY, None, STATE_ALARM_PENDING), - (ArmingState.TRIGGERED, None, STATE_ALARM_TRIGGERED), + (ArmingState.DISARMED, None, AlarmControlPanelState.DISARMED), + (ArmingState.ARMING, None, AlarmControlPanelState.ARMING), + (ArmingState.EXIT_DELAY, None, AlarmControlPanelState.ARMING), + (ArmingState.ARMED, None, AlarmControlPanelState.ARMED_AWAY), + ( + ArmingState.ARMED, + ArmingMode.ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, + ), + ( + ArmingState.ARMED, + ArmingMode.ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, + ), + ( + ArmingState.ARMED, + ArmingMode.ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, + ), + 
(ArmingState.ENTRY_DELAY, None, AlarmControlPanelState.PENDING), + (ArmingState.TRIGGERED, None, AlarmControlPanelState.TRIGGERED), ] await async_setup_component(hass, DOMAIN, VALID_CONFIG) diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index 9c8de0224f0d4a..f34c40e09f9cb4 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -30,6 +30,7 @@ CLIENT_SECRET = "some-client-secret" CLOUD_PROJECT_ID = "cloud-id-9876" SUBSCRIBER_ID = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" +SUBSCRIPTION_NAME = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" @dataclass @@ -86,6 +87,17 @@ class NestTestConfig: }, ) +TEST_CONFIG_NEW_SUBSCRIPTION = NestTestConfig( + config_entry_data={ + "sdm": {}, + "project_id": PROJECT_ID, + "cloud_project_id": CLOUD_PROJECT_ID, + "subscription_name": SUBSCRIPTION_NAME, + "auth_implementation": "imported-cred", + }, + credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), +) + class FakeSubscriber(GoogleNestSubscriber): """Fake subscriber that supplies a FakeDeviceManager.""" @@ -95,6 +107,7 @@ class FakeSubscriber(GoogleNestSubscriber): def __init__(self) -> None: # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() + self._subscriber_name = "fake-name" def set_update_callback(self, target: Callable[[EventMessage], Awaitable[None]]): """Capture the callback set by Home Assistant.""" diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 85c64aff37954f..b070d0256124e5 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -22,6 +22,7 @@ ) from homeassistant.components.nest import DOMAIN from homeassistant.components.nest.const import CONF_SUBSCRIBER_ID, SDM_SCOPES +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -287,6 +288,8 @@ async def _setup_func() -> bool: await hass.async_block_till_done() yield _setup_func + if config_entry and config_entry.state == ConfigEntryState.LOADED: + await hass.config_entries.async_unload(config_entry.entry_id) @pytest.fixture diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index dda7bcfa09330f..029879f1413c27 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -28,7 +28,7 @@ from .conftest import FakeAuth from tests.common import async_fire_time_changed -from tests.typing import WebSocketGenerator +from tests.typing import MockHAClientWebSocket, WebSocketGenerator PLATFORM = "camera" CAMERA_DEVICE_TYPE = "sdm.devices.types.CAMERA" @@ -176,6 +176,30 @@ async def async_get_image( return image.content +def get_frontend_stream_type_attribute( + hass: HomeAssistant, entity_id: str +) -> StreamType: + """Get the frontend_stream_type camera attribute.""" + cam = hass.states.get(entity_id) + assert cam is not None + assert cam.state == CameraState.STREAMING + return cam.attributes.get("frontend_stream_type") + + +async def async_frontend_stream_types( + client: MockHAClientWebSocket, entity_id: str +) -> list[str] | None: + """Get the frontend stream types supported.""" + await client.send_json_auto_id( + {"type": "camera/capabilities", "entity_id": entity_id} + ) + msg = await client.receive_json() + assert msg.get("type") == TYPE_RESULT + assert msg.get("success") + assert msg.get("result") + return msg["result"].get("frontend_stream_types") + 
+ async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): @@ -237,16 +261,21 @@ async def test_camera_stream( camera_device: None, auth: FakeAuth, mock_create_stream: Mock, + hass_ws_client: WebSocketGenerator, ) -> None: """Test a basic camera and fetch its live stream.""" auth.responses = [make_stream_url_response()] await setup_platform() assert len(hass.states.async_all()) == 1 - cam = hass.states.get("camera.my_camera") - assert cam is not None - assert cam.state == CameraState.STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.HLS + assert ( + get_frontend_stream_type_attribute(hass, "camera.my_camera") == StreamType.HLS + ) + client = await hass_ws_client(hass) + frontend_stream_types = await async_frontend_stream_types( + client, "camera.my_camera" + ) + assert frontend_stream_types == [StreamType.HLS] stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") assert stream_source == "rtsp://some/url?auth=g.0.streamingToken" @@ -265,12 +294,16 @@ async def test_camera_ws_stream( await setup_platform() assert len(hass.states.async_all()) == 1 - cam = hass.states.get("camera.my_camera") - assert cam is not None - assert cam.state == CameraState.STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.HLS + assert ( + get_frontend_stream_type_attribute(hass, "camera.my_camera") == StreamType.HLS + ) client = await hass_ws_client(hass) + frontend_stream_types = await async_frontend_stream_types( + client, "camera.my_camera" + ) + assert frontend_stream_types == [StreamType.HLS] + await client.send_json( { "id": 2, @@ -322,7 +355,7 @@ async def test_camera_ws_stream_failure( async def test_camera_stream_missing_trait( hass: HomeAssistant, setup_platform, create_device ) -> None: - """Test fetching a video stream when not supported by the API.""" + """Test that cameras missing a live stream are not supported.""" create_device.create( { "sdm.devices.traits.Info": { @@ -338,16 +371,7 @@ async def test_camera_stream_missing_trait( ) await setup_platform() - assert len(hass.states.async_all()) == 1 - cam = hass.states.get("camera.my_camera") - assert cam is not None - assert cam.state == CameraState.IDLE - - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source is None - - # Fallback to placeholder image - await async_get_image(hass) + assert len(hass.states.async_all()) == 0 async def test_refresh_expired_stream_token( @@ -459,6 +483,50 @@ async def test_stream_response_already_expired( assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" +async def test_extending_stream_already_expired( + hass: HomeAssistant, + auth: FakeAuth, + setup_platform: PlatformSetup, + camera_device: None, +) -> None: + """Test a API response when extending the stream returns an expired stream url.""" + now = utcnow() + stream_1_expiration = now + datetime.timedelta(seconds=180) + stream_2_expiration = now + datetime.timedelta(seconds=30) # Will be in the past + stream_3_expiration = now + datetime.timedelta(seconds=600) + auth.responses = [ + make_stream_url_response(stream_1_expiration, token_num=1), + make_stream_url_response(stream_2_expiration, token_num=2), + make_stream_url_response(stream_3_expiration, token_num=3), + ] + await setup_platform() + + assert len(hass.states.async_all()) == 1 + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == 
CameraState.STREAMING + + # The stream is expired, but we return it anyway + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.1.streamingToken" + + # Jump to when the stream will be refreshed + await fire_alarm(hass, now + datetime.timedelta(seconds=160)) + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" + + # The stream will have expired in the past, but 1 minute min refresh interval is applied. + # The stream token is not updated. + await fire_alarm(hass, now + datetime.timedelta(seconds=170)) + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" + + # Now go past the min update interval and the stream is refreshed + await fire_alarm(hass, now + datetime.timedelta(seconds=225)) + stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") + assert stream_source == "rtsp://some/url?auth=g.3.streamingToken" + + async def test_camera_removed( hass: HomeAssistant, auth: FakeAuth, @@ -577,11 +645,11 @@ async def test_refresh_expired_stream_failure( assert create_stream.called +@pytest.mark.usefixtures("webrtc_camera_device") async def test_camera_web_rtc( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, - webrtc_camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -606,31 +674,43 @@ async def test_camera_web_rtc( assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - msg = await client.receive_json() - assert msg["id"] == 5 - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"]["answer"] == "v=0\r\ns=-\r\n" + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": "v=0\r\ns=-\r\n", + } # Nest WebRTC cameras return a placeholder await async_get_image(hass) await async_get_image(hass, width=1024, height=768) +@pytest.mark.usefixtures("auth", "camera_device") async def test_camera_web_rtc_unsupported( hass: HomeAssistant, - auth, hass_ws_client: WebSocketGenerator, - camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -643,28 +723,37 @@ async def test_camera_web_rtc_unsupported( assert cam.attributes["frontend_stream_type"] == StreamType.HLS client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( + {"type": "camera/capabilities", "entity_id": "camera.my_camera"} + ) + msg = await client.receive_json() + + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"] == {"frontend_stream_types": ["hls"]} + + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", 
"entity_id": "camera.my_camera", "offer": "a=recvonly", } ) msg = await client.receive_json() - assert msg["id"] == 5 assert msg["type"] == TYPE_RESULT assert not msg["success"] - assert msg["error"]["code"] == "web_rtc_offer_failed" - assert msg["error"]["message"].startswith("Camera does not support WebRTC") + assert msg["error"] == { + "code": "webrtc_offer_failed", + "message": "Camera does not support WebRTC, frontend_stream_type=hls", + } +@pytest.mark.usefixtures("webrtc_camera_device") async def test_camera_web_rtc_offer_failure( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, - webrtc_camera_device, setup_platform, ) -> None: """Test a basic camera that supports web rtc.""" @@ -679,36 +768,47 @@ async def test_camera_web_rtc_offer_failure( assert cam.state == CameraState.STREAMING client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - msg = await client.receive_json() - assert msg["id"] == 5 - assert msg["type"] == TYPE_RESULT - assert not msg["success"] - assert msg["error"]["code"] == "web_rtc_offer_failed" - assert msg["error"]["message"].startswith("Nest API error") - - + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "Nest API error: Bad Request response from API (400)", + } + + +@pytest.mark.usefixtures("mock_create_stream") async def test_camera_multiple_streams( hass: HomeAssistant, auth, hass_ws_client: WebSocketGenerator, create_device, setup_platform, - mock_create_stream, ) -> None: """Test a camera supporting multiple stream types.""" expiration = utcnow() + datetime.timedelta(seconds=100) auth.responses = [ - # RTSP response - make_stream_url_response(), # WebRTC response aiohttp.web.json_response( { @@ -745,23 +845,127 @@ async def test_camera_multiple_streams( # Prefer WebRTC over RTSP/HLS assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC - # RTSP stream + # RTSP stream is not supported stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.0.streamingToken" + assert not stream_source # WebRTC stream client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 5, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.my_camera", "offer": "a=recvonly", } ) - msg = await client.receive_json() - assert msg["id"] == 5 - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"]["answer"] == "v=0\r\ns=-\r\n" + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await 
client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": "v=0\r\ns=-\r\n", + } + + +@pytest.mark.usefixtures("webrtc_camera_device") +async def test_webrtc_refresh_expired_stream( + hass: HomeAssistant, + setup_platform: PlatformSetup, + hass_ws_client: WebSocketGenerator, + auth: FakeAuth, +) -> None: + """Test a camera webrtc expiration and refresh.""" + now = utcnow() + + stream_1_expiration = now + datetime.timedelta(seconds=90) + stream_2_expiration = now + datetime.timedelta(seconds=180) + auth.responses = [ + aiohttp.web.json_response( + { + "results": { + "answerSdp": "v=0\r\ns=-\r\n", + "mediaSessionId": "yP2grqz0Y1V_wgiX9KEbMWHoLd...", + "expiresAt": stream_1_expiration.isoformat(timespec="seconds"), + }, + } + ), + aiohttp.web.json_response( + { + "results": { + "mediaSessionId": "yP2grqz0Y1V_wgiX9KEbMWHoLd...", + "expiresAt": stream_2_expiration.isoformat(timespec="seconds"), + }, + } + ), + ] + await setup_platform() + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == CameraState.STREAMING + assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.my_camera", + "offer": "a=recvonly", + } + ) + + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": "v=0\r\ns=-\r\n", + } + + assert len(auth.captured_requests) == 1 + assert ( + auth.captured_requests[0][2].get("command") + == "sdm.devices.commands.CameraLiveStream.GenerateWebRtcStream" + ) + + # Fire alarm before stream_1_expiration. 
The stream url is not refreshed + next_update = now + datetime.timedelta(seconds=25) + await fire_alarm(hass, next_update) + assert len(auth.captured_requests) == 1 + + # Alarm is near stream_1_expiration which causes the stream extension + next_update = now + datetime.timedelta(seconds=60) + await fire_alarm(hass, next_update) + + assert len(auth.captured_requests) >= 2 + assert ( + auth.captured_requests[1][2].get("command") + == "sdm.devices.commands.CameraLiveStream.ExtendWebRtcStream" + ) diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index b6e84ce358f534..8b05ace6d4de02 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -6,11 +6,7 @@ from typing import Any from unittest.mock import patch -from google_nest_sdm.exceptions import ( - AuthException, - ConfigurationException, - SubscriberException, -) +from google_nest_sdm.exceptions import AuthException from google_nest_sdm.structure import Structure import pytest @@ -40,7 +36,7 @@ WEB_REDIRECT_URL = "https://example.com/auth/external/callback" APP_REDIRECT_URL = "urn:ietf:wg:oauth:2.0:oob" - +RAND_SUBSCRIBER_SUFFIX = "ABCDEF" FAKE_DHCP_DATA = dhcp.DhcpServiceInfo( ip="127.0.0.2", macaddress="001122334455", hostname="fake_hostname" @@ -53,6 +49,16 @@ def nest_test_config() -> NestTestConfig: return TEST_CONFIGFLOW_APP_CREDS +@pytest.fixture(autouse=True) +def mock_rand_topic_name_fixture() -> None: + """Set the topic name random string to a constant.""" + with patch( + "homeassistant.components.nest.config_flow.get_random_string", + return_value=RAND_SUBSCRIBER_SUFFIX, + ): + yield + + class OAuthFixture: """Simulate the oauth flow used by the config flow.""" @@ -158,6 +164,43 @@ def async_mock_refresh(self) -> None: }, ) + async def async_complete_pubsub_flow( + self, + result: dict, + selected_topic: str, + selected_subscription: str = "create_new_subscription", + user_input: dict | None = None, + ) -> ConfigEntry: + """Fixture to walk through the Pub/Sub topic and subscription steps. + + This picks a simple set of steps that are reusable for most flows without + exercising the corner cases. 
+ """ + + # Validate Pub/Sub topics are shown + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert not result.get("errors") + + # Select the Pub/Sub topic to show available subscriptions (none) + result = await self.async_configure( + result, + { + "topic_name": selected_topic, + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert not result.get("errors") + + # Create the subscription and end the flow + return await self.async_finish_setup( + result, + { + "subscription_name": selected_subscription, + }, + ) + async def async_finish_setup( self, result: dict, user_input: dict | None = None ) -> ConfigEntry: @@ -179,15 +222,6 @@ async def async_configure( user_input, ) - async def async_pubsub_flow(self, result: dict, cloud_project_id="") -> None: - """Verify the pubsub creation step.""" - # Render form with a link to get an auth token - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pubsub" - assert "description_placeholders" in result - assert "url" in result["description_placeholders"] - assert result["data_schema"]({}) == {"cloud_project_id": cloud_project_id} - def get_config_entry(self) -> ConfigEntry: """Get the config entry.""" entries = self.hass.config_entries.async_entries(DOMAIN) @@ -206,6 +240,115 @@ async def oauth( return OAuthFixture(hass, hass_client_no_auth, aioclient_mock) +@pytest.fixture(name="sdm_managed_topic") +def mock_sdm_managed_topic() -> bool: + """Fixture to configure fake server responses for SDM owned Pub/Sub topics.""" + return False + + +@pytest.fixture(name="user_managed_topics") +def mock_user_managed_topics() -> list[str]: + """Fixture to configure fake server response for user owned Pub/Sub topics.""" + return [] + + +@pytest.fixture(name="subscriptions") +def mock_subscriptions() -> list[tuple[str, str]]: + """Fixture to configure fake server response for user subscriptions that exist.""" + return [] + + +@pytest.fixture(name="device_access_project_id") +def mock_device_access_project_id() -> str: + """Fixture to configure the device access console project id used in tests.""" + return PROJECT_ID + + +@pytest.fixture(name="cloud_project_id") +def mock_cloud_project_id() -> str: + """Fixture to configure the cloud console project id used in tests.""" + return CLOUD_PROJECT_ID + + +@pytest.fixture(name="create_subscription_status") +def mock_create_subscription_status() -> str: + """Fixture to configure the return code when creating the subscription.""" + return HTTPStatus.OK + + +@pytest.fixture(name="list_topics_status") +def mock_list_topics_status() -> str: + """Fixture to configure the return code when listing topics.""" + return HTTPStatus.OK + + +@pytest.fixture(name="list_subscriptions_status") +def mock_list_subscriptions_status() -> str: + """Fixture to configure the return code when listing subscriptions.""" + return HTTPStatus.OK + + +@pytest.fixture(autouse=True) +def mock_pubsub_api_responses( + aioclient_mock: AiohttpClientMocker, + sdm_managed_topic: bool, + user_managed_topics: list[str], + subscriptions: list[tuple[str, str]], + device_access_project_id: str, + cloud_project_id: str, + create_subscription_status: HTTPStatus, + list_topics_status: HTTPStatus, + list_subscriptions_status: HTTPStatus, +) -> None: + """Configure a server response for an SDM managed Pub/Sub topic.
+ + We check for a topic created by the SDM Device Access Console (but note we don't have permission to read it) + or the user has created one themselves in the Google Cloud Project. + """ + aioclient_mock.get( + f"https://pubsub.googleapis.com/v1/projects/sdm-prod/topics/enterprise-{device_access_project_id}", + status=HTTPStatus.FORBIDDEN if sdm_managed_topic else HTTPStatus.NOT_FOUND, + ) + aioclient_mock.get( + f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/topics", + json={ + "topics": [ + { + "name": topic_name, + } + for topic_name in user_managed_topics or () + ] + }, + status=list_topics_status, + ) + # We check for a topic created by the SDM Device Access Console (but note we don't have permission to read it) + # or the user has created one themselves in the Google Cloud Project. + aioclient_mock.get( + f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/subscriptions", + json={ + "subscriptions": [ + { + "name": subscription_name, + "topic": topic, + "pushConfig": {}, + "ackDeadlineSeconds": 10, + "messageRetentionDuration": "604800s", + "expirationPolicy": {"ttl": "2678400s"}, + "state": "ACTIVE", + } + for (subscription_name, topic) in subscriptions or () + ] + }, + status=list_subscriptions_status, + ) + aioclient_mock.put( + f"https://pubsub.googleapis.com/v1/projects/{cloud_project_id}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", + json={}, + status=create_subscription_status, + ) + + +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_app_credentials( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -218,20 +361,22 @@ async def test_app_credentials( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result) + result = await oauth.async_configure(result, None) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, + "subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -240,6 +385,10 @@ async def test_app_credentials( } +@pytest.mark.parametrize( + ("sdm_managed_topic", "device_access_project_id", "cloud_project_id"), + [(True, "new-project-id", "new-cloud-project-id")], +) async def test_config_flow_restart( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -272,20 +421,22 @@ async def test_config_flow_restart( await oauth.async_oauth_web_flow(result, "new-project-id") oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-new-project-id" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert "projects/new-cloud-project-id/subscriptions" in 
data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": "new-cloud-project-id", "project_id": "new-project-id", + "subscription_name": "projects/new-cloud-project-id/subscriptions/home-assistant-ABCDEF", + "topic_name": "projects/sdm-prod/topics/enterprise-new-project-id", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -294,6 +445,7 @@ async def test_config_flow_restart( } +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_flow_wrong_project_id( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -324,20 +476,22 @@ async def test_config_flow_wrong_project_id( await hass.async_block_till_done() oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-some-project-id" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, + "subscription_name": "projects/cloud-id-9876/subscriptions/home-assistant-ABCDEF", + "topic_name": "projects/sdm-prod/topics/enterprise-some-project-id", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -346,6 +500,9 @@ async def test_config_flow_wrong_project_id( } +@pytest.mark.parametrize( + ("sdm_managed_topic", "create_subscription_status"), [(True, HTTPStatus.NOT_FOUND)] +) async def test_config_flow_pubsub_configuration_error( hass: HomeAssistant, oauth, @@ -361,14 +518,41 @@ async def test_config_flow_pubsub_configuration_error( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - mock_subscriber.create_subscription.side_effect = ConfigurationException result = await oauth.async_configure(result, {"code": "1234"}) - assert result["type"] is FlowResultType.FORM - assert "errors" in result - assert "cloud_project_id" in result["errors"] - assert result["errors"]["cloud_project_id"] == "bad_project_id" + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert result.get("data_schema")({}) == { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + } + # Select Pub/Sub topic the show available subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("data_schema")({}) == { + "subscription_name": "create_new_subscription", + } + + # Failure when creating the subscription + result = await oauth.async_configure( + result, + { + "subscription_name": "create_new_subscription", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": "pubsub_api_error"} + +@pytest.mark.parametrize( + ("sdm_managed_topic", "create_subscription_status"), + [(True, HTTPStatus.INTERNAL_SERVER_ERROR)], +) async def test_config_flow_pubsub_subscriber_error( hass: HomeAssistant, oauth, setup_platform, 
mock_subscriber ) -> None: @@ -380,17 +564,42 @@ async def test_config_flow_pubsub_subscriber_error( ) await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - - mock_subscriber.create_subscription.side_effect = SubscriberException() result = await oauth.async_configure(result, {"code": "1234"}) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert result.get("data_schema")({}) == { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + } - assert result["type"] is FlowResultType.FORM - assert "errors" in result - assert "cloud_project_id" in result["errors"] - assert result["errors"]["cloud_project_id"] == "subscriber_error" + # Select Pub/Sub topic the show available subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("data_schema")({}) == { + "subscription_name": "create_new_subscription", + } + + # Failure when creating the subscription + result = await oauth.async_configure( + result, + { + "subscription_name": "create_new_subscription", + }, + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": "pubsub_api_error"} -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( + ("nest_test_config", "sdm_managed_topic", "device_access_project_id"), + [(TEST_CONFIG_APP_CREDS, True, "project-id-2")], +) async def test_multiple_config_entries( hass: HomeAssistant, oauth, setup_platform ) -> None: @@ -405,7 +614,10 @@ async def test_multiple_config_entries( ) await oauth.async_app_creds_flow(result, project_id="project-id-2") oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result) + result = await oauth.async_configure(result, user_input={}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-project-id-2" + ) assert entry.title == "Mock Title" assert "token" in entry.data @@ -413,7 +625,9 @@ async def test_multiple_config_entries( assert len(entries) == 2 -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( + ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] +) async def test_duplicate_config_entries( hass: HomeAssistant, oauth, setup_platform ) -> None: @@ -438,7 +652,9 @@ async def test_duplicate_config_entries( assert result.get("reason") == "already_configured" -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( + ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] +) async def test_reauth_multiple_config_entries( hass: HomeAssistant, oauth, setup_platform, config_entry ) -> None: @@ -489,6 +705,7 @@ async def test_reauth_multiple_config_entries( assert entry.data.get("extra_data") +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_pubsub_subscription_strip_whitespace( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -502,8 +719,10 @@ async def test_pubsub_subscription_strip_whitespace( result, cloud_project_id=" " + CLOUD_PROJECT_ID + " " ) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) - + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await 
oauth.async_complete_pubsub_flow( + result, selected_topic="projects/sdm-prod/topics/enterprise-some-project-id" + ) assert entry.title == "Import from configuration.yaml" assert "token" in entry.data entry.data["token"].pop("expires_at") @@ -514,10 +733,14 @@ async def test_pubsub_subscription_strip_whitespace( "type": "Bearer", "expires_in": 60, } - assert "subscriber_id" in entry.data + assert "subscription_name" in entry.data assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID +@pytest.mark.parametrize( + ("sdm_managed_topic", "create_subscription_status"), + [(True, HTTPStatus.UNAUTHORIZED)], +) async def test_pubsub_subscription_auth_failure( hass: HomeAssistant, oauth, setup_platform, mock_subscriber ) -> None: @@ -528,17 +751,43 @@ async def test_pubsub_subscription_auth_failure( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_subscriber.create_subscription.side_effect = AuthException() - await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() result = await oauth.async_configure(result, {"code": "1234"}) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert result.get("data_schema")({}) == { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + } - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "invalid_access_token" + # Select Pub/Sub topic the show available subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("data_schema")({}) == { + "subscription_name": "create_new_subscription", + } + + # Failure when creating the subscription + result = await oauth.async_configure( + result, + { + "subscription_name": "create_new_subscription", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("errors") == {"base": "pubsub_api_error"} -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_APP_CREDS]) +@pytest.mark.parametrize( + ("nest_test_config", "sdm_managed_topic"), [(TEST_CONFIG_APP_CREDS, True)] +) async def test_pubsub_subscriber_config_entry_reauth( hass: HomeAssistant, oauth, @@ -568,6 +817,7 @@ async def test_pubsub_subscriber_config_entry_reauth( assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_from_home( hass: HomeAssistant, oauth, setup_platform, subscriber ) -> None: @@ -595,13 +845,24 @@ async def test_config_entry_title_from_home( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) assert entry.title == "Example Home" assert "token" in entry.data - assert "subscriber_id" in entry.data - assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID + assert entry.data.get("cloud_project_id") == CLOUD_PROJECT_ID + assert ( + entry.data.get("subscription_name") + == f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}" + ) + assert ( + entry.data.get("topic_name") + == f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) 
+@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_multiple_homes( hass: HomeAssistant, oauth, setup_platform, subscriber ) -> None: @@ -641,10 +902,14 @@ async def test_config_entry_title_multiple_homes( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) assert entry.title == "Example Home #1, Example Home #2" +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_title_failure_fallback( hass: HomeAssistant, oauth, setup_platform, mock_subscriber ) -> None: @@ -658,13 +923,26 @@ async def test_title_failure_fallback( oauth.async_mock_refresh() mock_subscriber.async_get_device_manager.side_effect = AuthException() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) + assert entry.title == "Import from configuration.yaml" assert "token" in entry.data - assert "subscriber_id" in entry.data - assert entry.data["cloud_project_id"] == CLOUD_PROJECT_ID + assert entry.data.get("cloud_project_id") == CLOUD_PROJECT_ID + assert ( + entry.data.get("subscription_name") + == f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}" + ) + assert ( + entry.data.get("topic_name") + == f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_structure_missing_trait( hass: HomeAssistant, oauth, setup_platform, subscriber ) -> None: @@ -689,7 +967,10 @@ async def test_structure_missing_trait( await oauth.async_app_creds_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) # Fallback to default name assert entry.title == "Import from configuration.yaml" @@ -713,6 +994,7 @@ async def test_dhcp_discovery( assert result.get("reason") == "missing_credentials" +@pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_dhcp_discovery_with_creds( hass: HomeAssistant, oauth, subscriber, setup_platform ) -> None: @@ -735,21 +1017,23 @@ async def test_dhcp_discovery_with_creds( result = await oauth.async_configure(result, {"project_id": PROJECT_ID}) await oauth.async_oauth_web_flow(result) oauth.async_mock_refresh() - entry = await oauth.async_finish_setup(result, {"code": "1234"}) - await hass.async_block_till_done() + + result = await oauth.async_configure(result, {"code": "1234"}) + entry = await oauth.async_complete_pubsub_flow( + result, selected_topic=f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}" + ) data = dict(entry.data) assert "token" in data data["token"].pop("expires_in") data["token"].pop("expires_at") - assert "subscriber_id" in data - assert f"projects/{CLOUD_PROJECT_ID}/subscriptions" in data["subscriber_id"] - data.pop("subscriber_id") assert data == { "sdm": {}, "auth_implementation": "imported-cred", "cloud_project_id": CLOUD_PROJECT_ID, "project_id": PROJECT_ID, + "subscription_name": 
f"projects/{CLOUD_PROJECT_ID}/subscriptions/home-assistant-{RAND_SUBSCRIBER_SUFFIX}", + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", "token": { "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", @@ -789,3 +1073,133 @@ async def test_token_error( result = await oauth.async_configure(result, user_input=None) assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == error_reason + + +@pytest.mark.parametrize( + ("user_managed_topics", "subscriptions"), + [ + ( + [f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id"], + [ + ( + f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", + f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", + ) + ], + ) + ], +) +async def test_existing_topic_and_subscription( + hass: HomeAssistant, oauth, subscriber, setup_platform +) -> None: + """Test selecting existing user managed topic and subscription.""" + await setup_platform() + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + entry = await oauth.async_complete_pubsub_flow( + result, + selected_topic=f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", + selected_subscription=f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", + ) + + data = dict(entry.data) + assert "token" in data + data["token"].pop("expires_in") + data["token"].pop("expires_at") + assert data == { + "sdm": {}, + "auth_implementation": "imported-cred", + "cloud_project_id": CLOUD_PROJECT_ID, + "project_id": PROJECT_ID, + "subscription_name": f"projects/{CLOUD_PROJECT_ID}/subscriptions/some-subscription-id", + "subscriber_id_imported": True, + "topic_name": f"projects/{CLOUD_PROJECT_ID}/topics/some-topic-id", + "token": { + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + }, + } + + +async def test_no_eligible_topics( + hass: HomeAssistant, oauth, subscriber, setup_platform +) -> None: + """Test the case where there are no eligible pub/sub topics.""" + await setup_platform() + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub" + assert result.get("errors") == {"base": "no_pubsub_topics"} + + +@pytest.mark.parametrize( + ("list_topics_status"), + [ + (HTTPStatus.INTERNAL_SERVER_ERROR), + ], +) +async def test_list_topics_failure( + hass: HomeAssistant, oauth, subscriber, setup_platform +) -> None: + """Test selecting existing user managed topic and subscription.""" + await setup_platform() + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub" + assert result.get("errors") == {"base": "pubsub_api_error"} + + +@pytest.mark.parametrize( + ("sdm_managed_topic", "list_subscriptions_status"), + [ + (True, HTTPStatus.INTERNAL_SERVER_ERROR), + ], +) +async def test_list_subscriptions_failure( + hass: HomeAssistant, oauth, subscriber, setup_platform +) 
-> None: + """Test selecting existing user managed topic and subscription.""" + await setup_platform() + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await oauth.async_app_creds_flow(result) + oauth.async_mock_refresh() + + result = await oauth.async_configure(result, None) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_topic" + assert not result.get("errors") + + # Select Pub/Sub topic the show available subscriptions (none) + result = await oauth.async_configure( + result, + { + "topic_name": f"projects/sdm-prod/topics/enterprise-{PROJECT_ID}", + }, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "pubsub_subscription" + assert result.get("errors") == {"base": "pubsub_api_error"} diff --git a/tests/components/nest/test_init.py b/tests/components/nest/test_init.py index f3226c936fb0a3..a17803a6cdeddb 100644 --- a/tests/components/nest/test_init.py +++ b/tests/components/nest/test_init.py @@ -31,6 +31,7 @@ SUBSCRIBER_ID, TEST_CONFIG_ENTRY_LEGACY, TEST_CONFIG_LEGACY, + TEST_CONFIG_NEW_SUBSCRIPTION, TEST_CONFIGFLOW_APP_CREDS, FakeSubscriber, PlatformSetup, @@ -97,6 +98,19 @@ async def test_setup_success( assert entries[0].state is ConfigEntryState.LOADED +@pytest.mark.parametrize("nest_test_config", [(TEST_CONFIG_NEW_SUBSCRIPTION)]) +async def test_setup_success_new_subscription_format( + hass: HomeAssistant, error_caplog: pytest.LogCaptureFixture, setup_platform +) -> None: + """Test successful setup.""" + await setup_platform() + assert not error_caplog.records + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].state is ConfigEntryState.LOADED + + @pytest.mark.parametrize("subscriber_id", [("invalid-subscriber-format")]) async def test_setup_configuration_failure( hass: HomeAssistant, @@ -171,19 +185,6 @@ async def test_subscriber_auth_failure( assert flows[0]["step_id"] == "reauth_confirm" -@pytest.mark.parametrize("subscriber_id", [(None)]) -async def test_setup_missing_subscriber_id( - hass: HomeAssistant, warning_caplog: pytest.LogCaptureFixture, setup_base_platform -) -> None: - """Test missing subscriber id from configuration.""" - await setup_base_platform() - assert "Configuration option" in warning_caplog.text - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - assert entries[0].state is ConfigEntryState.SETUP_ERROR - - @pytest.mark.parametrize("subscriber_side_effect", [(ConfigurationException())]) async def test_subscriber_configuration_failure( hass: HomeAssistant, diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index 101bfae089d3e7..2526bfdf975f6e 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -48,6 +48,9 @@ "customName": DEVICE_NAME, }, "sdm.devices.traits.CameraImage": {}, + "sdm.devices.traits.CameraLiveStream": { + "supportedProtocols": ["RTSP"], + }, "sdm.devices.traits.CameraEventImage": {}, "sdm.devices.traits.CameraPerson": {}, "sdm.devices.traits.CameraMotion": {}, @@ -57,7 +60,9 @@ "customName": DEVICE_NAME, }, "sdm.devices.traits.CameraClipPreview": {}, - "sdm.devices.traits.CameraLiveStream": {}, + "sdm.devices.traits.CameraLiveStream": { + "supportedProtocols": ["WEB_RTC"], + }, "sdm.devices.traits.CameraPerson": {}, "sdm.devices.traits.CameraMotion": {}, } diff --git 
a/tests/components/netatmo/snapshots/test_climate.ambr b/tests/components/netatmo/snapshots/test_climate.ambr index b9a92882b9ed32..aeae1fd71c702c 100644 --- a/tests/components/netatmo/snapshots/test_climate.ambr +++ b/tests/components/netatmo/snapshots/test_climate.ambr @@ -14,8 +14,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -41,7 +41,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '222452125-DeviceType.OTM', 'unit_of_measurement': None, }) @@ -60,8 +60,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'supported_features': , 'target_temp_step': 0.5, @@ -89,8 +89,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -116,7 +116,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '2940411577-DeviceType.NRV', 'unit_of_measurement': None, }) @@ -135,12 +135,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'Frost Guard', + 'preset_mode': 'frost_guard', 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -170,8 +170,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -197,7 +197,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '1002003001-DeviceType.BNS', 'unit_of_measurement': None, }) @@ -215,12 +215,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'Schedule', + 'preset_mode': 'schedule', 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -250,8 +250,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -277,7 +277,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '2833524037-DeviceType.NRV', 'unit_of_measurement': None, }) @@ -296,12 +296,12 @@ ]), 'max_temp': 30, 'min_temp': 7, - 'preset_mode': 'Frost Guard', + 'preset_mode': 'frost_guard', 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , @@ -332,8 +332,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'target_temp_step': 0.5, }), @@ -359,7 +359,7 @@ 'platform': 'netatmo', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'thermostat', 'unique_id': '2746182631-DeviceType.NATherm1', 'unit_of_measurement': None, }) @@ -382,8 +382,8 @@ 'preset_modes': list([ 'away', 'boost', - 'Frost Guard', - 'Schedule', + 'frost_guard', + 'schedule', ]), 'selected_schedule': 'Default', 'supported_features': , diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index 0d13a88cd674ea..ba18c2ca21a73c 
100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1162,58 +1162,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.cold_water_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.cold_water_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#8-12:34:56:00:16:0e#8-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.cold_water_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Cold water Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.cold_water_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.consumption_meter_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1412,58 +1360,6 @@ 'state': 'unavailable', }) # --- -# name: test_entity[sensor.ecocompteur_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.ecocompteur_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e-12:34:56:00:16:0e-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.ecocompteur_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Écocompteur Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.ecocompteur_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.gas_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1511,58 +1407,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.gas_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.gas_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#6-12:34:56:00:16:0e#6-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.gas_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Gas Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.gas_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.home_avg_atmospheric_pressure-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3260,58 +3104,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.hot_water_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.hot_water_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#7-12:34:56:00:16:0e#7-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.hot_water_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Hot water Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.hot_water_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.kitchen_atmospheric_pressure-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3899,58 +3691,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.line_1_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_1_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#0-12:34:56:00:16:0e#0-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_1_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 1 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_1_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.line_2_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3998,58 +3738,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.line_2_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - 
}), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_2_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#1-12:34:56:00:16:0e#1-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_2_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 2 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_2_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.line_3_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4097,58 +3785,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.line_3_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_3_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#2-12:34:56:00:16:0e#2-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_3_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 3 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_3_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.line_4_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4196,58 +3832,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.line_4_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_4_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#3-12:34:56:00:16:0e#3-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_4_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 4 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 
'entity_id': 'sensor.line_4_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.line_5_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4295,58 +3879,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.line_5_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.line_5_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#4-12:34:56:00:16:0e#4-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.line_5_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Line 5 Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.line_5_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.livingroom_atmospheric_pressure-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -5625,58 +5157,6 @@ 'state': 'True', }) # --- -# name: test_entity[sensor.total_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.total_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power', - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '12:34:56:00:16:0e#5-12:34:56:00:16:0e#5-power', - 'unit_of_measurement': , - }) -# --- -# name: test_entity[sensor.total_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'device_class': 'power', - 'friendly_name': 'Total Power', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.total_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_entity[sensor.valve1_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/netatmo/test_climate.py b/tests/components/netatmo/test_climate.py index 4b908580346de7..dc0312f7acde0e 100644 --- a/tests/components/netatmo/test_climate.py +++ b/tests/components/netatmo/test_climate.py @@ -282,7 +282,7 @@ async def test_service_preset_mode_frost_guard_thermostat( assert hass.states.get(climate_entity_livingroom).state == "auto" assert ( hass.states.get(climate_entity_livingroom).attributes["preset_mode"] - == "Frost Guard" + == "frost_guard" ) # Test service setting the preset mode to "frost guard" @@ -779,7 +779,7 @@ async def test_service_preset_mode_already_boost_valves( assert 
hass.states.get(climate_entity_entrada).state == "auto" assert ( hass.states.get(climate_entity_entrada).attributes["preset_mode"] - == "Frost Guard" + == "frost_guard" ) assert hass.states.get(climate_entity_entrada).attributes["temperature"] == 7 diff --git a/tests/components/network/test_init.py b/tests/components/network/test_init.py index 57a12868d0a89c..dca31106dba9eb 100644 --- a/tests/components/network/test_init.py +++ b/tests/components/network/test_init.py @@ -886,3 +886,42 @@ async def test_async_get_announce_addresses_no_source_ip(hass: HomeAssistant) -> "172.16.1.5", "fe80::dead:beef:dead:beef", ] + + +async def test_websocket_network_url( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test the network/url websocket command.""" + assert await async_setup_component(hass, "network", {}) + + client = await hass_ws_client(hass) + + with ( + patch( + "homeassistant.helpers.network._get_internal_url", return_value="internal" + ), + patch("homeassistant.helpers.network._get_cloud_url", return_value="cloud"), + ): + await client.send_json({"id": 1, "type": "network/url"}) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == { + "internal": "internal", + "external": "cloud", + "cloud": "cloud", + } + + # Test with no cloud URL + with ( + patch( + "homeassistant.helpers.network._get_internal_url", return_value="internal" + ), + ): + await client.send_json({"id": 2, "type": "network/url"}) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == { + "internal": "internal", + "external": None, + "cloud": None, + } diff --git a/tests/components/nextbus/__init__.py b/tests/components/nextbus/__init__.py index 609e0bb574b803..e0af11965c4f1c 100644 --- a/tests/components/nextbus/__init__.py +++ b/tests/components/nextbus/__init__.py @@ -1 +1,34 @@ """The tests for the nexbus component.""" + +from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_STOP +from homeassistant.core import HomeAssistant + +from .const import VALID_AGENCY_TITLE, VALID_ROUTE_TITLE, VALID_STOP_TITLE + +from tests.common import MockConfigEntry + + +async def assert_setup_sensor( + hass: HomeAssistant, + config: dict[str, dict[str, str]], + expected_state=ConfigEntryState.LOADED, + route_title: str = VALID_ROUTE_TITLE, +) -> MockConfigEntry: + """Set up the sensor and assert it's been created.""" + unique_id = f"{config[DOMAIN][CONF_AGENCY]}_{config[DOMAIN][CONF_ROUTE]}_{config[DOMAIN][CONF_STOP]}" + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config[DOMAIN], + title=f"{VALID_AGENCY_TITLE} {route_title} {VALID_STOP_TITLE}", + unique_id=unique_id, + ) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is expected_state + + return config_entry diff --git a/tests/components/nextbus/conftest.py b/tests/components/nextbus/conftest.py index 03e62a811f4198..3f6879893131a8 100644 --- a/tests/components/nextbus/conftest.py +++ b/tests/components/nextbus/conftest.py @@ -1,10 +1,13 @@ """Test helpers for NextBus tests.""" +from collections.abc import Generator from typing import Any -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import pytest +from .const import BASIC_RESULTS + @pytest.fixture( params=[ @@ -128,3 +131,21 @@ def route_details_side_effect(agency: 
str, route: str) -> dict: instance.route_details.side_effect = route_details_side_effect return instance + + +@pytest.fixture +def mock_nextbus() -> Generator[MagicMock]: + """Create a mock py_nextbus module.""" + with patch("homeassistant.components.nextbus.coordinator.NextBusClient") as client: + yield client + + +@pytest.fixture +def mock_nextbus_predictions( + mock_nextbus: MagicMock, +) -> Generator[MagicMock]: + """Create a mock of NextBusClient predictions.""" + instance = mock_nextbus.return_value + instance.predictions_for_stop.return_value = BASIC_RESULTS + + return instance.predictions_for_stop diff --git a/tests/components/nextbus/const.py b/tests/components/nextbus/const.py new file mode 100644 index 00000000000000..66eb3635ca92de --- /dev/null +++ b/tests/components/nextbus/const.py @@ -0,0 +1,101 @@ +"""Constants for NextBus tests.""" + +from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import CONF_STOP + +VALID_AGENCY = "sfmta-cis" +VALID_ROUTE = "F" +VALID_STOP = "5184" +VALID_COORDINATOR_KEY = f"{VALID_AGENCY}-{VALID_STOP}" +VALID_AGENCY_TITLE = "San Francisco Muni" +VALID_ROUTE_TITLE = "F-Market & Wharves" +VALID_STOP_TITLE = "Market St & 7th St" +SENSOR_ID = "sensor.san_francisco_muni_f_market_wharves_market_st_7th_st" + +ROUTE_2 = "G" +ROUTE_TITLE_2 = "G-Market & Wharves" +SENSOR_ID_2 = "sensor.san_francisco_muni_g_market_wharves_market_st_7th_st" + +PLATFORM_CONFIG = { + SENSOR_DOMAIN: { + "platform": DOMAIN, + CONF_AGENCY: VALID_AGENCY, + CONF_ROUTE: VALID_ROUTE, + CONF_STOP: VALID_STOP, + }, +} + + +CONFIG_BASIC = { + DOMAIN: { + CONF_AGENCY: VALID_AGENCY, + CONF_ROUTE: VALID_ROUTE, + CONF_STOP: VALID_STOP, + } +} + +CONFIG_BASIC_2 = { + DOMAIN: { + CONF_AGENCY: VALID_AGENCY, + CONF_ROUTE: ROUTE_2, + CONF_STOP: VALID_STOP, + } +} + +BASIC_RESULTS = [ + { + "route": { + "title": VALID_ROUTE_TITLE, + "id": VALID_ROUTE, + }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [ + {"minutes": 1, "timestamp": 1553807371000}, + {"minutes": 2, "timestamp": 1553807372000}, + {"minutes": 3, "timestamp": 1553807373000}, + {"minutes": 10, "timestamp": 1553807380000}, + ], + }, + { + "route": { + "title": ROUTE_TITLE_2, + "id": ROUTE_2, + }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [ + {"minutes": 90, "timestamp": 1553807379000}, + ], + }, +] + +NO_UPCOMING = [ + { + "route": { + "title": VALID_ROUTE_TITLE, + "id": VALID_ROUTE, + }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [], + }, + { + "route": { + "title": ROUTE_TITLE_2, + "id": ROUTE_2, + }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [], + }, +] diff --git a/tests/components/nextbus/test_init.py b/tests/components/nextbus/test_init.py new file mode 100644 index 00000000000000..d44b8d1ecc0f25 --- /dev/null +++ b/tests/components/nextbus/test_init.py @@ -0,0 +1,27 @@ +"""The tests for the nexbus sensor component.""" + +from unittest.mock import MagicMock +from urllib.error import HTTPError + +from homeassistant.components.nextbus.coordinator import NextBusHTTPError +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import assert_setup_sensor +from .const import CONFIG_BASIC + + +async def test_setup_retry( + hass: HomeAssistant, + mock_nextbus: MagicMock, + mock_nextbus_lists: MagicMock, + mock_nextbus_predictions: MagicMock, +) -> None: + """Verify that a list of messages are rendered correctly.""" + + mock_nextbus_predictions.side_effect = NextBusHTTPError( + "failed", HTTPError("url", 500, "error", MagicMock(), None) + ) + await assert_setup_sensor( + hass, CONFIG_BASIC, expected_state=ConfigEntryState.SETUP_RETRY + ) diff --git a/tests/components/nextbus/test_sensor.py b/tests/components/nextbus/test_sensor.py index 8b62ed453b263e..04140a17c4f33c 100644 --- a/tests/components/nextbus/test_sensor.py +++ b/tests/components/nextbus/test_sensor.py @@ -1,161 +1,36 @@ """The tests for the nexbus sensor component.""" -from collections.abc import Generator from copy import deepcopy -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock from urllib.error import HTTPError from freezegun.api import FrozenDateTimeFactory from py_nextbus.client import NextBusFormatError, NextBusHTTPError import pytest -from homeassistant.components import sensor -from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN +from homeassistant.components.nextbus.const import DOMAIN from homeassistant.components.nextbus.coordinator import NextBusDataUpdateCoordinator from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_NAME, CONF_STOP +from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import UpdateFailed -from tests.common import MockConfigEntry, async_fire_time_changed - -VALID_AGENCY = "sfmta-cis" -VALID_ROUTE = "F" -VALID_STOP = "5184" -VALID_COORDINATOR_KEY = f"{VALID_AGENCY}-{VALID_STOP}" -VALID_AGENCY_TITLE = "San Francisco Muni" -VALID_ROUTE_TITLE = "F-Market & Wharves" -VALID_STOP_TITLE = "Market St & 7th St" -SENSOR_ID = "sensor.san_francisco_muni_f_market_wharves_market_st_7th_st" - -ROUTE_2 = "G" -ROUTE_TITLE_2 = "G-Market & Wharves" -SENSOR_ID_2 = "sensor.san_francisco_muni_g_market_wharves_market_st_7th_st" - -PLATFORM_CONFIG = { - sensor.DOMAIN: { - "platform": DOMAIN, - CONF_AGENCY: VALID_AGENCY, - CONF_ROUTE: VALID_ROUTE, - CONF_STOP: VALID_STOP, - }, -} - - -CONFIG_BASIC = { - DOMAIN: { - CONF_AGENCY: VALID_AGENCY, - CONF_ROUTE: VALID_ROUTE, - CONF_STOP: VALID_STOP, - } -} - -CONFIG_BASIC_2 = { - DOMAIN: { - CONF_AGENCY: VALID_AGENCY, - CONF_ROUTE: ROUTE_2, - CONF_STOP: VALID_STOP, - } -} - -BASIC_RESULTS = [ - { - "route": { - "title": VALID_ROUTE_TITLE, - "id": VALID_ROUTE, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [ - {"minutes": 1, "timestamp": 1553807371000}, - {"minutes": 2, "timestamp": 1553807372000}, - {"minutes": 3, "timestamp": 1553807373000}, - {"minutes": 10, "timestamp": 1553807380000}, - ], - }, - { - "route": { - "title": ROUTE_TITLE_2, - "id": ROUTE_2, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [ - {"minutes": 90, "timestamp": 1553807379000}, - ], - }, -] - -NO_UPCOMING = [ - { - "route": { - "title": VALID_ROUTE_TITLE, - "id": VALID_ROUTE, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [], - }, - { - "route": { - "title": ROUTE_TITLE_2, - "id": ROUTE_2, - }, - "stop": { - "name": VALID_STOP_TITLE, - "id": VALID_STOP, - }, - "values": [], - }, -] - - -@pytest.fixture -def mock_nextbus() -> Generator[MagicMock]: 
- """Create a mock py_nextbus module.""" - with patch("homeassistant.components.nextbus.coordinator.NextBusClient") as client: - yield client - - -@pytest.fixture -def mock_nextbus_predictions( - mock_nextbus: MagicMock, -) -> Generator[MagicMock]: - """Create a mock of NextBusClient predictions.""" - instance = mock_nextbus.return_value - instance.predictions_for_stop.return_value = BASIC_RESULTS - - return instance.predictions_for_stop - - -async def assert_setup_sensor( - hass: HomeAssistant, - config: dict[str, dict[str, str]], - expected_state=ConfigEntryState.LOADED, - route_title: str = VALID_ROUTE_TITLE, -) -> MockConfigEntry: - """Set up the sensor and assert it's been created.""" - unique_id = f"{config[DOMAIN][CONF_AGENCY]}_{config[DOMAIN][CONF_ROUTE]}_{config[DOMAIN][CONF_STOP]}" - config_entry = MockConfigEntry( - domain=DOMAIN, - data=config[DOMAIN], - title=f"{VALID_AGENCY_TITLE} {route_title} {VALID_STOP_TITLE}", - unique_id=unique_id, - ) - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is expected_state +from . import assert_setup_sensor +from .const import ( + BASIC_RESULTS, + CONFIG_BASIC, + CONFIG_BASIC_2, + NO_UPCOMING, + ROUTE_TITLE_2, + SENSOR_ID, + SENSOR_ID_2, + VALID_AGENCY, + VALID_COORDINATOR_KEY, + VALID_ROUTE_TITLE, + VALID_STOP_TITLE, +) - return config_entry +from tests.common import async_fire_time_changed async def test_predictions( diff --git a/tests/components/nextcloud/snapshots/test_update.ambr b/tests/components/nextcloud/snapshots/test_update.ambr index 1ee6264c204bf9..484106580b1d9f 100644 --- a/tests/components/nextcloud/snapshots/test_update.ambr +++ b/tests/components/nextcloud/snapshots/test_update.ambr @@ -36,6 +36,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/nextcloud/icon.png', 'friendly_name': 'my.nc_url.local None', 'in_progress': False, @@ -46,6 +47,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.my_nc_url_local_none', diff --git a/tests/components/nice_go/conftest.py b/tests/components/nice_go/conftest.py index 9ed3d0d19cf755..cf85cd7e09222a 100644 --- a/tests/components/nice_go/conftest.py +++ b/tests/components/nice_go/conftest.py @@ -52,7 +52,9 @@ def mock_nice_go() -> Generator[AsyncMock]: attr=barrier["attr"], state=BarrierState( **barrier["state"], - connectionState=ConnectionState(**barrier["connectionState"]), + connectionState=ConnectionState(**barrier["connectionState"]) + if barrier.get("connectionState") + else None, ), api=client, ) diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json index 0597f0038dcd6e..84799e0dd32f33 100644 --- a/tests/components/nice_go/fixtures/get_all_barriers.json +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -63,7 +63,7 @@ }, { "id": "3", - "type": "WallStation", + "type": "Mms100", "controlLevel": "Owner", "attr": [ { @@ -79,16 +79,42 @@ "autoDisabled": false, "migrationStatus": "DONE", "deviceId": "3", - "vcnMode": false, "deviceFwVersion": "1.2.3.4.5.6", - "barrierStatus": "2,100,0,0,-1,0,3,0" + "barrierStatus": "1,100,0,0,1,0,0,0", + "radioConnected": 1, + "powerLevel": "LOW" }, "timestamp": null, "version": null }, - "connectionState": { - "connected": true, - "updatedTimestamp": "123" - } + 
"connectionState": null + }, + { + "id": "4", + "type": "unknown-device-type", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "4", + "desired": { "key": "value" }, + "reported": { + "displayName": "Test Garage 4", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "4", + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "1,100,0,0,1,0,0,0", + "radioConnected": 1, + "powerLevel": "LOW" + }, + "timestamp": null, + "version": null + }, + "connectionState": null } ] diff --git a/tests/components/nice_go/snapshots/test_cover.ambr b/tests/components/nice_go/snapshots/test_cover.ambr index fa65b3b9b4c123..49b5267df56d63 100644 --- a/tests/components/nice_go/snapshots/test_cover.ambr +++ b/tests/components/nice_go/snapshots/test_cover.ambr @@ -117,7 +117,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': None, 'platform': 'nice_go', @@ -131,7 +131,7 @@ # name: test_covers[cover.test_garage_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'garage', + 'device_class': 'gate', 'friendly_name': 'Test Garage 3', 'supported_features': , }), @@ -140,7 +140,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'closed', + 'state': 'open', }) # --- # name: test_covers[cover.test_garage_4-entry] @@ -168,11 +168,11 @@ 'original_device_class': , 'original_icon': None, 'original_name': None, - 'platform': 'linear_garage_door', + 'platform': 'nice_go', 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'test4-GDO', + 'unique_id': '4', 'unit_of_measurement': None, }) # --- @@ -188,6 +188,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'closing', + 'state': 'open', }) # --- diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index be67643c5b724b..f4ba363a4214d0 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -9,6 +9,7 @@ 'id': '1', 'light_status': True, 'name': 'Test Garage 1', + 'type': 'WallStation', 'vacation_mode': False, }), '2': dict({ @@ -18,16 +19,28 @@ 'id': '2', 'light_status': False, 'name': 'Test Garage 2', + 'type': 'WallStation', 'vacation_mode': True, }), '3': dict({ - 'barrier_status': 'closed', + 'barrier_status': 'open', 'connected': True, 'fw_version': '1.2.3.4.5.6', 'id': '3', 'light_status': None, 'name': 'Test Garage 3', - 'vacation_mode': False, + 'type': 'Mms100', + 'vacation_mode': None, + }), + '4': dict({ + 'barrier_status': 'open', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '4', + 'light_status': None, + 'name': 'Test Garage 4', + 'type': 'unknown-device-type', + 'vacation_mode': None, }), }), 'entry': dict({ diff --git a/tests/components/nice_go/snapshots/test_light.ambr b/tests/components/nice_go/snapshots/test_light.ambr index 2e29d9589dd987..529df95a5707e5 100644 --- a/tests/components/nice_go/snapshots/test_light.ambr +++ b/tests/components/nice_go/snapshots/test_light.ambr @@ -109,115 +109,3 @@ 'state': 'off', }) # --- -# name: test_data[light.test_garage_3_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.test_garage_3_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'linear_garage_door', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': 'test3-Light', - 'unit_of_measurement': None, - }) -# --- -# name: test_data[light.test_garage_3_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': None, - 'friendly_name': 'Test Garage 3 Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.test_garage_3_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_data[light.test_garage_4_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.test_garage_4_light', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Light', - 'platform': 'linear_garage_door', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'light', - 'unique_id': 'test4-Light', - 'unit_of_measurement': None, - }) -# --- -# name: test_data[light.test_garage_4_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Test Garage 4 Light', - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.test_garage_4_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/nice_go/test_cover.py b/tests/components/nice_go/test_cover.py index 737fa104d0c83c..f90c2d438b07be 100644 --- a/tests/components/nice_go/test_cover.py +++ b/tests/components/nice_go/test_cover.py @@ -12,16 +12,10 @@ DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, + CoverState, ) from homeassistant.components.nice_go.const import DOMAIN -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -107,16 +101,16 @@ async def test_update_cover_state( await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert hass.states.get("cover.test_garage_1").state == STATE_CLOSED - assert hass.states.get("cover.test_garage_2").state == STATE_OPEN + assert hass.states.get("cover.test_garage_1").state == CoverState.CLOSED + assert hass.states.get("cover.test_garage_2").state == CoverState.OPEN device_update = load_json_object_fixture("device_state_update.json", DOMAIN) await mock_config_entry.runtime_data.on_data(device_update) device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) await 
mock_config_entry.runtime_data.on_data(device_update_1) - assert hass.states.get("cover.test_garage_1").state == STATE_OPENING - assert hass.states.get("cover.test_garage_2").state == STATE_CLOSING + assert hass.states.get("cover.test_garage_1").state == CoverState.OPENING + assert hass.states.get("cover.test_garage_2").state == CoverState.CLOSING @pytest.mark.parametrize( diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py index 23d496df2386d6..4eb3851516e455 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -347,7 +347,7 @@ async def test_no_connection_state( } ) - assert hass.states.get("cover.test_garage_1").state == "unavailable" + assert hass.states.get("cover.test_garage_1").state == "open" async def test_connection_attempts_exhausted( diff --git a/tests/components/nice_go/test_light.py b/tests/components/nice_go/test_light.py index f7aa015c3bd8c9..b170a0ee3ab048 100644 --- a/tests/components/nice_go/test_light.py +++ b/tests/components/nice_go/test_light.py @@ -134,3 +134,29 @@ async def test_error( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) + + +async def test_unsupported_device_type( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that unsupported device types are handled appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + assert hass.states.get("light.test_garage_4_light") is None + assert ( + "Device 'Test Garage 4' has unknown device type 'unknown-device-type'" + in caplog.text + ) + assert "which is not supported by this integration" in caplog.text + assert ( + "We try to support it with a cover and event entity, but nothing else." 
+ in caplog.text + ) + assert ( + "Please create an issue with your device model in additional info" + in caplog.text + ) diff --git a/tests/components/nina/test_config_flow.py b/tests/components/nina/test_config_flow.py index 6bc17cdf674942..309c8860c20946 100644 --- a/tests/components/nina/test_config_flow.py +++ b/tests/components/nina/test_config_flow.py @@ -89,7 +89,9 @@ async def test_step_user_unexpected_exception(hass: HomeAssistant) -> None: DOMAIN, context={"source": SOURCE_USER}, data=deepcopy(DUMMY_DATA) ) - assert result["type"] is FlowResultType.ABORT + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + hass.config_entries.flow.async_abort(result["flow_id"]) async def test_step_user(hass: HomeAssistant) -> None: @@ -300,7 +302,9 @@ async def test_options_flow_unexpected_exception(hass: HomeAssistant) -> None: result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.ABORT + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + hass.config_entries.options.async_abort(result["flow_id"]) async def test_options_flow_entity_removal( diff --git a/tests/components/nordpool/__init__.py b/tests/components/nordpool/__init__.py new file mode 100644 index 00000000000000..20d74d38486375 --- /dev/null +++ b/tests/components/nordpool/__init__.py @@ -0,0 +1,9 @@ +"""Tests for the Nord Pool integration.""" + +from homeassistant.components.nordpool.const import CONF_AREAS +from homeassistant.const import CONF_CURRENCY + +ENTRY_CONFIG = { + CONF_AREAS: ["SE3", "SE4"], + CONF_CURRENCY: "SEK", +} diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py new file mode 100644 index 00000000000000..d1c1972c568fec --- /dev/null +++ b/tests/components/nordpool/conftest.py @@ -0,0 +1,75 @@ +"""Fixtures for the Nord Pool integration.""" + +from __future__ import annotations + +from datetime import datetime +import json +from typing import Any +from unittest.mock import patch + +from pynordpool import NordPoolClient +from pynordpool.const import Currency +from pynordpool.model import DeliveryPeriodData +import pytest + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture +async def load_int( + hass: HomeAssistant, get_data: DeliveryPeriodData +) -> MockConfigEntry: + """Set up the Nord Pool integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry + + +@pytest.fixture(name="get_data") +async def get_data_from_library( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any] +) -> DeliveryPeriodData: + """Retrieve data from Nord Pool library.""" + + client = NordPoolClient(aioclient_mock.create_session(hass.loop)) + with patch("pynordpool.NordPoolClient._get", return_value=load_json): + output = await client.async_get_delivery_period( + datetime(2024, 11, 5, 13, tzinfo=dt_util.UTC), Currency.SEK, ["SE3", "SE4"] + ) + await client._session.close() + return output + + +@pytest.fixture(name="load_json") +def load_json_from_fixture(load_data: str) -> dict[str, Any]: + """Load fixture with json data and return.""" + return json.loads(load_data) + + +@pytest.fixture(name="load_data", scope="package") +def load_data_from_fixture() -> str: + """Load fixture with fixture data and return.""" + return load_fixture("delivery_period.json", DOMAIN) diff --git a/tests/components/nordpool/fixtures/delivery_period.json b/tests/components/nordpool/fixtures/delivery_period.json new file mode 100644 index 00000000000000..77d51dc94339b0 --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period.json @@ -0,0 +1,272 @@ +{ + "deliveryDateCET": "2024-11-05", + "version": 3, + "updatedAt": "2024-11-04T12:15:03.9456464Z", + "deliveryAreas": ["SE3", "SE4"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T00:00:00Z", + "entryPerArea": { + "SE3": 250.73, + "SE4": 283.79 + } + }, + { + "deliveryStart": "2024-11-05T00:00:00Z", + "deliveryEnd": "2024-11-05T01:00:00Z", + "entryPerArea": { + "SE3": 76.36, + "SE4": 81.36 + } + }, + { + "deliveryStart": "2024-11-05T01:00:00Z", + "deliveryEnd": "2024-11-05T02:00:00Z", + "entryPerArea": { + "SE3": 73.92, + "SE4": 79.15 + } + }, + { + "deliveryStart": "2024-11-05T02:00:00Z", + "deliveryEnd": "2024-11-05T03:00:00Z", + "entryPerArea": { + "SE3": 61.69, + "SE4": 65.19 + } + }, + { + "deliveryStart": "2024-11-05T03:00:00Z", + "deliveryEnd": "2024-11-05T04:00:00Z", + "entryPerArea": { + "SE3": 64.6, + "SE4": 68.44 + } + }, + { + "deliveryStart": "2024-11-05T04:00:00Z", + "deliveryEnd": "2024-11-05T05:00:00Z", + "entryPerArea": { + "SE3": 453.27, + "SE4": 516.71 + } + }, + { + "deliveryStart": "2024-11-05T05:00:00Z", + "deliveryEnd": "2024-11-05T06:00:00Z", + "entryPerArea": { + "SE3": 996.28, + "SE4": 1240.85 + } + }, + { + "deliveryStart": "2024-11-05T06:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "entryPerArea": { + "SE3": 1406.14, + "SE4": 1648.25 + } + }, + { + "deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T08:00:00Z", + "entryPerArea": { + "SE3": 1346.54, + "SE4": 1570.5 + } + }, + { + "deliveryStart": "2024-11-05T08:00:00Z", + "deliveryEnd": "2024-11-05T09:00:00Z", + 
"entryPerArea": { + "SE3": 1150.28, + "SE4": 1345.37 + } + }, + { + "deliveryStart": "2024-11-05T09:00:00Z", + "deliveryEnd": "2024-11-05T10:00:00Z", + "entryPerArea": { + "SE3": 1031.32, + "SE4": 1206.51 + } + }, + { + "deliveryStart": "2024-11-05T10:00:00Z", + "deliveryEnd": "2024-11-05T11:00:00Z", + "entryPerArea": { + "SE3": 927.37, + "SE4": 1085.8 + } + }, + { + "deliveryStart": "2024-11-05T11:00:00Z", + "deliveryEnd": "2024-11-05T12:00:00Z", + "entryPerArea": { + "SE3": 925.05, + "SE4": 1081.72 + } + }, + { + "deliveryStart": "2024-11-05T12:00:00Z", + "deliveryEnd": "2024-11-05T13:00:00Z", + "entryPerArea": { + "SE3": 949.49, + "SE4": 1130.38 + } + }, + { + "deliveryStart": "2024-11-05T13:00:00Z", + "deliveryEnd": "2024-11-05T14:00:00Z", + "entryPerArea": { + "SE3": 1042.03, + "SE4": 1256.91 + } + }, + { + "deliveryStart": "2024-11-05T14:00:00Z", + "deliveryEnd": "2024-11-05T15:00:00Z", + "entryPerArea": { + "SE3": 1258.89, + "SE4": 1765.82 + } + }, + { + "deliveryStart": "2024-11-05T15:00:00Z", + "deliveryEnd": "2024-11-05T16:00:00Z", + "entryPerArea": { + "SE3": 1816.45, + "SE4": 2522.55 + } + }, + { + "deliveryStart": "2024-11-05T16:00:00Z", + "deliveryEnd": "2024-11-05T17:00:00Z", + "entryPerArea": { + "SE3": 2512.65, + "SE4": 3533.03 + } + }, + { + "deliveryStart": "2024-11-05T17:00:00Z", + "deliveryEnd": "2024-11-05T18:00:00Z", + "entryPerArea": { + "SE3": 1819.83, + "SE4": 2524.06 + } + }, + { + "deliveryStart": "2024-11-05T18:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "entryPerArea": { + "SE3": 1011.77, + "SE4": 1804.46 + } + }, + { + "deliveryStart": "2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T20:00:00Z", + "entryPerArea": { + "SE3": 835.53, + "SE4": 1112.57 + } + }, + { + "deliveryStart": "2024-11-05T20:00:00Z", + "deliveryEnd": "2024-11-05T21:00:00Z", + "entryPerArea": { + "SE3": 796.19, + "SE4": 1051.69 + } + }, + { + "deliveryStart": "2024-11-05T21:00:00Z", + "deliveryEnd": "2024-11-05T22:00:00Z", + "entryPerArea": { + "SE3": 522.3, + "SE4": 662.44 + } + }, + { + "deliveryStart": "2024-11-05T22:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "entryPerArea": { + "SE3": 289.14, + "SE4": 349.21 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 422.87, + "min": 61.69, + "max": 1406.14 + }, + "SE4": { + "average": 497.97, + "min": 65.19, + "max": 1648.25 + } + } + }, + { + "blockName": "Peak", + "deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 1315.97, + "min": 925.05, + "max": 2512.65 + }, + "SE4": { + "average": 1735.59, + "min": 1081.72, + "max": 3533.03 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 610.79, + "min": 289.14, + "max": 835.53 + }, + "SE4": { + "average": 793.98, + "min": 349.21, + "max": 1112.57 + } + } + } + ], + "currency": "SEK", + "exchangeRate": 11.6402, + "areaStates": [ + { + "state": "Final", + "areas": ["SE3", "SE4"] + } + ], + "areaAverages": [ + { + "areaCode": "SE3", + "price": 900.74 + }, + { + "areaCode": "SE4", + "price": 1166.12 + } + ] +} diff --git a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..01600352861419 --- /dev/null +++ 
b/tests/components/nordpool/snapshots/test_sensor.ambr @@ -0,0 +1,2215 @@ +# serializer version: 1 +# name: test_sensor[sensor.nord_pool_se3_currency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_currency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Currency', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'currency', + 'unique_id': 'SE3-currency', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_currency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Currency', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_currency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'SEK', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_current_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_current_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_price', + 'unique_id': 'SE3-current_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_current_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Current price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_current_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.01177', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_daily_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_daily_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_average', + 'unique_id': 'SE3-daily_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_daily_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Daily average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 
'entity_id': 'sensor.nord_pool_se3_daily_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.90074', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_exchange_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_exchange_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Exchange rate', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'exchange_rate', + 'unique_id': 'SE3-exchange_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_exchange_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Exchange rate', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_exchange_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.6402', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_last_updated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'updated_at', + 'unique_id': 'SE3-updated_at', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Last updated', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T12:15:03+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_next_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_next_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Next price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_price', + 'unique_id': 'SE3-next_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_next_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Next price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_next_price', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.83553', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_1-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.42287', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_1-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.40614', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 
'unique_id': 'off_peak_1-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06169', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_1-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 1 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_1-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 1 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_2-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.61079', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_2-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.83553', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_2-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.28914', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_2-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 2 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_2-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 2 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'peak-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.31597', + }) +# --- +# name: 
test_sensor[sensor.nord_pool_se3_peak_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'peak-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.51265', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'peak-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.92505', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'peak-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Peak time 
from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'peak-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Peak time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_previous_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_previous_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previous price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_price', + 'unique_id': 'SE3-last_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_previous_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Previous price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_previous_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.81983', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_currency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_currency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Currency', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'currency', + 'unique_id': 'SE4-currency', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_currency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Currency', + }), + 'context': , + 'entity_id': 
'sensor.nord_pool_se4_currency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'SEK', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_current_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_current_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_price', + 'unique_id': 'SE4-current_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_current_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Current price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_current_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.80446', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_daily_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_daily_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_average', + 'unique_id': 'SE4-daily_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_daily_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Daily average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_daily_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.16612', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_exchange_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_exchange_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Exchange rate', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'exchange_rate', + 'unique_id': 'SE4-exchange_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_exchange_rate-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Exchange rate', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_exchange_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.6402', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_last_updated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'updated_at', + 'unique_id': 'SE4-updated_at', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Last updated', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T12:15:03+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_next_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_next_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Next price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_price', + 'unique_id': 'SE4-next_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_next_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Next price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_next_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.11257', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_1-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: 
test_sensor[sensor.nord_pool_se4_off_peak_1_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.49797', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_1-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.64825', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_1-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06519', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_1-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 1 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_1-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 1 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_2-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.79398', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_2-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.11257', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_2-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.34921', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_2-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 2 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_2-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 2 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'peak-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.73559', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'peak-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'Nord Pool SE4 Peak highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.53303', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'peak-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.08172', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'peak-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Peak time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'peak-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[sensor.nord_pool_se4_peak_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Peak time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_previous_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_previous_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previous price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_price', + 'unique_id': 'SE4-last_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_previous_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Previous price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_previous_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.52406', + }) +# --- diff --git a/tests/components/nordpool/test_config_flow.py b/tests/components/nordpool/test_config_flow.py new file mode 100644 index 00000000000000..cfdfc63aca7690 --- /dev/null +++ b/tests/components/nordpool/test_config_flow.py @@ -0,0 +1,206 @@ +"""Test the Nord Pool config flow.""" + +from __future__ import annotations + +from unittest.mock import patch + +from pynordpool import ( + DeliveryPeriodData, + NordPoolConnectionError, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) +import pytest + +from homeassistant import config_entries +from homeassistant.components.nordpool.const import CONF_AREAS, DOMAIN +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + ENTRY_CONFIG, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["title"] == "Nord Pool" + assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_single_config_entry( + hass: HomeAssistant, load_int: None, get_data: DeliveryPeriodData +) -> None: + """Test abort for single config entry.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.parametrize( + ("error_message", "p_error"), + [ + (NordPoolConnectionError, "cannot_connect"), + (NordPoolEmptyResponseError, "no_data"), + (NordPoolError, "cannot_connect"), + (NordPoolResponseError, "cannot_connect"), + ], +) +async def test_cannot_connect( + hass: HomeAssistant, + get_data: DeliveryPeriodData, + error_message: Exception, + p_error: str, +) -> None: + """Test cannot connect error.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == config_entries.SOURCE_USER + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error_message, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) + + assert result["errors"] == {"base": p_error} + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Nord Pool" + assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_reconfigure( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, +) -> None: + """Test reconfiguration.""" + + result = await load_int.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert load_int.data == { + "areas": [ + "SE3", + ], + "currency": "EUR", + } + + 
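# --- Editor's note: illustrative sketch, not part of this PR -----------------
# The parametrized test_cannot_connect above (and test_reconfigure_cannot_connect
# just below) asserts that pynordpool failures surface as config-flow form
# errors: NordPoolEmptyResponseError maps to "no_data", while
# NordPoolConnectionError, NordPoolResponseError and the base NordPoolError all
# map to "cannot_connect". A minimal helper that would satisfy those assertions
# could look like the sketch below; the helper name and its callable-based
# signature are assumptions made for illustration, not the integration's
# actual implementation.
from collections.abc import Awaitable, Callable

from pynordpool import (
    NordPoolConnectionError,
    NordPoolEmptyResponseError,
    NordPoolError,
    NordPoolResponseError,
)


async def _flow_errors(fetch: Callable[[], Awaitable[object]]) -> dict[str, str]:
    """Translate pynordpool failures into config-flow error keys."""
    try:
        await fetch()
    except NordPoolEmptyResponseError:
        # The API answered but returned no prices for the requested period.
        return {"base": "no_data"}
    except (NordPoolConnectionError, NordPoolResponseError, NordPoolError):
        # All remaining failures are reported as a connection problem,
        # matching the parametrization in the surrounding tests.
        return {"base": "cannot_connect"}
    return {}
# ------------------------------------------------------------------------------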
+@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.parametrize( + ("error_message", "p_error"), + [ + (NordPoolConnectionError, "cannot_connect"), + (NordPoolEmptyResponseError, "no_data"), + (NordPoolError, "cannot_connect"), + (NordPoolResponseError, "cannot_connect"), + ], +) +async def test_reconfigure_cannot_connect( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, + error_message: Exception, + p_error: str, +) -> None: + """Test cannot connect error in a reconfigure flow.""" + + result = await load_int.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error_message, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["errors"] == {"base": p_error} + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert load_int.data == { + "areas": [ + "SE3", + ], + "currency": "EUR", + } diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py new file mode 100644 index 00000000000000..d2d912b1b9907a --- /dev/null +++ b/tests/components/nordpool/test_coordinator.py @@ -0,0 +1,106 @@ +"""The test for the Nord Pool coordinator.""" + +from __future__ import annotations + +from datetime import timedelta +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from pynordpool import ( + DeliveryPeriodData, + NordPoolAuthenticationError, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) +import pytest + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") +async def test_coordinator( + hass: HomeAssistant, + get_data: DeliveryPeriodData, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the Nord Pool coordinator with errors.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + ) as mock_data, + ): + mock_data.return_value = get_data + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "0.92737" + mock_data.reset_mock() + + mock_data.side_effect = NordPoolError("error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + mock_data.reset_mock() + + assert "Authentication error" not in caplog.text + mock_data.side_effect = NordPoolAuthenticationError("Authentication error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Authentication error" in caplog.text + mock_data.reset_mock() + + assert "Empty response" not in caplog.text + mock_data.side_effect = NordPoolEmptyResponseError("Empty response") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Empty response" in caplog.text + mock_data.reset_mock() + + assert "Response error" not in caplog.text + mock_data.side_effect = NordPoolResponseError("Response error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Response error" in caplog.text + mock_data.reset_mock() + + mock_data.return_value = get_data + mock_data.side_effect = None + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "1.81645" diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py new file mode 100644 index 00000000000000..5ec1c4b3a0bf92 --- /dev/null +++ b/tests/components/nordpool/test_init.py @@ -0,0 +1,39 @@ +"""Test for Nord Pool component Init.""" + +from __future__ import annotations + +from unittest.mock import patch + +from pynordpool import DeliveryPeriodData + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: + """Test load and unload an entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + assert entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py new file mode 100644 index 00000000000000..c7a305c8a40d66 --- /dev/null +++ b/tests/components/nordpool/test_sensor.py @@ -0,0 +1,25 @@ +"""The test for the Nord Pool sensor platform.""" + +from __future__ import annotations + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import snapshot_platform + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + load_int: ConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Nord Pool sensor.""" + + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) diff --git a/tests/components/nut/test_init.py b/tests/components/nut/test_init.py index 61a5187407ba27..d5d85daa3362c6 100644 --- a/tests/components/nut/test_init.py +++ b/tests/components/nut/test_init.py @@ -8,8 +8,9 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, CONF_PORT, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr -from .util import _get_mock_nutclient +from .util import _get_mock_nutclient, async_init_integration from tests.common import MockConfigEntry @@ -96,3 +97,53 @@ async def test_auth_fails(hass: HomeAssistant) -> None: flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["context"]["source"] == "reauth" + + +async def test_serial_number(hass: HomeAssistant) -> None: + """Test for serial number set on device.""" + mock_serial_number = "A00000000000" + await async_init_integration( + hass, + username="someuser", + password="somepassword", + list_vars={"ups.serial": mock_serial_number}, + list_ups={"ups1": "UPS 1"}, + list_commands_return_value=[], + ) + + device_registry = dr.async_get(hass) + assert device_registry is not None + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_serial_number)} + ) + + assert device_entry is not None + assert device_entry.serial_number == mock_serial_number + + +async def test_device_location(hass: HomeAssistant) -> None: + """Test for suggested location on device.""" + mock_serial_number = "A00000000000" + mock_device_location = "XYZ Location" + await async_init_integration( + hass, + username="someuser", + password="somepassword", + list_vars={ + "ups.serial": mock_serial_number, + "device.location": 
mock_device_location, + }, + list_ups={"ups1": "UPS 1"}, + list_commands_return_value=[], + ) + + device_registry = dr.async_get(hass) + assert device_registry is not None + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_serial_number)} + ) + + assert device_entry is not None + assert device_entry.suggested_area == mock_device_location diff --git a/tests/components/nyt_games/conftest.py b/tests/components/nyt_games/conftest.py index 2999ae115b185f..1004b6eb42ad1b 100644 --- a/tests/components/nyt_games/conftest.py +++ b/tests/components/nyt_games/conftest.py @@ -1,7 +1,7 @@ """NYTGames tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from nyt_games.models import ConnectionsStats, WordleStats import pytest @@ -10,7 +10,6 @@ from homeassistant.const import CONF_TOKEN from tests.common import MockConfigEntry, load_fixture -from tests.components.smhi.common import AsyncMock @pytest.fixture diff --git a/tests/components/nyt_games/snapshots/test_sensor.ambr b/tests/components/nyt_games/snapshots/test_sensor.ambr index fdec7d58d9d319..84b74a26f0df68 100644 --- a/tests/components/nyt_games/snapshots/test_sensor.ambr +++ b/tests/components/nyt_games/snapshots/test_sensor.ambr @@ -98,7 +98,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0', + 'state': '2', }) # --- # name: test_all_entities[sensor.connections_last_played-entry] diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index dd53d6cbce648f..35f6b7d739c22a 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -5,7 +5,7 @@ from http import HTTPStatus import os from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest @@ -70,23 +70,13 @@ async def no_rpi_fixture( @pytest.fixture(name="mock_supervisor") async def mock_supervisor_fixture( aioclient_mock: AiohttpClientMocker, + store_info: AsyncMock, + supervisor_is_connected: AsyncMock, + resolution_info: AsyncMock, ) -> AsyncGenerator[None]: """Mock supervisor.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ @@ -99,10 +89,6 @@ async def mock_supervisor_fixture( ) with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), - patch( - "homeassistant.components.hassio.HassIO.is_connected", - return_value=True, - ), patch( "homeassistant.components.hassio.HassIO.get_info", return_value={}, @@ -111,10 +97,6 @@ async def mock_supervisor_fixture( "homeassistant.components.hassio.HassIO.get_host_info", return_value={}, ), - patch( - "homeassistant.components.hassio.HassIO.get_store", - return_value={}, - ), patch( "homeassistant.components.hassio.HassIO.get_supervisor_info", return_value={"diagnostics": True}, diff --git a/tests/components/onewire/test_config_flow.py b/tests/components/onewire/test_config_flow.py index c147a522a5965b..c554624267da38 100644 --- a/tests/components/onewire/test_config_flow.py +++ b/tests/components/onewire/test_config_flow.py @@ -253,6 +253,10 @@ async def 
test_user_options_set_multiple( ) +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.onewire.options.abort.No configurable devices found."], +) async def test_user_options_no_devices( hass: HomeAssistant, config_entry: ConfigEntry ) -> None: diff --git a/tests/components/onkyo/__init__.py b/tests/components/onkyo/__init__.py new file mode 100644 index 00000000000000..8900f189aea60a --- /dev/null +++ b/tests/components/onkyo/__init__.py @@ -0,0 +1,78 @@ +"""Tests for the Onkyo integration.""" + +from unittest.mock import AsyncMock, Mock, patch + +from homeassistant.components.onkyo.receiver import Receiver, ReceiverInfo +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +def create_receiver_info(id: int) -> ReceiverInfo: + """Create an empty receiver info object for testing.""" + return ReceiverInfo( + host=f"host {id}", + port=id, + model_name=f"type {id}", + identifier=f"id{id}", + ) + + +def create_config_entry_from_info(info: ReceiverInfo) -> MockConfigEntry: + """Create a config entry from receiver info.""" + data = {CONF_HOST: info.host} + options = { + "volume_resolution": 80, + "input_sources": {"12": "tv"}, + "max_volume": 100, + } + + return MockConfigEntry( + data=data, + options=options, + title=info.model_name, + domain="onkyo", + unique_id=info.identifier, + ) + + +def create_empty_config_entry() -> MockConfigEntry: + """Create an empty config entry for use in unit tests.""" + data = {CONF_HOST: ""} + options = { + "volume_resolution": 80, + "input_sources": {"12": "tv"}, + "max_volume": 100, + } + + return MockConfigEntry( + data=data, + options=options, + title="Unit test Onkyo", + domain="onkyo", + unique_id="onkyo_unique_id", + ) + + +async def setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, receiver_info: ReceiverInfo +) -> None: + """Fixture for setting up the component.""" + + config_entry.add_to_hass(hass) + + mock_receiver = AsyncMock() + mock_receiver.conn.close = Mock() + mock_receiver.callbacks.connect = Mock() + mock_receiver.callbacks.update = Mock() + + with ( + patch( + "homeassistant.components.onkyo.async_interview", + return_value=receiver_info, + ), + patch.object(Receiver, "async_create", return_value=mock_receiver), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/onkyo/conftest.py b/tests/components/onkyo/conftest.py new file mode 100644 index 00000000000000..c37966e3bae647 --- /dev/null +++ b/tests/components/onkyo/conftest.py @@ -0,0 +1,30 @@ +"""Configure tests for the Onkyo integration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.onkyo.const import DOMAIN + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.onkyo.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Create Onkyo entry in Home Assistant.""" + return MockConfigEntry( + domain=DOMAIN, + title="Onkyo", + data={}, + ) diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py new file mode 100644 index 00000000000000..f230ab124bdae2 --- /dev/null +++ 
b/tests/components/onkyo/test_config_flow.py @@ -0,0 +1,532 @@ +"""Test Onkyo config flow.""" + +from typing import Any +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.onkyo import InputSource +from homeassistant.components.onkyo.config_flow import OnkyoConfigFlow +from homeassistant.components.onkyo.const import ( + DOMAIN, + OPTION_MAX_VOLUME, + OPTION_VOLUME_RESOLUTION, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType, InvalidData + +from . import ( + create_config_entry_from_info, + create_empty_config_entry, + create_receiver_info, + setup_integration, +) + +from tests.common import Mock, MockConfigEntry + + +async def test_user_initial_menu(hass: HomeAssistant) -> None: + """Test initial menu.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert init_result["type"] is FlowResultType.MENU + # Check if the values are there, but ignore order + assert not set(init_result["menu_options"]) ^ {"manual", "eiscp_discovery"} + + +async def test_manual_valid_host(hass: HomeAssistant) -> None: + """Test valid host entered.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + mock_info = Mock() + mock_info.identifier = "mock_id" + mock_info.host = "mock_host" + mock_info.model_name = "mock_model" + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + return_value=mock_info, + ): + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + assert select_result["step_id"] == "configure_receiver" + assert ( + select_result["description_placeholders"]["name"] + == "mock_model (mock_host)" + ) + + +async def test_manual_invalid_host(hass: HomeAssistant) -> None: + """Test invalid host entered.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", return_value=None + ): + host_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + assert host_result["step_id"] == "manual" + assert host_result["errors"]["base"] == "cannot_connect" + + +async def test_manual_valid_host_unexpected_error(hass: HomeAssistant) -> None: + """Test valid host entered.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + side_effect=Exception(), + ): + host_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + assert host_result["step_id"] == "manual" + assert host_result["errors"]["base"] == "unknown" + + +async def 
test_discovery_and_no_devices_discovered(hass: HomeAssistant) -> None: + """Test initial menu.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + with patch( + "homeassistant.components.onkyo.config_flow.async_discover", return_value=[] + ): + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "no_devices_found" + + +async def test_discovery_with_exception(hass: HomeAssistant) -> None: + """Test discovery which throws an unexpected exception.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + with patch( + "homeassistant.components.onkyo.config_flow.async_discover", + side_effect=Exception(), + ): + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "unknown" + + +async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> None: + """Test discovery with a new and an existing entry.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + infos = [create_receiver_info(1), create_receiver_info(2)] + + with ( + patch( + "homeassistant.components.onkyo.config_flow.async_discover", + return_value=infos, + ), + # Fake it like the first entry was already added + patch.object(OnkyoConfigFlow, "_async_current_ids", return_value=["id1"]), + ): + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.FORM + + assert form_result["data_schema"] is not None + schema = form_result["data_schema"].schema + container = schema["device"].container + assert container == {"id2": "type 2 (host 2)"} + + +async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: + """Test discovery after a selection.""" + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + infos = [create_receiver_info(42), create_receiver_info(0)] + + with ( + patch( + "homeassistant.components.onkyo.config_flow.async_discover", + return_value=infos, + ), + ): + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={"device": "id42"}, + ) + + assert select_result["step_id"] == "configure_receiver" + assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" + + +async def test_configure_empty_source_list(hass: HomeAssistant) -> None: + """Test receiver configuration with no sources set.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + mock_info = Mock() + mock_info.identifier = "mock_id" + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + return_value=mock_info, + ): + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, 
+ ) + + configure_result = await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": []}, + ) + + assert configure_result["errors"] == { + "input_sources": "empty_input_source_list" + } + + +async def test_configure_no_resolution(hass: HomeAssistant) -> None: + """Test receiver configure with no resolution set.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + mock_info = Mock() + mock_info.identifier = "mock_id" + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + return_value=mock_info, + ): + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"input_sources": ["TV"]}, + ) + + +async def test_configure_resolution_set(hass: HomeAssistant) -> None: + """Test receiver configure with specified resolution.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + receiver_info = create_receiver_info(1) + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + return_value=receiver_info, + ): + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + configure_result = await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": ["TV"]}, + ) + + assert configure_result["type"] is FlowResultType.CREATE_ENTRY + assert configure_result["options"]["volume_resolution"] == 200 + + +async def test_configure_invalid_resolution_set(hass: HomeAssistant) -> None: + """Test receiver configure with invalid resolution.""" + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "manual"}, + ) + + mock_info = Mock() + mock_info.identifier = "mock_id" + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + return_value=mock_info, + ): + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) + + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 42, "input_sources": ["TV"]}, + ) + + +async def test_reconfigure(hass: HomeAssistant) -> None: + """Test the reconfigure config flow.""" + receiver_info = create_receiver_info(1) + config_entry = create_config_entry_from_info(receiver_info) + await setup_integration(hass, config_entry, receiver_info) + + old_host = config_entry.data[CONF_HOST] + old_max_volume = config_entry.options[OPTION_MAX_VOLUME] + + result = await config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual" + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + 
return_value=receiver_info, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"host": receiver_info.host} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "configure_receiver" + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"volume_resolution": 200, "input_sources": ["TUNER"]}, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reconfigure_successful" + + assert config_entry.data[CONF_HOST] == old_host + assert config_entry.options[OPTION_VOLUME_RESOLUTION] == 200 + assert config_entry.options[OPTION_MAX_VOLUME] == old_max_volume + + +async def test_reconfigure_new_device(hass: HomeAssistant) -> None: + """Test the reconfigure config flow with new device.""" + receiver_info = create_receiver_info(1) + config_entry = create_config_entry_from_info(receiver_info) + await setup_integration(hass, config_entry, receiver_info) + + old_unique_id = receiver_info.identifier + + result = await config_entry.start_reconfigure_flow(hass) + + receiver_info_2 = create_receiver_info(2) + + with patch( + "homeassistant.components.onkyo.config_flow.async_interview", + return_value=receiver_info_2, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"host": receiver_info_2.host} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "unique_id_mismatch" + + # unique id should remain unchanged + assert config_entry.unique_id == old_unique_id + + +@pytest.mark.parametrize( + ("user_input", "exception", "error"), + [ + ( + # No host, and thus no host reachable + { + CONF_HOST: None, + "receiver_max_volume": 100, + "max_volume": 100, + "sources": {}, + }, + None, + "cannot_connect", + ), + ( + # No host, and connection exception + { + CONF_HOST: None, + "receiver_max_volume": 100, + "max_volume": 100, + "sources": {}, + }, + Exception(), + "cannot_connect", + ), + ], +) +async def test_import_fail( + hass: HomeAssistant, + user_input: dict[str, Any], + exception: Exception, + error: str, +) -> None: + """Test import flow failed.""" + with ( + patch( + "homeassistant.components.onkyo.config_flow.async_interview", + return_value=None, + side_effect=exception, + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error + + +async def test_import_success( + hass: HomeAssistant, +) -> None: + """Test import flow succeeded.""" + info = create_receiver_info(1) + + user_input = { + CONF_HOST: info.host, + "receiver_max_volume": 80, + "max_volume": 110, + "sources": { + InputSource("00"): "Auxiliary", + InputSource("01"): "Video", + }, + "info": info, + } + + import_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input + ) + await hass.async_block_till_done() + + assert import_result["type"] is FlowResultType.CREATE_ENTRY + assert import_result["data"]["host"] == "host 1" + assert import_result["options"]["volume_resolution"] == 80 + assert import_result["options"]["max_volume"] == 100 + assert import_result["options"]["input_sources"] == { + "00": "Auxiliary", + "01": "Video", + } + + +async def test_options_flow(hass: HomeAssistant, 
config_entry: MockConfigEntry) -> None: + """Test options flow.""" + + receiver_info = create_receiver_info(1) + config_entry = create_empty_config_entry() + await setup_integration(hass, config_entry, receiver_info) + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "max_volume": 42, + "TV": "television", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + "volume_resolution": 80, + "max_volume": 42.0, + "input_sources": { + "12": "television", + }, + } diff --git a/tests/components/onkyo/test_init.py b/tests/components/onkyo/test_init.py new file mode 100644 index 00000000000000..17086a3088e2eb --- /dev/null +++ b/tests/components/onkyo/test_init.py @@ -0,0 +1,72 @@ +"""Test Onkyo component setup process.""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest + +from homeassistant.components.onkyo import async_setup_entry + from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from . import create_empty_config_entry, create_receiver_info, setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + + config_entry = create_empty_config_entry() + receiver_info = create_receiver_info(1) + await setup_integration(hass, config_entry, receiver_info) + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_update_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test update options.""" + + with patch.object(hass.config_entries, "async_reload", return_value=True): + config_entry = create_empty_config_entry() + receiver_info = create_receiver_info(1) + await setup_integration(hass, config_entry, receiver_info) + + # Force option change + assert hass.config_entries.async_update_entry( + config_entry, options={"option": "new_value"} + ) + await hass.async_block_till_done() + + hass.config_entries.async_reload.assert_called_with(config_entry.entry_id) + + +async def test_no_connection( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test setup raises ConfigEntryNotReady when the receiver is unreachable.""" + + config_entry = create_empty_config_entry() + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.onkyo.async_interview", + return_value=None, + ), + pytest.raises(ConfigEntryNotReady), + ): + await async_setup_entry(hass, config_entry) diff --git a/tests/components/onvif/test_config_flow.py b/tests/components/onvif/test_config_flow.py index f7200aa7a00dd8..5c01fb2d2009bb 100644 --- a/tests/components/onvif/test_config_flow.py +++ b/tests/components/onvif/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant.components import dhcp from homeassistant.components.onvif import DOMAIN, config_flow from homeassistant.config_entries import SOURCE_DHCP -from homeassistant.const import CONF_HOST, CONF_USERNAME +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow 
import FlowResultType from homeassistant.helpers import device_registry as dr @@ -803,7 +803,8 @@ async def test_form_reauth(hass: HomeAssistant) -> None: assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {config_flow.CONF_PASSWORD: "auth_failed"} assert result2["description_placeholders"] == { - "error": "not authorized (subcodes:NotAuthorized)" + CONF_NAME: "Mock Title", + "error": "not authorized (subcodes:NotAuthorized)", } with ( diff --git a/tests/components/openweathermap/test_config_flow.py b/tests/components/openweathermap/test_config_flow.py index f18aa432e2f1e1..aec343607544a8 100644 --- a/tests/components/openweathermap/test_config_flow.py +++ b/tests/components/openweathermap/test_config_flow.py @@ -7,6 +7,7 @@ CurrentWeather, DailyTemperature, DailyWeatherForecast, + MinutelyWeatherForecast, RequestError, WeatherCondition, WeatherReport, @@ -105,7 +106,12 @@ def _create_mocked_owm_factory(is_valid: bool): rain=0, snow=0, ) - weather_report = WeatherReport(current_weather, [], [daily_weather_forecast]) + minutely_weather_forecast = MinutelyWeatherForecast( + date_time=1728672360, precipitation=2.54 + ) + weather_report = WeatherReport( + current_weather, [minutely_weather_forecast], [], [daily_weather_forecast] + ) mocked_owm_client = MagicMock() mocked_owm_client.validate_key = AsyncMock(return_value=is_valid) diff --git a/tests/components/osoenergy/conftest.py b/tests/components/osoenergy/conftest.py new file mode 100644 index 00000000000000..bb14fec0241bba --- /dev/null +++ b/tests/components/osoenergy/conftest.py @@ -0,0 +1,90 @@ +"""Common fixtures for the OSO Energy tests.""" + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +from apyosoenergyapi.waterheater import OSOEnergyWaterHeaterData +import pytest + +from homeassistant.components.osoenergy.const import DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonObjectType + +from tests.common import MockConfigEntry, load_json_object_fixture + +MOCK_CONFIG = { + CONF_API_KEY: "secret_api_key", +} +TEST_USER_EMAIL = "test_user_email@domain.com" + + +@pytest.fixture +def water_heater_fixture() -> JsonObjectType: + """Load the water heater fixture.""" + return load_json_object_fixture("water_heater.json", DOMAIN) + + +@pytest.fixture +def mock_water_heater(water_heater_fixture) -> MagicMock: + """Water heater mock object.""" + mock_heater = MagicMock(OSOEnergyWaterHeaterData) + for key, value in water_heater_fixture.items(): + setattr(mock_heater, key, value) + return mock_heater + + +@pytest.fixture +def mock_entry_data() -> dict[str, Any]: + """Mock config entry data for fixture.""" + return MOCK_CONFIG + + +@pytest.fixture +def mock_config_entry( + hass: HomeAssistant, mock_entry_data: dict[str, Any] +) -> ConfigEntry: + """Mock a config entry setup for incomfort integration.""" + entry = MockConfigEntry(domain=DOMAIN, data=mock_entry_data) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +async def mock_osoenergy_client(mock_water_heater) -> Generator[AsyncMock]: + """Mock a OSO Energy client.""" + + with ( + patch( + "homeassistant.components.osoenergy.OSOEnergy", MagicMock() + ) as mock_client, + patch( + "homeassistant.components.osoenergy.config_flow.OSOEnergy", new=mock_client + ), + ): + mock_session = MagicMock() + mock_session.device_list = {"water_heater": 
[mock_water_heater]} + mock_session.start_session = AsyncMock( + return_value={"water_heater": [mock_water_heater]} + ) + mock_session.update_data = AsyncMock(return_value=True) + + mock_client().session = mock_session + + mock_hotwater = MagicMock() + mock_hotwater.get_water_heater = AsyncMock(return_value=mock_water_heater) + mock_hotwater.set_profile = AsyncMock(return_value=True) + mock_hotwater.set_v40_min = AsyncMock(return_value=True) + mock_hotwater.turn_on = AsyncMock(return_value=True) + mock_hotwater.turn_off = AsyncMock(return_value=True) + + mock_client().hotwater = mock_hotwater + + mock_client().get_user_email = AsyncMock(return_value=TEST_USER_EMAIL) + mock_client().start_session = AsyncMock( + return_value={"water_heater": [mock_water_heater]} + ) + + yield mock_client diff --git a/tests/components/osoenergy/fixtures/water_heater.json b/tests/components/osoenergy/fixtures/water_heater.json new file mode 100644 index 00000000000000..82bdafb5d8a67e --- /dev/null +++ b/tests/components/osoenergy/fixtures/water_heater.json @@ -0,0 +1,20 @@ +{ + "device_id": "osoenergy_water_heater", + "device_type": "SAGA S200", + "device_name": "TEST DEVICE", + "current_temperature": 60, + "min_temperature": 10, + "max_temperature": 75, + "target_temperature": 60, + "target_temperature_low": 57, + "target_temperature_high": 63, + "available": true, + "online": true, + "current_operation": "on", + "optimization_mode": "oso", + "heater_mode": "auto", + "profile": [ + 10, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60 + ] +} diff --git a/tests/components/osoenergy/snapshots/test_water_heater.ambr b/tests/components/osoenergy/snapshots/test_water_heater.ambr new file mode 100644 index 00000000000000..5ebac405144d97 --- /dev/null +++ b/tests/components/osoenergy/snapshots/test_water_heater.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_water_heater[water_heater.test_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 75, + 'min_temp': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.test_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'osoenergy', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'osoenergy_water_heater', + 'unit_of_measurement': None, + }) +# --- +# name: test_water_heater[water_heater.test_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 60, + 'friendly_name': 'TEST DEVICE', + 'max_temp': 75, + 'min_temp': 10, + 'supported_features': , + 'target_temp_high': 63, + 'target_temp_low': 57, + 'temperature': 60, + }), + 'context': , + 'entity_id': 'water_heater.test_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'eco', + }) +# --- diff --git a/tests/components/osoenergy/test_config_flow.py b/tests/components/osoenergy/test_config_flow.py index 0b7a3c30cf277e..0d77781a538efc 100644 --- a/tests/components/osoenergy/test_config_flow.py +++ b/tests/components/osoenergy/test_config_flow.py @@ -68,7 +68,8 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: result = await mock_config.start_reauth_flow(hass) 
assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} + assert result["step_id"] == "user" + assert result["errors"] is None with patch( "homeassistant.components.osoenergy.config_flow.OSOEnergy.get_user_email", diff --git a/tests/components/osoenergy/test_water_heater.py b/tests/components/osoenergy/test_water_heater.py new file mode 100644 index 00000000000000..851e710fa1cacc --- /dev/null +++ b/tests/components/osoenergy/test_water_heater.py @@ -0,0 +1,276 @@ +"""The water heater tests for the OSO Energy platform.""" + +from unittest.mock import ANY, MagicMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.osoenergy.const import DOMAIN +from homeassistant.components.osoenergy.water_heater import ( + ATTR_UNTIL_TEMP_LIMIT, + ATTR_V40MIN, + SERVICE_GET_PROFILE, + SERVICE_SET_PROFILE, + SERVICE_SET_V40MIN, +) +from homeassistant.components.water_heater import ( + DOMAIN as WATER_HEATER_DOMAIN, + SERVICE_SET_TEMPERATURE, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import snapshot_platform + + +@patch("homeassistant.components.osoenergy.PLATFORMS", [Platform.WATER_HEATER]) +async def test_water_heater( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_osoenergy_client: MagicMock, + snapshot: SnapshotAssertion, + mock_config_entry: ConfigEntry, +) -> None: + """Test states of the water heater.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.freeze_time("2024-10-10 00:00:00") +async def test_get_profile( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test getting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + profile = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PROFILE, + {ATTR_ENTITY_ID: "water_heater.test_device"}, + blocking=True, + return_response=True, + ) + + # The profile is returned in UTC format from the server + # Each index represents an hour from the current day (0-23). For example index 2 - 02:00 UTC + # Depending on the time zone and the DST the UTC hour is converted to local time and the value is placed in the correct index + # Example: time zone 'US/Pacific' and DST (-7 hours difference) - index 9 (09:00 UTC) will be converted to index 2 (02:00 Local) + assert profile == { + "water_heater.test_device": { + "profile": [ + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 10, + 60, + 60, + 60, + 60, + 60, + 60, + ], + }, + } + + +@pytest.mark.freeze_time("2024-10-10 00:00:00") +async def test_set_profile( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test getting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_PROFILE, + {ATTR_ENTITY_ID: "water_heater.test_device", "hour_01": 45}, + blocking=True, + ) + + # The server expects to receive the profile in UTC format + # Each field represents an hour from the current day (0-23). 
For example field hour_01 - 01:00 Local time + # Depending on the time zone and the DST the Local hour is converted to UTC time and the value is placed in the correct index + # Example: time zone 'US/Pacific' and DST (-7 hours difference) - index 1 (01:00 Local) will be converted to index 8 (08:00 Utc) + mock_osoenergy_client().hotwater.set_profile.assert_called_once_with( + ANY, + [ + 10, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 45, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + 60, + ], + ) + + +async def test_set_v40_min( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test getting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_V40MIN, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_V40MIN: 300}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.set_v40_min.assert_called_once_with(ANY, 300) + + +async def test_set_temperature( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test getting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + WATER_HEATER_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_TEMPERATURE: 45}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.set_profile.assert_called_once_with( + ANY, + [ + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + 45, + ], + ) + + +async def test_turn_on( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test turning the heater on.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + WATER_HEATER_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "water_heater.test_device"}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.turn_on.assert_called_once_with(ANY, True) + + +async def test_turn_off( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test getting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + WATER_HEATER_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "water_heater.test_device"}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.turn_off.assert_called_once_with(ANY, True) + + +async def test_oso_turn_on( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test turning the heater on.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_UNTIL_TEMP_LIMIT: False}, + blocking=True, + ) + + mock_osoenergy_client().hotwater.turn_on.assert_called_once_with(ANY, False) + + +async def test_oso_turn_off( + hass: HomeAssistant, + mock_osoenergy_client: MagicMock, + mock_config_entry: ConfigEntry, +) -> None: + """Test getting the heater profile.""" + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "water_heater.test_device", ATTR_UNTIL_TEMP_LIMIT: False}, + blocking=True, + ) + + 
mock_osoenergy_client().hotwater.turn_off.assert_called_once_with(ANY, False) diff --git a/tests/components/otbr/test_config_flow.py b/tests/components/otbr/test_config_flow.py index 966f80d0bd88db..cd02c14e4eb520 100644 --- a/tests/components/otbr/test_config_flow.py +++ b/tests/components/otbr/test_config_flow.py @@ -9,22 +9,23 @@ import pytest import python_otbr_api -from homeassistant.components import hassio, otbr +from homeassistant.components import otbr from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . import DATASET_CH15, DATASET_CH16, TEST_BORDER_AGENT_ID, TEST_BORDER_AGENT_ID_2 from tests.common import MockConfigEntry, MockModule, mock_integration from tests.test_util.aiohttp import AiohttpClientMocker -HASSIO_DATA = hassio.HassioServiceInfo( +HASSIO_DATA = HassioServiceInfo( config={"host": "core-silabs-multiprotocol", "port": 8081}, name="Silicon Labs Multiprotocol", slug="otbr", uuid="12345", ) -HASSIO_DATA_2 = hassio.HassioServiceInfo( +HASSIO_DATA_2 = HassioServiceInfo( config={"host": "core-silabs-multiprotocol_2", "port": 8082}, name="Silicon Labs Multiprotocol", slug="other_addon", diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index ca1cbd6483b616..faf137861072fb 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -47,6 +47,7 @@ def enable_mocks_fixture( """Enable API mocks.""" +@pytest.mark.usefixtures("supervisor_client") async def test_import_dataset( hass: HomeAssistant, mock_async_zeroconf: MagicMock, @@ -201,6 +202,7 @@ async def test_import_share_radio_no_channel_collision( ) +@pytest.mark.usefixtures("supervisor_client") @pytest.mark.parametrize("enable_compute_pskc", [True]) @pytest.mark.parametrize( "dataset", [DATASET_INSECURE_NW_KEY, DATASET_INSECURE_PASSPHRASE] @@ -310,6 +312,7 @@ async def test_config_entry_update(hass: HomeAssistant) -> None: mock_otrb_api.assert_called_once_with(new_config_entry_data["url"], ANY, ANY) +@pytest.mark.usefixtures("supervisor_client") async def test_remove_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan ) -> None: diff --git a/tests/components/otbr/test_silabs_multiprotocol.py b/tests/components/otbr/test_silabs_multiprotocol.py index 01b1ab63f56cb5..c4123c256607de 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -1,6 +1,6 @@ """Test OTBR Silicon Labs Multiprotocol support.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest from python_otbr_api import ActiveDataSet, tlv_parser @@ -31,6 +31,11 @@ ) +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + async def test_async_change_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: diff --git a/tests/components/otbr/test_util.py b/tests/components/otbr/test_util.py index 0ed3041bea84ce..c11d8fe573618b 100644 --- a/tests/components/otbr/test_util.py +++ b/tests/components/otbr/test_util.py @@ -13,6 +13,11 @@ OTBR_NON_MULTIPAN_URL = "/dev/ttyAMA1" +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + async def test_get_allowed_channel( hass: HomeAssistant, multiprotocol_addon_manager_mock ) -> None: diff --git 
a/tests/components/otbr/test_websocket_api.py b/tests/components/otbr/test_websocket_api.py index 5361b56c688103..7311b194df465c 100644 --- a/tests/components/otbr/test_websocket_api.py +++ b/tests/components/otbr/test_websocket_api.py @@ -1,6 +1,6 @@ """Test OTBR Websocket API.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest import python_otbr_api @@ -29,6 +29,11 @@ async def websocket_client( return await hass_ws_client(hass) +@pytest.fixture(autouse=True) +def mock_supervisor_client(supervisor_client: AsyncMock) -> None: + """Mock supervisor client.""" + + async def test_get_info( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, diff --git a/tests/components/ovo_energy/test_config_flow.py b/tests/components/ovo_energy/test_config_flow.py index c3f77ca5007cce..cfe679a254acd3 100644 --- a/tests/components/ovo_energy/test_config_flow.py +++ b/tests/components/ovo_energy/test_config_flow.py @@ -117,6 +117,7 @@ async def test_full_flow_implementation(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"][CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] assert result2["data"][CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] + assert result2["data"][CONF_ACCOUNT] == FIXTURE_USER_INPUT[CONF_ACCOUNT] async def test_reauth_authorization_error(hass: HomeAssistant) -> None: @@ -125,15 +126,14 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT ) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - result = await mock_config.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" - result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, @@ -141,7 +141,7 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "authorization_error"} @@ -151,15 +151,16 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT ) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {} + with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", side_effect=aiohttp.ClientError, ): - result = await mock_config.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" - result2 = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, @@ -167,7 +168,7 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "reauth" + assert result2["step_id"] == "reauth_confirm" assert result2["errors"] == {"base": "connection_error"} @@ -177,14 +178,22 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: domain=DOMAIN, 
unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT ) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {} + with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - result = await mock_config.start_reauth_flow(hass) - + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + FIXTURE_REAUTH_INPUT, + ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth" + assert result["step_id"] == "reauth_confirm" assert result["errors"] == {"base": "authorization_error"} with ( diff --git a/tests/components/owntracks/test_config_flow.py b/tests/components/owntracks/test_config_flow.py index b1172eb4a3111f..a80685e9b1e236 100644 --- a/tests/components/owntracks/test_config_flow.py +++ b/tests/components/owntracks/test_config_flow.py @@ -8,9 +8,9 @@ from homeassistant.components.owntracks import config_flow from homeassistant.components.owntracks.config_flow import CONF_CLOUDHOOK, CONF_SECRET from homeassistant.components.owntracks.const import DOMAIN -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -94,13 +94,14 @@ async def test_import_setup(hass: HomeAssistant) -> None: async def test_abort_if_already_setup(hass: HomeAssistant) -> None: """Test that we can't add more than one instance.""" - flow = await init_config_flow(hass) - MockConfigEntry(domain=DOMAIN, data={}).add_to_hass(hass) assert hass.config_entries.async_entries(DOMAIN) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + # Should fail, already setup (flow) - result = await flow.async_step_user({}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" diff --git a/tests/components/p1_monitor/conftest.py b/tests/components/p1_monitor/conftest.py index 1d5f349f858c49..fbd39914536f0d 100644 --- a/tests/components/p1_monitor/conftest.py +++ b/tests/components/p1_monitor/conftest.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.p1_monitor.const import DOMAIN -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -19,8 +19,9 @@ def mock_config_entry() -> MockConfigEntry: return MockConfigEntry( title="monitor", domain=DOMAIN, - data={CONF_HOST: "example"}, + data={CONF_HOST: "example", CONF_PORT: 80}, unique_id="unique_thingy", + version=2, ) diff --git a/tests/components/p1_monitor/snapshots/test_init.ambr b/tests/components/p1_monitor/snapshots/test_init.ambr new file mode 100644 index 00000000000000..d0a676fce1b3d8 --- /dev/null +++ b/tests/components/p1_monitor/snapshots/test_init.ambr @@ -0,0 +1,45 @@ +# serializer version: 1 +# name: test_migration + ConfigEntrySnapshot({ + 'data': dict({ + 'host': 'example', + 'port': 80, + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'p1_monitor', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': 
False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': 'unique_thingy', + 'version': 2, + }) +# --- +# name: test_port_migration + ConfigEntrySnapshot({ + 'data': dict({ + 'host': 'example', + 'port': 80, + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'p1_monitor', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': 'unique_thingy', + 'version': 2, + }) +# --- diff --git a/tests/components/p1_monitor/test_config_flow.py b/tests/components/p1_monitor/test_config_flow.py index 12a6a6f5d11f81..cbd89320074a88 100644 --- a/tests/components/p1_monitor/test_config_flow.py +++ b/tests/components/p1_monitor/test_config_flow.py @@ -6,7 +6,7 @@ from homeassistant.components.p1_monitor.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -30,12 +30,13 @@ async def test_full_user_flow(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={CONF_HOST: "example.com"}, + user_input={CONF_HOST: "example.com", CONF_PORT: 80}, ) assert result2.get("type") is FlowResultType.CREATE_ENTRY assert result2.get("title") == "P1 Monitor" - assert result2.get("data") == {CONF_HOST: "example.com"} + assert result2.get("data") == {CONF_HOST: "example.com", CONF_PORT: 80} + assert isinstance(result2["data"][CONF_PORT], int) assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_p1monitor.mock_calls) == 1 @@ -50,7 +51,7 @@ async def test_api_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "example.com"}, + data={CONF_HOST: "example.com", CONF_PORT: 80}, ) assert result.get("type") is FlowResultType.FORM diff --git a/tests/components/p1_monitor/test_diagnostics.py b/tests/components/p1_monitor/test_diagnostics.py index 55d4ccc5e67c92..396a3d3bd0dadd 100644 --- a/tests/components/p1_monitor/test_diagnostics.py +++ b/tests/components/p1_monitor/test_diagnostics.py @@ -21,6 +21,7 @@ async def test_diagnostics( "title": "monitor", "data": { "host": REDACTED, + "port": REDACTED, }, }, "data": { diff --git a/tests/components/p1_monitor/test_init.py b/tests/components/p1_monitor/test_init.py index 02888b5ae97af4..20714740385b53 100644 --- a/tests/components/p1_monitor/test_init.py +++ b/tests/components/p1_monitor/test_init.py @@ -3,9 +3,11 @@ from unittest.mock import AsyncMock, MagicMock, patch from p1monitor import P1MonitorConnectionError +from syrupy import SnapshotAssertion from homeassistant.components.p1_monitor.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -44,3 +46,35 @@ async def test_config_entry_not_ready( assert mock_request.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_migration(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test config entry version 1 -> 2 migration.""" + mock_config_entry = MockConfigEntry( + unique_id="unique_thingy", + domain=DOMAIN, + data={CONF_HOST: "example"}, + version=1, + ) + 
mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) == snapshot + + +async def test_port_migration(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test migration of host:port to separate host and port.""" + mock_config_entry = MockConfigEntry( + unique_id="unique_thingy", + domain=DOMAIN, + data={CONF_HOST: "example:80"}, + version=1, + ) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) == snapshot diff --git a/tests/components/palazzetti/__init__.py b/tests/components/palazzetti/__init__.py new file mode 100644 index 00000000000000..0aafdf553adc99 --- /dev/null +++ b/tests/components/palazzetti/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Palazzetti integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/palazzetti/conftest.py b/tests/components/palazzetti/conftest.py new file mode 100644 index 00000000000000..33dca845098d30 --- /dev/null +++ b/tests/components/palazzetti/conftest.py @@ -0,0 +1,74 @@ +"""Fixtures for Palazzetti integration tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.palazzetti.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.palazzetti.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="palazzetti", + domain=DOMAIN, + data={CONF_HOST: "127.0.0.1"}, + unique_id="11:22:33:44:55:66", + ) + + +@pytest.fixture +def mock_palazzetti_client() -> Generator[AsyncMock]: + """Return a mocked PalazzettiClient.""" + with ( + patch( + "homeassistant.components.palazzetti.coordinator.PalazzettiClient", + autospec=True, + ) as client, + patch( + "homeassistant.components.palazzetti.config_flow.PalazzettiClient", + new=client, + ), + ): + mock_client = client.return_value + mock_client.mac = "11:22:33:44:55:66" + mock_client.name = "Stove" + mock_client.sw_version = "0.0.0" + mock_client.hw_version = "1.1.1" + mock_client.fan_speed_min = 1 + mock_client.fan_speed_max = 5 + mock_client.has_fan_silent = True + mock_client.has_fan_high = True + mock_client.has_fan_auto = True + mock_client.has_on_off_switch = True + mock_client.connected = True + mock_client.is_heating = True + mock_client.room_temperature = 18 + mock_client.target_temperature = 21 + mock_client.target_temperature_min = 5 + mock_client.target_temperature_max = 50 + mock_client.fan_speed = 3 + mock_client.connect.return_value = True + mock_client.update_state.return_value = True + mock_client.set_on.return_value = True + 
mock_client.set_target_temperature.return_value = True + mock_client.set_fan_speed.return_value = True + mock_client.set_fan_silent.return_value = True + mock_client.set_fan_high.return_value = True + mock_client.set_fan_auto.return_value = True + yield mock_client diff --git a/tests/components/palazzetti/snapshots/test_climate.ambr b/tests/components/palazzetti/snapshots/test_climate.ambr new file mode 100644 index 00000000000000..eb3b323272e3d1 --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_climate.ambr @@ -0,0 +1,86 @@ +# serializer version: 1 +# name: test_all_entities[climate.stove-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'silent', + '1', + '2', + '3', + '4', + '5', + 'high', + 'auto', + ]), + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 50, + 'min_temp': 5, + 'target_temp_step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.stove', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'palazzetti', + 'unique_id': '11:22:33:44:55:66', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.stove-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 18, + 'fan_mode': '3', + 'fan_modes': list([ + 'silent', + '1', + '2', + '3', + '4', + '5', + 'high', + 'auto', + ]), + 'friendly_name': 'Stove', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 50, + 'min_temp': 5, + 'supported_features': , + 'target_temp_step': 1.0, + 'temperature': 21, + }), + 'context': , + 'entity_id': 'climate.stove', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/palazzetti/snapshots/test_init.ambr b/tests/components/palazzetti/snapshots/test_init.ambr new file mode 100644 index 00000000000000..abdee6b7f6fb34 --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '11:22:33:44:55:66', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.1.1', + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Palazzetti', + 'model': None, + 'model_id': None, + 'name': 'Stove', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '0.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/palazzetti/test_climate.py b/tests/components/palazzetti/test_climate.py new file mode 100644 index 00000000000000..78af8f00bdb9c0 --- /dev/null +++ b/tests/components/palazzetti/test_climate.py @@ -0,0 +1,174 @@ +"""Tests for the Palazzetti climate platform.""" + +from unittest.mock import AsyncMock, patch + +from pypalazzetti.exceptions import CommunicationError, ValidationError +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_FAN_MODE, + ATTR_HVAC_MODE, + DOMAIN as CLIMATE_DOMAIN, + 
SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.components.palazzetti.const import FAN_AUTO, FAN_HIGH, FAN_SILENT +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +ENTITY_ID = "climate.stove" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.palazzetti.PLATFORMS", [Platform.CLIMATE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_async_set_data( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting climate data via service call.""" + await setup_integration(hass, mock_config_entry) + + # Set HVAC Mode: Success + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + mock_palazzetti_client.set_on.assert_called_once_with(True) + mock_palazzetti_client.set_on.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + mock_palazzetti_client.set_on.assert_called_once_with(False) + mock_palazzetti_client.set_on.reset_mock() + + # Set HVAC Mode: Error + mock_palazzetti_client.set_on.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + mock_palazzetti_client.set_on.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + # Set Temperature: Success + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, + blocking=True, + ) + mock_palazzetti_client.set_target_temperature.assert_called_once_with(22) + mock_palazzetti_client.set_target_temperature.reset_mock() + + # Set Temperature: Error + mock_palazzetti_client.set_target_temperature.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, + blocking=True, + ) + + mock_palazzetti_client.set_target_temperature.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 22}, + blocking=True, + ) + + # Set Fan Mode: Success + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_SILENT}, + blocking=True, + ) + 
mock_palazzetti_client.set_fan_silent.assert_called_once() + mock_palazzetti_client.set_fan_silent.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_HIGH}, + blocking=True, + ) + mock_palazzetti_client.set_fan_high.assert_called_once() + mock_palazzetti_client.set_fan_high.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: FAN_AUTO}, + blocking=True, + ) + mock_palazzetti_client.set_fan_auto.assert_called_once() + mock_palazzetti_client.set_fan_auto.reset_mock() + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: "3"}, + blocking=True, + ) + mock_palazzetti_client.set_fan_speed.assert_called_once_with(3) + mock_palazzetti_client.set_fan_speed.reset_mock() + + # Set Fan Mode: Error + mock_palazzetti_client.set_fan_speed.side_effect = CommunicationError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: 3}, + blocking=True, + ) + + mock_palazzetti_client.set_fan_speed.side_effect = ValidationError() + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: 3}, + blocking=True, + ) diff --git a/tests/components/palazzetti/test_config_flow.py b/tests/components/palazzetti/test_config_flow.py new file mode 100644 index 00000000000000..03c56c33d0ce34 --- /dev/null +++ b/tests/components/palazzetti/test_config_flow.py @@ -0,0 +1,140 @@ +"""Test the Palazzetti config flow.""" + +from unittest.mock import AsyncMock + +from pypalazzetti.exceptions import CommunicationError + +from homeassistant.components import dhcp +from homeassistant.components.palazzetti.const import DOMAIN +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_user_flow( + hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.1"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Stove" + assert result["data"] == {CONF_HOST: "192.168.1.1"} + assert result["result"].unique_id == "11:22:33:44:55:66" + assert len(mock_palazzetti_client.connect.mock_calls) > 0 + + +async def test_invalid_host( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test cannot connect error.""" + + mock_palazzetti_client.connect.side_effect = CommunicationError() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.1"}, + ) + + assert 
result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_palazzetti_client.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.1"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate( + hass: HomeAssistant, + mock_palazzetti_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.1.1"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_dhcp_flow( + hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the DHCP flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=dhcp.DhcpServiceInfo( + hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" + ), + context={"source": SOURCE_DHCP}, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Stove" + assert result["result"].unique_id == "11:22:33:44:55:66" + + +async def test_dhcp_flow_error( + hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the DHCP flow.""" + mock_palazzetti_client.connect.side_effect = CommunicationError() + + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=dhcp.DhcpServiceInfo( + hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" + ), + context={"source": SOURCE_DHCP}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/palazzetti/test_init.py b/tests/components/palazzetti/test_init.py new file mode 100644 index 00000000000000..710144b2b7bf99 --- /dev/null +++ b/tests/components/palazzetti/test_init.py @@ -0,0 +1,46 @@ +"""Tests for the Palazzetti integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_palazzetti_client: AsyncMock, +) -> None: + """Test the Palazzetti configuration entry loading/unloading.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_palazzetti_client: AsyncMock, + snapshot: SnapshotAssertion, + device_registry: dr.DeviceRegistry, +) -> None: + """Test the device information.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, "11:22:33:44:55:66")} + ) + assert device is not None + assert device == snapshot diff --git a/tests/components/panel_iframe/__init__.py b/tests/components/panel_iframe/__init__.py deleted file mode 100644 index df7115d9e97619..00000000000000 --- a/tests/components/panel_iframe/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the panel_iframe component.""" diff --git a/tests/components/panel_iframe/test_init.py b/tests/components/panel_iframe/test_init.py deleted file mode 100644 index 74e1b642df5b8f..00000000000000 --- a/tests/components/panel_iframe/test_init.py +++ /dev/null @@ -1,154 +0,0 @@ -"""The tests for the panel_iframe component.""" - -from typing import Any - -import pytest - -from homeassistant.components.panel_iframe import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.typing import WebSocketGenerator - -TEST_CONFIG = { - "router": { - "icon": "mdi:network-wireless", - "title": "Router", - "url": "http://192.168.1.1", - "require_admin": True, - }, - "weather": { - "icon": "mdi:weather", - "title": "Weather", - "url": "https://www.wunderground.com/us/ca/san-diego", - "require_admin": True, - }, - "api": {"icon": "mdi:weather", "title": "Api", "url": "/api"}, - "ftp": { - "icon": "mdi:weather", - "title": "FTP", - "url": "ftp://some/ftp", - }, -} - - -@pytest.mark.parametrize( - "config_to_try", - [ - {"invalid space": {"url": "https://home-assistant.io"}}, - {"router": {"url": "not-a-url"}}, - ], -) -async def test_wrong_config(hass: HomeAssistant, config_to_try) -> None: - """Test setup with wrong configuration.""" - assert not await async_setup_component( - hass, "panel_iframe", {"panel_iframe": config_to_try} - ) - - -async def test_import_config( - hass: HomeAssistant, - hass_storage: dict[str, Any], - hass_ws_client: WebSocketGenerator, -) -> None: - """Test import config.""" - client = await hass_ws_client(hass) - - assert await async_setup_component( - hass, - "panel_iframe", - {"panel_iframe": TEST_CONFIG}, - ) - - # List dashboards - await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] == [ - { - "icon": "mdi:network-wireless", - "id": "router", - "mode": "storage", - "require_admin": True, - "show_in_sidebar": True, - "title": "Router", - "url_path": "router", - }, - { - "icon": "mdi:weather", - "id": "weather", - "mode": "storage", - "require_admin": True, - 
"show_in_sidebar": True, - "title": "Weather", - "url_path": "weather", - }, - { - "icon": "mdi:weather", - "id": "api", - "mode": "storage", - "require_admin": False, - "show_in_sidebar": True, - "title": "Api", - "url_path": "api", - }, - { - "icon": "mdi:weather", - "id": "ftp", - "mode": "storage", - "require_admin": False, - "show_in_sidebar": True, - "title": "FTP", - "url_path": "ftp", - }, - ] - - for url_path in ("api", "ftp", "router", "weather"): - await client.send_json_auto_id( - {"type": "lovelace/config", "url_path": url_path} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == { - "strategy": {"type": "iframe", "url": TEST_CONFIG[url_path]["url"]} - } - - assert hass_storage[DOMAIN]["data"] == {"migrated": True} - - -async def test_import_config_once( - hass: HomeAssistant, - hass_storage: dict[str, Any], - hass_ws_client: WebSocketGenerator, -) -> None: - """Test import config only happens once.""" - client = await hass_ws_client(hass) - - hass_storage[DOMAIN] = { - "version": 1, - "minor_version": 1, - "key": "map", - "data": {"migrated": True}, - } - - assert await async_setup_component( - hass, - "panel_iframe", - {"panel_iframe": TEST_CONFIG}, - ) - - # List dashboards - await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) - response = await client.receive_json() - assert response["success"] - assert response["result"] == [] - - -async def test_create_issue_when_manually_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test creating issue registry issues.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) - - assert issue_registry.async_get_issue(DOMAIN, "deprecated_yaml") diff --git a/tests/components/pegel_online/snapshots/test_diagnostics.ambr b/tests/components/pegel_online/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000000..1e55805f86768d --- /dev/null +++ b/tests/components/pegel_online/snapshots/test_diagnostics.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'data': dict({ + 'air_temperature': None, + 'clearance_height': None, + 'oxygen_level': None, + 'ph_value': None, + 'water_flow': dict({ + 'uom': 'm³/s', + 'value': 88.4, + }), + 'water_level': dict({ + 'uom': 'cm', + 'value': 62, + }), + 'water_speed': None, + 'water_temperature': None, + }), + 'entry': dict({ + 'data': dict({ + 'station': '70272185-xxxx-xxxx-xxxx-43bea330dcae', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'pegel_online', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': '70272185-xxxx-xxxx-xxxx-43bea330dcae', + 'version': 1, + }), + }) +# --- diff --git a/tests/components/pegel_online/test_diagnostics.py b/tests/components/pegel_online/test_diagnostics.py new file mode 100644 index 00000000000000..220f244b7510cb --- /dev/null +++ b/tests/components/pegel_online/test_diagnostics.py @@ -0,0 +1,44 @@ +"""Test pegel_online diagnostics.""" + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.pegel_online.const import CONF_STATION, DOMAIN +from homeassistant.core import HomeAssistant + +from . 
import PegelOnlineMock +from .const import ( + MOCK_CONFIG_ENTRY_DATA_DRESDEN, + MOCK_STATION_DETAILS_DRESDEN, + MOCK_STATION_MEASUREMENT_DRESDEN, +) + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG_ENTRY_DATA_DRESDEN, + unique_id=MOCK_CONFIG_ENTRY_DATA_DRESDEN[CONF_STATION], + ) + entry.add_to_hass(hass) + with patch("homeassistant.components.pegel_online.PegelOnline") as pegelonline: + pegelonline.return_value = PegelOnlineMock( + station_details=MOCK_STATION_DETAILS_DRESDEN, + station_measurements=MOCK_STATION_MEASUREMENT_DRESDEN, + ) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/permobil/test_config_flow.py b/tests/components/permobil/test_config_flow.py index f9121f8f268371..7067566a74d704 100644 --- a/tests/components/permobil/test_config_flow.py +++ b/tests/components/permobil/test_config_flow.py @@ -284,11 +284,7 @@ async def test_config_flow_reauth_success( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": "reauth", "entry_id": mock_entry.entry_id}, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "email_code" @@ -328,11 +324,7 @@ async def test_config_flow_reauth_fail_invalid_code( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": "reauth", "entry_id": mock_entry.entry_id}, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "email_code" @@ -360,17 +352,11 @@ async def test_config_flow_reauth_fail_code_request( ) mock_entry.add_to_hass(hass) # test the reauth and have request_application_code fail leading to an abort - my_permobil.request_application_code.side_effect = MyPermobilAPIException - reauth_entry = hass.config_entries.async_entries(config_flow.DOMAIN)[0] with patch( "homeassistant.components.permobil.config_flow.MyPermobil", return_value=my_permobil, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": "reauth", "entry_id": reauth_entry.entry_id}, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index 80d059618133d5..c08885634dbfe5 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -161,6 +161,10 @@ async def test_pairing(hass: HomeAssistant, mock_tv_pairable, mock_setup_entry) assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.parametrize( # Remove when 
translations fixed + "ignore_translations", + ["component.philips_js.config.abort.pairing_failure"], +) async def test_pair_request_failed( hass: HomeAssistant, mock_tv_pairable, mock_setup_entry ) -> None: @@ -188,6 +192,10 @@ async def test_pair_request_failed( } +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.philips_js.config.abort.pairing_failure"], +) async def test_pair_grant_failed( hass: HomeAssistant, mock_tv_pairable, mock_setup_entry ) -> None: diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index ace3ccbda60dce..f18c96d36c5f8d 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -7,6 +7,7 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch +from packaging.version import Version from plugwise import PlugwiseData import pytest @@ -67,7 +68,7 @@ def mock_smile_config_flow() -> Generator[MagicMock]: smile.smile_model = "Test Model" smile.smile_model_id = "Test Model ID" smile.smile_name = "Test Smile Name" - smile.connect.return_value = True + smile.connect.return_value = Version("4.3.2") yield smile @@ -89,7 +90,7 @@ def mock_smile_adam() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -116,7 +117,7 @@ def mock_smile_adam_2() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.6.4") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -143,7 +144,7 @@ def mock_smile_adam_3() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.6.4") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -170,7 +171,7 @@ def mock_smile_adam_4() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile_open_therm" smile.smile_name = "Adam" - smile.connect.return_value = True + smile.connect.return_value = Version("3.2.8") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -196,7 +197,7 @@ def mock_smile_anna() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = True + smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -222,7 +223,7 @@ def mock_smile_anna_2() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = True + smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -248,7 +249,7 @@ def mock_smile_anna_3() 
-> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile_thermo" smile.smile_name = "Smile Anna" - smile.connect.return_value = True + smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -274,7 +275,7 @@ def mock_smile_p1() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile" smile.smile_name = "Smile P1" - smile.connect.return_value = True + smile.connect.return_value = Version("4.4.2") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -300,7 +301,7 @@ def mock_smile_p1_2() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = "smile" smile.smile_name = "Smile P1" - smile.connect.return_value = True + smile.connect.return_value = Version("4.4.2") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -326,9 +327,7 @@ def mock_smile_legacy_anna() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = None smile.smile_name = "Smile Anna" - - smile.connect.return_value = True - + smile.connect.return_value = Version("1.8.22") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] @@ -354,7 +353,7 @@ def mock_stretch() -> Generator[MagicMock]: smile.smile_model = "Gateway" smile.smile_model_id = None smile.smile_name = "Stretch" - smile.connect.return_value = True + smile.connect.return_value = Version("3.1.11") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( all_data["gateway"], all_data["devices"] diff --git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index 50c3fa5a7dccba..ec2095648b889a 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -86,7 +86,7 @@ }, "457ce8414de24596a2d5e7dbc9c7682f": { "available": true, - "dev_class": "zz_misc", + "dev_class": "zz_misc_plug", "location": "9e4433a9d69f40b3aefd15e74395eaec", "model": "Aqara Smart Plug", "model_id": "lumi.plug.maeu01", diff --git a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json index 7a61bf10602e7c..a182b1ac8dd04d 100644 --- a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json @@ -2,7 +2,7 @@ "devices": { "02cf28bfec924855854c544690a609ef": { "available": true, - "dev_class": "vcr", + "dev_class": "vcr_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", @@ -23,7 +23,7 @@ }, "21f2b542c49845e6bb416884c55778d6": { "available": true, - "dev_class": "game_console", + "dev_class": "game_console_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", @@ -44,7 +44,7 @@ }, "4a810418d5394b3f82727340b91ba740": { "available": true, - "dev_class": "router", + "dev_class": "router_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", @@ -65,7 
+65,7 @@ }, "675416a629f343c495449970e2ca37b5": { "available": true, - "dev_class": "router", + "dev_class": "router_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", @@ -158,7 +158,7 @@ }, "78d1126fc4c743db81b61c20e88342a7": { "available": true, - "dev_class": "central_heating_pump", + "dev_class": "central_heating_pump_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "c50f167537524366a5af7aa3942feb1e", "model": "Plug", @@ -192,7 +192,7 @@ }, "a28f588dc4a049a483fd03a30361ad3a": { "available": true, - "dev_class": "settop", + "dev_class": "settop_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", @@ -309,7 +309,7 @@ }, "cd0ddb54ef694e11ac18ed1cbce5dbbd": { "available": true, - "dev_class": "vcr", + "dev_class": "vcr_plug", "firmware": "2019-06-21T02:00:00+02:00", "location": "cd143c07248f491493cea0533bc3d669", "model": "Plug", diff --git a/tests/components/plugwise/snapshots/test_diagnostics.ambr b/tests/components/plugwise/snapshots/test_diagnostics.ambr index 30aae633125f42..d187e0355bf7dc 100644 --- a/tests/components/plugwise/snapshots/test_diagnostics.ambr +++ b/tests/components/plugwise/snapshots/test_diagnostics.ambr @@ -4,7 +4,7 @@ 'devices': dict({ '02cf28bfec924855854c544690a609ef': dict({ 'available': True, - 'dev_class': 'vcr', + 'dev_class': 'vcr_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', @@ -25,7 +25,7 @@ }), '21f2b542c49845e6bb416884c55778d6': dict({ 'available': True, - 'dev_class': 'game_console', + 'dev_class': 'game_console_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', @@ -46,7 +46,7 @@ }), '4a810418d5394b3f82727340b91ba740': dict({ 'available': True, - 'dev_class': 'router', + 'dev_class': 'router_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', @@ -67,7 +67,7 @@ }), '675416a629f343c495449970e2ca37b5': dict({ 'available': True, - 'dev_class': 'router', + 'dev_class': 'router_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', @@ -166,7 +166,7 @@ }), '78d1126fc4c743db81b61c20e88342a7': dict({ 'available': True, - 'dev_class': 'central_heating_pump', + 'dev_class': 'central_heating_pump_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'c50f167537524366a5af7aa3942feb1e', 'model': 'Plug', @@ -200,7 +200,7 @@ }), 'a28f588dc4a049a483fd03a30361ad3a': dict({ 'available': True, - 'dev_class': 'settop', + 'dev_class': 'settop_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', @@ -323,7 +323,7 @@ }), 'cd0ddb54ef694e11ac18ed1cbce5dbbd': dict({ 'available': True, - 'dev_class': 'vcr', + 'dev_class': 'vcr_plug', 'firmware': '2019-06-21T02:00:00+02:00', 'location': 'cd143c07248f491493cea0533bc3d669', 'model': 'Plug', diff --git a/tests/components/powerwall/test_config_flow.py b/tests/components/powerwall/test_config_flow.py index 5074a289d191e9..1ff1470f81c03e 100644 --- a/tests/components/powerwall/test_config_flow.py +++ b/tests/components/powerwall/test_config_flow.py @@ -339,6 +339,11 @@ async def test_form_reauth(hass: HomeAssistant) -> None: result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} + flow = 
hass.config_entries.flow.async_get(result["flow_id"]) + assert flow["context"]["title_placeholders"] == { + "ip_address": VALID_CONFIG[CONF_IP_ADDRESS], + "name": entry.title, + } mock_powerwall = await _mock_powerwall_site_name(hass, "My site") diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 3f0e0b92056ac1..37940df437bd31 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -5,6 +5,7 @@ import logging import os from pathlib import Path +import sys from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -70,6 +71,9 @@ def _mock_path(filename: str) -> str: await hass.async_block_till_done() +@pytest.mark.skipif( + sys.version_info >= (3, 13), reason="not yet available on Python 3.13" +) async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: """Test we can setup and the service is registered.""" test_dir = tmp_path / "profiles" @@ -101,6 +105,24 @@ def _mock_path(filename: str) -> str: await hass.async_block_till_done() +@pytest.mark.skipif(sys.version_info < (3, 13), reason="still works on python 3.12") +async def test_memory_usage_py313(hass: HomeAssistant, tmp_path: Path) -> None: + """Test raise an error on python3.13.""" + entry = MockConfigEntry(domain=DOMAIN) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert hass.services.has_service(DOMAIN, SERVICE_MEMORY) + with pytest.raises( + HomeAssistantError, + match="Memory profiling is not supported on Python 3.13. Please use Python 3.12.", + ): + await hass.services.async_call( + DOMAIN, SERVICE_MEMORY, {CONF_SECONDS: 0.000001}, blocking=True + ) + + async def test_object_growth_logging( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index b505fc81a35a44..043a9cc438937b 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -3,11 +3,12 @@ from dataclasses import dataclass import datetime from http import HTTPStatus -from typing import Any +from typing import Any, Self from unittest import mock from freezegun import freeze_time import prometheus_client +from prometheus_client.utils import floatToGoString import pytest from homeassistant.components import ( @@ -30,6 +31,7 @@ switch, update, ) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, @@ -63,8 +65,6 @@ CONTENT_TYPE_TEXT_PLAIN, DEGREE, PERCENTAGE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, STATE_CLOSED, STATE_CLOSING, STATE_HOME, @@ -74,6 +74,7 @@ STATE_OPEN, STATE_OPENING, STATE_UNAVAILABLE, + STATE_UNKNOWN, UnitOfEnergy, UnitOfTemperature, ) @@ -87,6 +88,77 @@ PROMETHEUS_PATH = "homeassistant.components.prometheus" +class EntityMetric: + """Represents a Prometheus metric for a Home Assistant entity.""" + + metric_name: str + labels: dict[str, str] + + @classmethod + def required_labels(cls) -> list[str]: + """List of all required labels for a Prometheus metric.""" + return [ + "domain", + "friendly_name", + "entity", + ] + + def __init__(self, metric_name: str, **kwargs: Any) -> None: + """Create a new EntityMetric based on metric name and labels.""" + self.metric_name = metric_name + self.labels = kwargs + + # Labels that are required for all entities. 
+ for labelname in self.required_labels(): + assert labelname in self.labels + assert self.labels[labelname] != "" + + def withValue(self, value: float) -> Self: + """Return a metric with value.""" + return EntityMetricWithValue(self, value) + + @property + def _metric_name_string(self) -> str: + """Return a full metric name as a string.""" + labels = ",".join( + f'{key}="{value}"' for key, value in sorted(self.labels.items()) + ) + return f"{self.metric_name}{{{labels}}}" + + def _in_metrics(self, metrics: list[str]) -> bool: + """Report whether this metric exists in the provided Prometheus output.""" + return any(line.startswith(self._metric_name_string) for line in metrics) + + def assert_in_metrics(self, metrics: list[str]) -> None: + """Assert that this metric exists in the provided Prometheus output.""" + assert self._in_metrics(metrics) + + def assert_not_in_metrics(self, metrics: list[str]) -> None: + """Assert that this metric does not exist in Prometheus output.""" + assert not self._in_metrics(metrics) + + +class EntityMetricWithValue(EntityMetric): + """Represents a Prometheus metric with a value.""" + + value: float + + def __init__(self, metric: EntityMetric, value: float) -> None: + """Create a new metric with a value based on a metric.""" + super().__init__(metric.metric_name, **metric.labels) + self.value = value + + @property + def _metric_string(self) -> str: + """Return a full metric string.""" + value = floatToGoString(self.value) + return f"{self._metric_name_string} {value}" + + def assert_in_metrics(self, metrics: list[str]) -> None: + """Assert that this metric exists in the provided Prometheus output.""" + assert self._metric_string in metrics + + @dataclass class FilterTest: """Class for capturing a filter test.""" @@ -95,6 +167,299 @@ class FilterTest: should_pass: bool +def test_entity_metric_generates_metric_name_string_without_value() -> None: + """Test using EntityMetric to format a simple metric string without any value.""" + domain = "sensor" + object_id = "outside_temperature" + entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain=domain, + friendly_name="Outside Temperature", + entity=f"{domain}.{object_id}", + ) + assert entity_metric._metric_name_string == ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"}' + ) + + +def test_entity_metric_generates_metric_string_with_value() -> None: + """Test using EntityMetric to format a simple metric string but with a metric value included.""" + domain = "sensor" + object_id = "outside_temperature" + entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain=domain, + friendly_name="Outside Temperature", + entity=f"{domain}.{object_id}", + ).withValue(17.2) + assert entity_metric._metric_string == ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"}' + " 17.2" + ) + + +def test_entity_metric_raises_exception_without_required_labels() -> None: + """Test using EntityMetric to raise exception when required labels are missing.""" + domain = "sensor" + object_id = "outside_temperature" + test_kwargs = { + "metric_name": "homeassistant_sensor_temperature_celsius", + "domain": domain, + "friendly_name": "Outside Temperature", + "entity": f"{domain}.{object_id}", + } + + assert len(EntityMetric.required_labels()) > 0 + + for labelname in 
EntityMetric.required_labels(): + label_kwargs = dict(test_kwargs) + # Delete the required label and ensure we get an exception + del label_kwargs[labelname] + with pytest.raises(AssertionError): + EntityMetric(**label_kwargs) + + +def test_entity_metric_raises_exception_if_required_label_is_empty_string() -> None: + """Test using EntityMetric to raise exception when required label value is empty string.""" + domain = "sensor" + object_id = "outside_temperature" + test_kwargs = { + "metric_name": "homeassistant_sensor_temperature_celsius", + "domain": domain, + "friendly_name": "Outside Temperature", + "entity": f"{domain}.{object_id}", + } + + assert len(EntityMetric.required_labels()) > 0 + + for labelname in EntityMetric.required_labels(): + label_kwargs = dict(test_kwargs) + # Replace the required label with "" and ensure we get an exception + label_kwargs[labelname] = "" + with pytest.raises(AssertionError): + EntityMetric(**label_kwargs) + + +def test_entity_metric_generates_alphabetically_ordered_labels() -> None: + """Test using EntityMetric to format a simple metric string with labels alphabetically ordered.""" + domain = "sensor" + object_id = "outside_temperature" + + static_metric_string = ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature",' + 'zed_label="foo"' + "}" + " 17.2" + ) + + ordered_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain=domain, + entity=f"{domain}.{object_id}", + friendly_name="Outside Temperature", + zed_label="foo", + ).withValue(17.2) + assert ordered_entity_metric._metric_string == static_metric_string + + unordered_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + zed_label="foo", + entity=f"{domain}.{object_id}", + friendly_name="Outside Temperature", + domain=domain, + ).withValue(17.2) + assert unordered_entity_metric._metric_string == static_metric_string + + +def test_entity_metric_generates_metric_string_with_non_required_labels() -> None: + """Test using EntityMetric to format a simple metric string but with extra labels and values included.""" + mode_entity_metric = EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ).withValue(1) + assert mode_entity_metric._metric_string == ( + "climate_preset_mode{" + 'domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"' + "}" + " 1.0" + ) + + action_entity_metric = EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1) + assert action_entity_metric._metric_string == ( + "climate_action{" + 'action="heating",' + 'domain="climate",' + 'entity="climate.heatpump",' + 'friendly_name="HeatPump"' + "}" + " 1.0" + ) + + state_entity_metric = EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name="Curtain", + entity="cover.curtain", + state="open", + ).withValue(1) + assert state_entity_metric._metric_string == ( + "cover_state{" + 'domain="cover",' + 'entity="cover.curtain",' + 'friendly_name="Curtain",' + 'state="open"' + "}" + " 1.0" + ) + + foo_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + foo="bar", + ).withValue(17.2) + assert 
foo_entity_metric._metric_string == ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'foo="bar",' + 'friendly_name="Outside Temperature"' + "}" + " 17.2" + ) + + +def test_entity_metric_assert_helpers() -> None: + """Test using EntityMetric for both assert_in_metrics and assert_not_in_metrics.""" + temp_metric = ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'foo="bar",' + 'friendly_name="Outside Temperature"' + "}" + ) + climate_metric = ( + "climate_preset_mode{" + 'domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"' + "}" + ) + excluded_cover_metric = ( + "cover_state{" + 'domain="cover",' + 'entity="cover.curtain",' + 'friendly_name="Curtain",' + 'state="open"' + "}" + ) + metrics = [ + temp_metric, + climate_metric, + ] + # First make sure the excluded metric is not present + assert excluded_cover_metric not in metrics + # now check for actual metrics + temp_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + foo="bar", + ) + assert temp_entity_metric._metric_name_string == temp_metric + temp_entity_metric.assert_in_metrics(metrics) + + climate_entity_metric = EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ) + assert climate_entity_metric._metric_name_string == climate_metric + climate_entity_metric.assert_in_metrics(metrics) + + excluded_cover_entity_metric = EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name="Curtain", + entity="cover.curtain", + state="open", + ) + assert excluded_cover_entity_metric._metric_name_string == excluded_cover_metric + excluded_cover_entity_metric.assert_not_in_metrics(metrics) + + +def test_entity_metric_with_value_assert_helpers() -> None: + """Test using EntityMetricWithValue helpers, which is only assert_in_metrics.""" + temp_metric = ( + "homeassistant_sensor_temperature_celsius{" + 'domain="sensor",' + 'entity="sensor.outside_temperature",' + 'foo="bar",' + 'friendly_name="Outside Temperature"' + "}" + " 17.2" + ) + climate_metric = ( + "climate_preset_mode{" + 'domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"' + "}" + " 1.0" + ) + metrics = [ + temp_metric, + climate_metric, + ] + temp_entity_metric = EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + foo="bar", + ).withValue(17.2) + assert temp_entity_metric._metric_string == temp_metric + temp_entity_metric.assert_in_metrics(metrics) + + climate_entity_metric = EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ).withValue(1) + assert climate_entity_metric._metric_string == climate_metric + climate_entity_metric.assert_in_metrics(metrics) + + @pytest.fixture(name="client") async def setup_prometheus_client( hass: HomeAssistant, @@ -153,16 +518,18 @@ async def test_setup_enumeration( suggested_object_id="outside_temperature", original_name="Outside Temperature", ) - set_state_with_entry(hass, sensor_1, 12.3, {}) + state = 12.3 + set_state_with_entry(hass, sensor_1, state, {}) assert await async_setup_component(hass, prometheus.DOMAIN, 
{prometheus.DOMAIN: {}}) client = await hass_client() body = await generate_latest_metrics(client) - assert ( - 'homeassistant_sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 12.3' in body - ) + EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(state).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -178,17 +545,19 @@ async def test_view_empty_namespace( "Objects collected during gc" in body ) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.radio_energy",' - 'friendly_name="Radio Energy"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Radio Energy", + entity="sensor.radio_energy", + ).withValue(1).assert_in_metrics(body) - assert ( - 'last_updated_time_seconds{domain="sensor",' - 'entity="sensor.radio_energy",' - 'friendly_name="Radio Energy"} 86400.0' in body - ) + EntityMetric( + metric_name="last_updated_time_seconds", + domain="sensor", + friendly_name="Radio Energy", + entity="sensor.radio_energy", + ).withValue(86400.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [None]) @@ -204,11 +573,12 @@ async def test_view_default_namespace( "Objects collected during gc" in body ) - assert ( - 'homeassistant_sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) + EntityMetric( + metric_name="homeassistant_sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -218,29 +588,33 @@ async def test_sensor_unit( """Test prometheus metrics for sensors with a unit.""" body = await generate_latest_metrics(client) - assert ( - 'sensor_unit_kwh{domain="sensor",' - 'entity="sensor.television_energy",' - 'friendly_name="Television Energy"} 74.0' in body - ) - - assert ( - 'sensor_unit_sek_per_kwh{domain="sensor",' - 'entity="sensor.electricity_price",' - 'friendly_name="Electricity price"} 0.123' in body - ) - - assert ( - 'sensor_unit_u0xb0{domain="sensor",' - 'entity="sensor.wind_direction",' - 'friendly_name="Wind Direction"} 25.0' in body - ) - - assert ( - 'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",' - 'entity="sensor.sps30_pm_1um_weight_concentration",' - 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body - ) + EntityMetric( + metric_name="sensor_unit_kwh", + domain="sensor", + friendly_name="Television Energy", + entity="sensor.television_energy", + ).withValue(74.0).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_unit_sek_per_kwh", + domain="sensor", + friendly_name="Electricity price", + entity="sensor.electricity_price", + ).withValue(0.123).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_unit_u0xb0", + domain="sensor", + friendly_name="Wind Direction", + entity="sensor.wind_direction", + ).withValue(25.0).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_unit_u0xb5g_per_mu0xb3", + domain="sensor", + friendly_name="SPS30 PM <1µm Weight concentration", + entity="sensor.sps30_pm_1um_weight_concentration", + ).withValue(3.7069).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -250,23 +624,26 @@ async def test_sensor_without_unit( """Test 
prometheus metrics for sensors without a unit.""" body = await generate_latest_metrics(client) - assert ( - 'sensor_state{domain="sensor",' - 'entity="sensor.trend_gradient",' - 'friendly_name="Trend Gradient"} 0.002' in body - ) + EntityMetric( + metric_name="sensor_state", + domain="sensor", + friendly_name="Trend Gradient", + entity="sensor.trend_gradient", + ).withValue(0.002).assert_in_metrics(body) - assert ( - 'sensor_state{domain="sensor",' - 'entity="sensor.text",' - 'friendly_name="Text"} 0' not in body - ) + EntityMetric( + metric_name="sensor_state", + domain="sensor", + friendly_name="Text", + entity="sensor.text", + ).assert_not_in_metrics(body) - assert ( - 'sensor_unit_text{domain="sensor",' - 'entity="sensor.text_unit",' - 'friendly_name="Text Unit"} 0' not in body - ) + EntityMetric( + metric_name="sensor_unit_text", + domain="sensor", + friendly_name="Text Unit", + entity="sensor.text_unit", + ).assert_not_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -276,35 +653,40 @@ async def test_sensor_device_class( """Test prometheus metrics for sensor with a device_class.""" body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.fahrenheit",' - 'friendly_name="Fahrenheit"} 10.0' in body - ) - - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'sensor_power_kwh{domain="sensor",' - 'entity="sensor.radio_energy",' - 'friendly_name="Radio Energy"} 14.0' in body - ) - - assert ( - 'sensor_timestamp_seconds{domain="sensor",' - 'entity="sensor.timestamp",' - 'friendly_name="Timestamp"} 1.691445808136036e+09' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Fahrenheit", + entity="sensor.fahrenheit", + ).withValue(10.0).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_power_kwh", + domain="sensor", + friendly_name="Radio Energy", + entity="sensor.radio_energy", + ).withValue(14.0).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_timestamp_seconds", + domain="sensor", + friendly_name="Timestamp", + entity="sensor.timestamp", + ).withValue(1.691445808136036e09).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -314,23 +696,33 @@ async def test_input_number( """Test prometheus metrics for input_number.""" body = await generate_latest_metrics(client) - assert ( - 'input_number_state{domain="input_number",' - 'entity="input_number.threshold",' - 'friendly_name="Threshold"} 5.2' in body - ) - - assert ( - 'input_number_state{domain="input_number",' - 'entity="input_number.brightness",' - 'friendly_name="None"} 60.0' in body - ) - - assert ( - 'input_number_state_celsius{domain="input_number",' - 'entity="input_number.target_temperature",' - 'friendly_name="Target temperature"} 22.7' in body - ) + EntityMetric( + metric_name="input_number_state", 
+ domain="input_number", + friendly_name="Threshold", + entity="input_number.threshold", + ).withValue(5.2).assert_in_metrics(body) + + EntityMetric( + metric_name="input_number_state", + domain="input_number", + friendly_name="None", + entity="input_number.brightness", + ).withValue(60.0).assert_in_metrics(body) + + EntityMetric( + metric_name="input_number_state_celsius", + domain="input_number", + friendly_name="Target temperature", + entity="input_number.target_temperature", + ).withValue(22.7).assert_in_metrics(body) + + EntityMetric( + metric_name="input_number_state_celsius", + domain="input_number", + friendly_name="Converted temperature", + entity="input_number.converted_temperature", + ).withValue(100).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -340,23 +732,26 @@ async def test_number( """Test prometheus metrics for number.""" body = await generate_latest_metrics(client) - assert ( - 'number_state{domain="number",' - 'entity="number.threshold",' - 'friendly_name="Threshold"} 5.2' in body - ) + EntityMetric( + metric_name="number_state", + domain="number", + friendly_name="Threshold", + entity="number.threshold", + ).withValue(5.2).assert_in_metrics(body) - assert ( - 'number_state{domain="number",' - 'entity="number.brightness",' - 'friendly_name="None"} 60.0' in body - ) + EntityMetric( + metric_name="number_state", + domain="number", + friendly_name="None", + entity="number.brightness", + ).withValue(60.0).assert_in_metrics(body) - assert ( - 'number_state_celsius{domain="number",' - 'entity="number.target_temperature",' - 'friendly_name="Target temperature"} 22.7' in body - ) + EntityMetric( + metric_name="number_state_celsius", + domain="number", + friendly_name="Target temperature", + entity="number.target_temperature", + ).withValue(22.7).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -366,11 +761,12 @@ async def test_battery( """Test prometheus metrics for battery.""" body = await generate_latest_metrics(client) - assert ( - 'battery_level_percent{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 12.0' in body - ) + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(12.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -381,47 +777,56 @@ async def test_climate( """Test prometheus metrics for climate entities.""" body = await generate_latest_metrics(client) - assert ( - 'climate_current_temperature_celsius{domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 25.0' in body - ) - - assert ( - 'climate_target_temperature_celsius{domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 20.0' in body - ) - - assert ( - 'climate_target_temperature_low_celsius{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee"} 21.0' in body - ) - - assert ( - 'climate_target_temperature_high_celsius{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee"} 24.0' in body - ) - - assert ( - 'climate_target_temperature_celsius{domain="climate",' - 'entity="climate.fritzdect",' - 'friendly_name="Fritz!DECT"} 0.0' in body - ) - assert ( - 'climate_preset_mode{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee",' - 'mode="away"} 1.0' in body - ) - assert ( - 'climate_fan_mode{domain="climate",' - 'entity="climate.ecobee",' - 'friendly_name="Ecobee",' - 
'mode="auto"} 1.0' in body - ) + EntityMetric( + metric_name="climate_current_temperature_celsius", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + ).withValue(25.0).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_target_temperature_celsius", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + ).withValue(20.0).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_target_temperature_low_celsius", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + ).withValue(21.0).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_target_temperature_high_celsius", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + ).withValue(24.0).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_target_temperature_celsius", + domain="climate", + friendly_name="Fritz!DECT", + entity="climate.fritzdect", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_preset_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="away", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_fan_mode", + domain="climate", + friendly_name="Ecobee", + entity="climate.ecobee", + mode="auto", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -432,30 +837,35 @@ async def test_humidifier( """Test prometheus metrics for humidifier entities.""" body = await generate_latest_metrics(client) - assert ( - 'humidifier_target_humidity_percent{domain="humidifier",' - 'entity="humidifier.humidifier",' - 'friendly_name="Humidifier"} 68.0' in body - ) - - assert ( - 'humidifier_state{domain="humidifier",' - 'entity="humidifier.dehumidifier",' - 'friendly_name="Dehumidifier"} 1.0' in body - ) - - assert ( - 'humidifier_mode{domain="humidifier",' - 'entity="humidifier.hygrostat",' - 'friendly_name="Hygrostat",' - 'mode="home"} 1.0' in body - ) - assert ( - 'humidifier_mode{domain="humidifier",' - 'entity="humidifier.hygrostat",' - 'friendly_name="Hygrostat",' - 'mode="eco"} 0.0' in body - ) + EntityMetric( + metric_name="humidifier_target_humidity_percent", + domain="humidifier", + friendly_name="Humidifier", + entity="humidifier.humidifier", + ).withValue(68.0).assert_in_metrics(body) + + EntityMetric( + metric_name="humidifier_state", + domain="humidifier", + friendly_name="Dehumidifier", + entity="humidifier.dehumidifier", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="humidifier_mode", + domain="humidifier", + friendly_name="Hygrostat", + entity="humidifier.hygrostat", + mode="home", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="humidifier_mode", + domain="humidifier", + friendly_name="Hygrostat", + entity="humidifier.hygrostat", + mode="eco", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -466,29 +876,33 @@ async def test_attributes( """Test prometheus metrics for entity attributes.""" body = await generate_latest_metrics(client) - assert ( - 'switch_state{domain="switch",' - 'entity="switch.boolean",' - 'friendly_name="Boolean"} 1.0' in body - ) - - assert ( - 'switch_attr_boolean{domain="switch",' - 'entity="switch.boolean",' - 'friendly_name="Boolean"} 1.0' in body - ) - - assert ( - 'switch_state{domain="switch",' - 'entity="switch.number",' - 'friendly_name="Number"} 0.0' in body - ) - - assert ( - 'switch_attr_number{domain="switch",' - 'entity="switch.number",' 
- 'friendly_name="Number"} 10.2' in body - ) + EntityMetric( + metric_name="switch_state", + domain="switch", + friendly_name="Boolean", + entity="switch.boolean", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="switch_attr_boolean", + domain="switch", + friendly_name="Boolean", + entity="switch.boolean", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="switch_state", + domain="switch", + friendly_name="Number", + entity="switch.number", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="switch_attr_number", + domain="switch", + friendly_name="Number", + entity="switch.number", + ).withValue(10.2).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -498,17 +912,19 @@ async def test_binary_sensor( """Test prometheus metrics for binary_sensor.""" body = await generate_latest_metrics(client) - assert ( - 'binary_sensor_state{domain="binary_sensor",' - 'entity="binary_sensor.door",' - 'friendly_name="Door"} 1.0' in body - ) + EntityMetric( + metric_name="binary_sensor_state", + domain="binary_sensor", + friendly_name="Door", + entity="binary_sensor.door", + ).withValue(1).assert_in_metrics(body) - assert ( - 'binary_sensor_state{domain="binary_sensor",' - 'entity="binary_sensor.window",' - 'friendly_name="Window"} 0.0' in body - ) + EntityMetric( + metric_name="binary_sensor_state", + domain="binary_sensor", + friendly_name="Window", + entity="binary_sensor.window", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -518,17 +934,19 @@ async def test_input_boolean( """Test prometheus metrics for input_boolean.""" body = await generate_latest_metrics(client) - assert ( - 'input_boolean_state{domain="input_boolean",' - 'entity="input_boolean.test",' - 'friendly_name="Test"} 1.0' in body - ) + EntityMetric( + metric_name="input_boolean_state", + domain="input_boolean", + friendly_name="Test", + entity="input_boolean.test", + ).withValue(1).assert_in_metrics(body) - assert ( - 'input_boolean_state{domain="input_boolean",' - 'entity="input_boolean.helper",' - 'friendly_name="Helper"} 0.0' in body - ) + EntityMetric( + metric_name="input_boolean_state", + domain="input_boolean", + friendly_name="Helper", + entity="input_boolean.helper", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -538,35 +956,40 @@ async def test_light( """Test prometheus metrics for lights.""" body = await generate_latest_metrics(client) - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.desk",' - 'friendly_name="Desk"} 100.0' in body - ) - - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.wall",' - 'friendly_name="Wall"} 0.0' in body - ) - - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.tv",' - 'friendly_name="TV"} 100.0' in body - ) - - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.pc",' - 'friendly_name="PC"} 70.58823529411765' in body - ) - - assert ( - 'light_brightness_percent{domain="light",' - 'entity="light.hallway",' - 'friendly_name="Hallway"} 100.0' in body - ) + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="Desk", + entity="light.desk", + ).withValue(100.0).assert_in_metrics(body) + + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="Wall", + entity="light.wall", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="light_brightness_percent", + 
domain="light", + friendly_name="TV", + entity="light.tv", + ).withValue(100.0).assert_in_metrics(body) + + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="PC", + entity="light.pc", + ).withValue(70.58823529411765).assert_in_metrics(body) + + EntityMetric( + metric_name="light_brightness_percent", + domain="light", + friendly_name="Hallway", + entity="light.hallway", + ).withValue(100.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -576,17 +999,19 @@ async def test_lock( """Test prometheus metrics for lock.""" body = await generate_latest_metrics(client) - assert ( - 'lock_state{domain="lock",' - 'entity="lock.front_door",' - 'friendly_name="Front Door"} 1.0' in body - ) + EntityMetric( + metric_name="lock_state", + domain="lock", + friendly_name="Front Door", + entity="lock.front_door", + ).withValue(1).assert_in_metrics(body) - assert ( - 'lock_state{domain="lock",' - 'entity="lock.kitchen_door",' - 'friendly_name="Kitchen Door"} 0.0' in body - ) + EntityMetric( + metric_name="lock_state", + domain="lock", + friendly_name="Kitchen Door", + entity="lock.kitchen_door", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -596,42 +1021,48 @@ async def test_fan( """Test prometheus metrics for fan.""" body = await generate_latest_metrics(client) - assert ( - 'fan_state{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 1.0' in body - ) - - assert ( - 'fan_speed_percent{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 33.0' in body - ) - - assert ( - 'fan_is_oscillating{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 1.0' in body - ) - - assert ( - 'fan_direction_reversed{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1"} 0.0' in body - ) - - assert ( - 'fan_preset_mode{domain="fan",' - 'entity="fan.fan_1",' - 'friendly_name="Fan 1",' - 'mode="LO"} 1.0' in body - ) - - assert ( - 'fan_direction_reversed{domain="fan",' - 'entity="fan.fan_2",' - 'friendly_name="Reverse Fan"} 1.0' in body - ) + EntityMetric( + metric_name="fan_state", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="fan_speed_percent", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(33.0).assert_in_metrics(body) + + EntityMetric( + metric_name="fan_is_oscillating", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="fan_direction_reversed", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="fan_preset_mode", + domain="fan", + friendly_name="Fan 1", + entity="fan.fan_1", + mode="LO", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="fan_direction_reversed", + domain="fan", + friendly_name="Reverse Fan", + entity="fan.fan_2", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -642,33 +1073,37 @@ async def test_alarm_control_panel( """Test prometheus metrics for alarm control panel.""" body = await generate_latest_metrics(client) - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 'entity="alarm_control_panel.alarm_control_panel_1",' - 'friendly_name="Alarm Control Panel 1",' - 'state="armed_away"} 1.0' in body - ) - - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 
'entity="alarm_control_panel.alarm_control_panel_1",' - 'friendly_name="Alarm Control Panel 1",' - 'state="disarmed"} 0.0' in body - ) - - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 'entity="alarm_control_panel.alarm_control_panel_2",' - 'friendly_name="Alarm Control Panel 2",' - 'state="armed_home"} 1.0' in body - ) - - assert ( - 'alarm_control_panel_state{domain="alarm_control_panel",' - 'entity="alarm_control_panel.alarm_control_panel_2",' - 'friendly_name="Alarm Control Panel 2",' - 'state="armed_away"} 0.0' in body - ) + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 1", + entity="alarm_control_panel.alarm_control_panel_1", + state="armed_away", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 1", + entity="alarm_control_panel.alarm_control_panel_1", + state="disarmed", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 2", + entity="alarm_control_panel.alarm_control_panel_2", + state="armed_home", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="alarm_control_panel_state", + domain="alarm_control_panel", + friendly_name="Alarm Control Panel 2", + entity="alarm_control_panel.alarm_control_panel_2", + state="armed_away", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -681,55 +1116,61 @@ async def test_cover( open_covers = ["cover_open", "cover_position", "cover_tilt_position"] for testcover in data: - open_metric = ( - f'cover_state{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="open"}} {1.0 if cover_entities[testcover].unique_id in open_covers else 0.0}' - ) - assert open_metric in body - - closed_metric = ( - f'cover_state{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="closed"}} {1.0 if cover_entities[testcover].unique_id == "cover_closed" else 0.0}' - ) - assert closed_metric in body - - opening_metric = ( - f'cover_state{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="opening"}} {1.0 if cover_entities[testcover].unique_id == "cover_opening" else 0.0}' - ) - assert opening_metric in body - - closing_metric = ( - f'cover_state{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}",' - f'state="closing"}} {1.0 if cover_entities[testcover].unique_id == "cover_closing" else 0.0}' - ) - assert closing_metric in body + EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="open", + ).withValue( + 1.0 if cover_entities[testcover].unique_id in open_covers else 0.0 + ).assert_in_metrics(body) + + EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="closed", + ).withValue( + 1.0 if cover_entities[testcover].unique_id == "cover_closed" else 0.0 + ).assert_in_metrics(body) + + 
EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="opening", + ).withValue( + 1.0 if cover_entities[testcover].unique_id == "cover_opening" else 0.0 + ).assert_in_metrics(body) + + EntityMetric( + metric_name="cover_state", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + state="closing", + ).withValue( + 1.0 if cover_entities[testcover].unique_id == "cover_closing" else 0.0 + ).assert_in_metrics(body) if testcover == "cover_position": - position_metric = ( - f'cover_position{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}"' - f"}} 50.0" - ) - assert position_metric in body + EntityMetric( + metric_name="cover_position", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + ).withValue(50.0).assert_in_metrics(body) if testcover == "cover_tilt_position": - tilt_position_metric = ( - f'cover_tilt_position{{domain="cover",' - f'entity="{cover_entities[testcover].entity_id}",' - f'friendly_name="{cover_entities[testcover].original_name}"' - f"}} 50.0" - ) - assert tilt_position_metric in body + EntityMetric( + metric_name="cover_tilt_position", + domain="cover", + friendly_name=cover_entities[testcover].original_name, + entity=cover_entities[testcover].entity_id, + ).withValue(50.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -739,16 +1180,40 @@ async def test_device_tracker( """Test prometheus metrics for device_tracker.""" body = await generate_latest_metrics(client) - assert ( - 'device_tracker_state{domain="device_tracker",' - 'entity="device_tracker.phone",' - 'friendly_name="Phone"} 1.0' in body - ) - assert ( - 'device_tracker_state{domain="device_tracker",' - 'entity="device_tracker.watch",' - 'friendly_name="Watch"} 0.0' in body - ) + EntityMetric( + metric_name="device_tracker_state", + domain="device_tracker", + friendly_name="Phone", + entity="device_tracker.phone", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="device_tracker_state", + domain="device_tracker", + friendly_name="Watch", + entity="device_tracker.watch", + ).withValue(0.0).assert_in_metrics(body) + + +@pytest.mark.parametrize("namespace", [""]) +async def test_person( + client: ClientSessionGenerator, person_entities: dict[str, er.RegistryEntry] +) -> None: + """Test prometheus metrics for person.""" + body = await generate_latest_metrics(client) + + EntityMetric( + metric_name="person_state", + domain="person", + friendly_name="Bob", + entity="person.bob", + ).withValue(1).assert_in_metrics(body) + EntityMetric( + metric_name="person_state", + domain="person", + friendly_name="Alice", + entity="person.alice", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -758,11 +1223,12 @@ async def test_counter( """Test prometheus metrics for counter.""" body = await generate_latest_metrics(client) - assert ( - 'counter_value{domain="counter",' - 'entity="counter.counter",' - 'friendly_name="None"} 2.0' in body - ) + EntityMetric( + metric_name="counter_value", + domain="counter", + friendly_name="None", + entity="counter.counter", + ).withValue(2.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -772,16 +1238,18 @@ async def test_update( """Test prometheus metrics for 
update.""" body = await generate_latest_metrics(client) - assert ( - 'update_state{domain="update",' - 'entity="update.firmware",' - 'friendly_name="Firmware"} 1.0' in body - ) - assert ( - 'update_state{domain="update",' - 'entity="update.addon",' - 'friendly_name="Addon"} 0.0' in body - ) + EntityMetric( + metric_name="update_state", + domain="update", + friendly_name="Firmware", + entity="update.firmware", + ).withValue(1).assert_in_metrics(body) + EntityMetric( + metric_name="update_state", + domain="update", + friendly_name="Addon", + entity="update.addon", + ).withValue(0.0).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -796,43 +1264,49 @@ async def test_renaming_entity_name( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) - - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) - - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 1.0' in body - ) - - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 0.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="cooling", + ).withValue(0.0).assert_in_metrics(body) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -870,44 +1344,50 @@ async def test_renaming_entity_name( assert 'friendly_name="HeatPump"' not in body_line # Check if new metrics created - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature Renamed"} 15.6' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature Renamed"} 1.0' in body - ) - - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 
'friendly_name="HeatPump Renamed"} 1.0' in body - ) - - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump Renamed"} 0.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature Renamed", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature Renamed", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump Renamed", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump Renamed", + entity="climate.heatpump", + action="cooling", + ).withValue(0.0).assert_in_metrics(body) # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -922,29 +1402,33 @@ async def test_renaming_entity_id( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) - - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -964,30 +1448,33 @@ async def test_renaming_entity_id( assert 'entity="sensor.outside_temperature"' not in body_line # Check if new metrics created - assert ( - 
'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature_renamed",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature_renamed",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature_renamed", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature_renamed", + ).withValue(1).assert_in_metrics(body) # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -1002,43 +1489,49 @@ async def test_deleting_entity( data = {**sensor_entities, **climate_entities} body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) - - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) - - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 1.0' in body - ) - - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 0.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + 
entity="climate.heatpump", + action="cooling", + ).withValue(0.0).assert_in_metrics(body) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1056,17 +1549,19 @@ async def test_deleting_entity( assert 'friendly_name="HeatPump"' not in body_line # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) @@ -1083,50 +1578,56 @@ async def test_disabling_entity( await hass.async_block_till_done() body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) - - assert any( - 'state_change_created{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"}' in metric - for metric in body - ) - - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) - - assert ( - 'climate_action{action="heating",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 1.0' in body - ) - - assert ( - 'climate_action{action="cooling",' - 'domain="climate",' - 'entity="climate.heatpump",' - 'friendly_name="HeatPump"} 0.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="state_change_created", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="heating", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="climate_action", + domain="climate", + friendly_name="HeatPump", + entity="climate.heatpump", + action="cooling", + 
).withValue(0.0).assert_in_metrics(body) assert "sensor.outside_temperature" in entity_registry.entities assert "climate.heatpump" in entity_registry.entities @@ -1150,137 +1651,191 @@ async def test_disabling_entity( assert 'friendly_name="HeatPump"' not in body_line # Keep other sensors - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) @pytest.mark.parametrize("namespace", [""]) -async def test_entity_becomes_unavailable_with_export( +@pytest.mark.parametrize("unavailable_state", [STATE_UNAVAILABLE, STATE_UNKNOWN]) +async def test_entity_becomes_unavailable( hass: HomeAssistant, entity_registry: er.EntityRegistry, client: ClientSessionGenerator, sensor_entities: dict[str, er.RegistryEntry], + unavailable_state: str, ) -> None: - """Test an entity that becomes unavailable is still exported.""" + """Test an entity that becomes unavailable/unknown is no longer exported.""" data = {**sensor_entities} await hass.async_block_till_done() body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) - - assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) - - # Make sensor_1 unavailable. 
+ EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(15.6).assert_in_metrics(body) + + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="last_updated_time_seconds", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_in_metrics(body) + + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(12.0).assert_in_metrics(body) + + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + # Make sensor_1 unavailable/unknown. set_state_with_entry( - hass, data["sensor_1"], STATE_UNAVAILABLE, data["sensor_1_attributes"] + hass, data["sensor_1"], unavailable_state, data["sensor_1_attributes"] ) await hass.async_block_till_done() body = await generate_latest_metrics(client) - # Check that only the availability changed on sensor_1. - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 15.6' in body - ) - - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 2.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 0.0' in body - ) + # Check that the availability changed on sensor_1 and the metric with the value is gone. + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_not_in_metrics(body) + + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_not_in_metrics(body) + + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(2.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(0.0).assert_in_metrics(body) + + EntityMetric( + metric_name="last_updated_time_seconds", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).assert_in_metrics(body) # The other sensor should be unchanged. 
- assert ( - 'sensor_humidity_percent{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 54.0' in body - ) - - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_humidity",' - 'friendly_name="Outside Humidity"} 1.0' in body - ) - - # Bring sensor_1 back and check that it is correct. - set_state_with_entry(hass, data["sensor_1"], 200.0, data["sensor_1_attributes"]) + EntityMetric( + metric_name="sensor_humidity_percent", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(54.0).assert_in_metrics(body) + + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Humidity", + entity="sensor.outside_humidity", + ).withValue(1).assert_in_metrics(body) + + # Bring sensor_1 back and check that it returned. + set_state_with_entry(hass, data["sensor_1"], 201.0, data["sensor_1_attributes"]) await hass.async_block_till_done() body = await generate_latest_metrics(client) - assert ( - 'sensor_temperature_celsius{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 200.0' in body - ) - - assert ( - 'state_change_total{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 3.0' in body - ) - - assert ( - 'entity_available{domain="sensor",' - 'entity="sensor.outside_temperature",' - 'friendly_name="Outside Temperature"} 1.0' in body - ) + EntityMetric( + metric_name="sensor_temperature_celsius", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(201.0).assert_in_metrics(body) + + EntityMetric( + metric_name="battery_level_percent", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(12.0).assert_in_metrics(body) + + EntityMetric( + metric_name="state_change_total", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(3.0).assert_in_metrics(body) + + EntityMetric( + metric_name="entity_available", + domain="sensor", + friendly_name="Outside Temperature", + entity="sensor.outside_temperature", + ).withValue(1).assert_in_metrics(body) @pytest.fixture(name="sensor_entities") @@ -1697,6 +2252,17 @@ async def input_number_fixture( set_state_with_entry(hass, input_number_3, 22.7) data["input_number_3"] = input_number_3 + input_number_4 = entity_registry.async_get_or_create( + domain=input_number.DOMAIN, + platform="test", + unique_id="input_number_4", + suggested_object_id="converted_temperature", + original_name="Converted temperature", + unit_of_measurement=UnitOfTemperature.FAHRENHEIT, + ) + set_state_with_entry(hass, input_number_4, 212) + data["input_number_4"] = input_number_4 + await hass.async_block_till_done() return data @@ -1955,7 +2521,7 @@ async def alarm_control_panel_fixture( suggested_object_id="alarm_control_panel_1", original_name="Alarm Control Panel 1", ) - set_state_with_entry(hass, alarm_control_panel_1, STATE_ALARM_ARMED_AWAY) + set_state_with_entry(hass, alarm_control_panel_1, AlarmControlPanelState.ARMED_AWAY) data["alarm_control_panel_1"] = 
alarm_control_panel_1 alarm_control_panel_2 = entity_registry.async_get_or_create( @@ -1965,7 +2531,7 @@ async def alarm_control_panel_fixture( suggested_object_id="alarm_control_panel_2", original_name="Alarm Control Panel 2", ) - set_state_with_entry(hass, alarm_control_panel_2, STATE_ALARM_ARMED_HOME) + set_state_with_entry(hass, alarm_control_panel_2, AlarmControlPanelState.ARMED_HOME) data["alarm_control_panel_2"] = alarm_control_panel_2 await hass.async_block_till_done() diff --git a/tests/components/prosegur/test_alarm_control_panel.py b/tests/components/prosegur/test_alarm_control_panel.py index f66d070f2183de..4e3dcdc3fd8fc4 100644 --- a/tests/components/prosegur/test_alarm_control_panel.py +++ b/tests/components/prosegur/test_alarm_control_panel.py @@ -6,7 +6,10 @@ from pyprosegur.installation import Status import pytest -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -14,9 +17,6 @@ SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_DISARMED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -93,9 +93,13 @@ async def test_connection_error( @pytest.mark.parametrize( ("code", "alarm_service", "alarm_state"), [ - (Status.ARMED, SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY), - (Status.PARTIALLY, SERVICE_ALARM_ARM_HOME, STATE_ALARM_ARMED_HOME), - (Status.DISARMED, SERVICE_ALARM_DISARM, STATE_ALARM_DISARMED), + (Status.ARMED, SERVICE_ALARM_ARM_AWAY, AlarmControlPanelState.ARMED_AWAY), + ( + Status.PARTIALLY, + SERVICE_ALARM_ARM_HOME, + AlarmControlPanelState.ARMED_HOME, + ), + (Status.DISARMED, SERVICE_ALARM_DISARM, AlarmControlPanelState.DISARMED), ], ) async def test_arm( diff --git a/tests/components/push/test_camera.py b/tests/components/push/test_camera.py index df296e7cb571ff..0088aa6a9c2097 100644 --- a/tests/components/push/test_camera.py +++ b/tests/components/push/test_camera.py @@ -4,8 +4,8 @@ from http import HTTPStatus import io -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util diff --git a/tests/components/pyload/test_config_flow.py b/tests/components/pyload/test_config_flow.py index a3966987ae2862..5ada856d78e6d3 100644 --- a/tests/components/pyload/test_config_flow.py +++ b/tests/components/pyload/test_config_flow.py @@ -250,7 +250,7 @@ async def test_reconfiguration( result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -285,7 +285,7 @@ async def test_reconfigure_errors( result = await config_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" mock_pyloadapi.login.side_effect = side_effect result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/qnap_qsw/test_sensor.py b/tests/components/qnap_qsw/test_sensor.py index 646058add62482..16335e878fddf4 100644 --- 
a/tests/components/qnap_qsw/test_sensor.py +++ b/tests/components/qnap_qsw/test_sensor.py @@ -1,19 +1,27 @@ """The sensor tests for the QNAP QSW platform.""" +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.qnap_qsw.const import ATTR_MAX +from homeassistant.components.qnap_qsw.const import ATTR_MAX, DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir -from .util import async_init_integration +from .util import async_init_integration, init_config_entry @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_qnap_qsw_create_sensors( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, ) -> None: """Test creation of sensors.""" + await hass.config.async_set_time_zone("UTC") + freezer.move_to("2024-07-25 12:00:00+00:00") await async_init_integration(hass) state = hass.states.get("sensor.qsw_m408_4c_fan_1_speed") @@ -45,8 +53,8 @@ async def test_qnap_qsw_create_sensors( state = hass.states.get("sensor.qsw_m408_4c_tx_speed") assert state.state == "0" - state = hass.states.get("sensor.qsw_m408_4c_uptime") - assert state.state == "91" + state = hass.states.get("sensor.qsw_m408_4c_uptime_timestamp") + assert state.state == "2024-07-25T11:58:29+00:00" # LACP Ports state = hass.states.get("sensor.qsw_m408_4c_lacp_port_1_link_speed") @@ -373,3 +381,60 @@ async def test_qnap_qsw_create_sensors( state = hass.states.get("sensor.qsw_m408_4c_port_12_tx_speed") assert state.state == "0" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_deprecated_uptime_seconds( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test deprecation warning of the Uptime seconds sensor entity.""" + original_id = "sensor.qsw_m408_4c_uptime" + domain = Platform.SENSOR + + config_entry = init_config_entry(hass) + + entity = entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=None, + ) + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + + with patch( + "homeassistant.components.qnap_qsw.sensor.automations_with_entity", + return_value=["item"], + ): + await async_init_integration(hass, config_entry=config_entry) + assert issue_registry.async_get_issue( + DOMAIN, f"uptime_seconds_deprecated_{entity.entity_id}_item" + ) + + +async def test_cleanup_deprecated_uptime_seconds( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleanup of the Uptime seconds sensor entity.""" + original_id = "sensor.qsw_m408_4c_uptime_seconds" + domain = Platform.SENSOR + + config_entry = init_config_entry(hass) + + entity_registry.async_get_or_create( + domain=domain, + platform=DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + + await async_init_integration(hass, config_entry=config_entry) diff --git a/tests/components/qnap_qsw/util.py b/tests/components/qnap_qsw/util.py index 63238bb30a16c2..5132c1061ec591 100644 --- a/tests/components/qnap_qsw/util.py +++ b/tests/components/qnap_qsw/util.py @@ -491,11 +491,10 @@ } -async def async_init_integration( +def init_config_entry( hass: 
HomeAssistant, -) -> None: - """Set up the QNAP QSW integration in Home Assistant.""" - +) -> MockConfigEntry: + """Set up the QNAP QSW entry in Home Assistant.""" config_entry = MockConfigEntry( data=CONFIG, domain=DOMAIN, @@ -503,6 +502,18 @@ async def async_init_integration( ) config_entry.add_to_hass(hass) + return config_entry + + +async def async_init_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry | None = None, +) -> None: + """Set up the QNAP QSW integration in Home Assistant.""" + + if config_entry is None: + config_entry = init_config_entry(hass) + with ( patch( "homeassistant.components.qnap_qsw.QnapQswApi.get_firmware_condition", diff --git a/tests/components/rachio/test_config_flow.py b/tests/components/rachio/test_config_flow.py index 1eaec1bc46e0f9..586b31b092f0ab 100644 --- a/tests/components/rachio/test_config_flow.py +++ b/tests/components/rachio/test_config_flow.py @@ -183,3 +183,16 @@ async def test_form_homekit_ignored(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_options_flow(hass: HomeAssistant) -> None: + """Test option flow.""" + entry = MockConfigEntry(domain=DOMAIN, data={CONF_API_KEY: "api_key"}) + entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # This should be improved at a later stage to increase test coverage + hass.config_entries.options.async_abort(result["flow_id"]) diff --git a/tests/components/rainforest_raven/__init__.py b/tests/components/rainforest_raven/__init__.py index 9d40652b42d78f..ead1bb2ad3f0a9 100644 --- a/tests/components/rainforest_raven/__init__.py +++ b/tests/components/rainforest_raven/__init__.py @@ -1,5 +1,7 @@ """Tests for the Rainforest RAVEn component.""" +from unittest.mock import AsyncMock + from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.const import CONF_DEVICE, CONF_MAC @@ -14,7 +16,7 @@ SUMMATION, ) -from tests.common import AsyncMock, MockConfigEntry +from tests.common import MockConfigEntry def create_mock_device() -> AsyncMock: @@ -42,4 +44,5 @@ def create_mock_entry(no_meters: bool = False) -> MockConfigEntry: CONF_DEVICE: DISCOVERY_INFO.device, CONF_MAC: [] if no_meters else [METER_INFO[None].meter_mac_id.hex()], }, + entry_id="01JADXBJSPYEBAFPKGXDJWZBQ8", ) diff --git a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000000..e131bf3d952b0d --- /dev/null +++ b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr @@ -0,0 +1,107 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'device': '/dev/ttyACM0', + 'mac': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'rainforest_raven', + 'entry_id': '01JADXBJSPYEBAFPKGXDJWZBQ8', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'data': dict({ + 'Meters': dict({ + '**REDACTED0**': dict({ + 'CurrentSummationDelivered': dict({ + 'device_mac_id': '**REDACTED**', + 'meter_mac_id': '**REDACTED**', + 'summation_delivered': '23456.7890', + 'summation_received': '00000.0000', + 'time_stamp': None, + }), + 
'InstantaneousDemand': dict({ + 'demand': '1.2345', + 'device_mac_id': '**REDACTED**', + 'meter_mac_id': '**REDACTED**', + 'time_stamp': None, + }), + 'PriceCluster': dict({ + 'currency': dict({ + '__type': "", + 'repr': "", + }), + 'device_mac_id': '**REDACTED**', + 'meter_mac_id': '**REDACTED**', + 'price': '0.10', + 'rate_label': 'Set by user', + 'tier': 3, + 'tier_label': 'Set by user', + 'time_stamp': None, + }), + }), + }), + 'NetworkInfo': dict({ + 'channel': 13, + 'coord_mac_id': None, + 'description': None, + 'device_mac_id': '**REDACTED**', + 'ext_pan_id': None, + 'link_strength': 100, + 'short_addr': None, + 'status': None, + 'status_code': None, + }), + }), + }) +# --- +# name: test_entry_diagnostics_no_meters + dict({ + 'config_entry': dict({ + 'data': dict({ + 'device': '/dev/ttyACM0', + 'mac': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'rainforest_raven', + 'entry_id': '01JADXBJSPYEBAFPKGXDJWZBQ8', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'data': dict({ + 'Meters': dict({ + }), + 'NetworkInfo': dict({ + 'channel': 13, + 'coord_mac_id': None, + 'description': None, + 'device_mac_id': '**REDACTED**', + 'ext_pan_id': None, + 'link_strength': 100, + 'short_addr': None, + 'status': None, + 'status_code': None, + }), + }), + }) +# --- diff --git a/tests/components/rainforest_raven/snapshots/test_init.ambr b/tests/components/rainforest_raven/snapshots/test_init.ambr new file mode 100644 index 00000000000000..768bbc729d4244 --- /dev/null +++ b/tests/components/rainforest_raven/snapshots/test_init.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_device_registry[None-0] + list([ + ]) +# --- +# name: test_device_registry[device_info0-1] + list([ + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '2.7.3', + 'id': , + 'identifiers': set({ + tuple( + 'rainforest_raven', + 'abcdef0123456789', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Rainforest Automation, Inc.', + 'model': 'Z105-2-EMU2-LEDD_JM', + 'model_id': 'Z105-2-EMU2-LEDD_JM', + 'name': 'RAVEn Device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '2.0.0 (7400)', + 'via_device_id': None, + }), + ]) +# --- diff --git a/tests/components/rainforest_raven/snapshots/test_sensor.ambr b/tests/components/rainforest_raven/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..34a5e031885dbb --- /dev/null +++ b/tests/components/rainforest_raven/snapshots/test_sensor.ambr @@ -0,0 +1,257 @@ +# serializer version: 1 +# name: test_sensors[sensor.raven_device_meter_power_demand-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_meter_power_demand', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter power demand', + 'platform': 'rainforest_raven', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_demand', + 'unique_id': '1234567890abcdef.InstantaneousDemand.demand', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.raven_device_meter_power_demand-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'RAVEn Device Meter power demand', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.raven_device_meter_power_demand', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2345', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_meter_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter price', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_price', + 'unique_id': '1234567890abcdef.PriceCluster.price', + 'unit_of_measurement': 'USD/kWh', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'RAVEn Device Meter price', + 'rate_label': 'Set by user', + 'state_class': , + 'tier': 3, + 'unit_of_measurement': 'USD/kWh', + }), + 'context': , + 'entity_id': 'sensor.raven_device_meter_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.10', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.raven_device_meter_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter signal strength', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_strength', + 'unique_id': 'abcdef0123456789.NetworkInfo.link_strength', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.raven_device_meter_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'channel': 13, + 'friendly_name': 'RAVEn Device Meter signal strength', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.raven_device_meter_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total meter energy delivered', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_delivered', + 'unique_id': '1234567890abcdef.CurrentSummationDelivered.summation_delivered', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_delivered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'RAVEn Device Total meter energy delivered', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.raven_device_total_meter_energy_delivered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23456.7890', + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_received-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.raven_device_total_meter_energy_received', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total meter energy received', + 'platform': 'rainforest_raven', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_received', + 'unique_id': '1234567890abcdef.CurrentSummationDelivered.summation_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.raven_device_total_meter_energy_received-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'RAVEn Device Total meter energy received', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.raven_device_total_meter_energy_received', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '00000.0000', + }) +# --- diff --git a/tests/components/rainforest_raven/test_coordinator.py b/tests/components/rainforest_raven/test_coordinator.py deleted file mode 100644 index db70118f7b9e18..00000000000000 --- a/tests/components/rainforest_raven/test_coordinator.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Tests for the Rainforest RAVEn data coordinator.""" - -import asyncio -import functools -from unittest.mock import AsyncMock - -from aioraven.device import RAVEnConnectionError -import pytest - -from homeassistant.components.rainforest_raven.coordinator import RAVEnDataCoordinator -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady - -from . 
import create_mock_entry - - -@pytest.mark.usefixtures("mock_device") -async def test_coordinator_device_info(hass: HomeAssistant) -> None: - """Test reporting device information from the coordinator.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - assert coordinator.device_fw_version is None - assert coordinator.device_hw_version is None - assert coordinator.device_info is None - assert coordinator.device_mac_address is None - assert coordinator.device_manufacturer is None - assert coordinator.device_model is None - assert coordinator.device_name == "RAVEn Device" - - await coordinator.async_config_entry_first_refresh() - - assert coordinator.device_fw_version == "2.0.0 (7400)" - assert coordinator.device_hw_version == "2.7.3" - assert coordinator.device_info - assert coordinator.device_mac_address - assert coordinator.device_manufacturer == "Rainforest Automation, Inc." - assert coordinator.device_model == "Z105-2-EMU2-LEDD_JM" - assert coordinator.device_name == "RAVEn Device" - - -async def test_coordinator_cache_device( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test that the device isn't re-opened for subsequent refreshes.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - await coordinator.async_config_entry_first_refresh() - assert mock_device.get_network_info.call_count == 1 - assert mock_device.open.call_count == 1 - - await coordinator.async_refresh() - assert mock_device.get_network_info.call_count == 2 - assert mock_device.open.call_count == 1 - - -async def test_coordinator_device_error_setup( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of a device error during initialization.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - mock_device.get_network_info.side_effect = RAVEnConnectionError - with pytest.raises(ConfigEntryNotReady): - await coordinator.async_config_entry_first_refresh() - - -async def test_coordinator_device_error_update( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of a device error during an update.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - await coordinator.async_config_entry_first_refresh() - assert coordinator.last_update_success is True - - mock_device.get_network_info.side_effect = RAVEnConnectionError - await coordinator.async_refresh() - assert coordinator.last_update_success is False - - -async def test_coordinator_device_timeout_update( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of a device timeout during an update.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - await coordinator.async_config_entry_first_refresh() - assert coordinator.last_update_success is True - - mock_device.get_network_info.side_effect = functools.partial(asyncio.sleep, 10) - await coordinator.async_refresh() - assert coordinator.last_update_success is False - - -async def test_coordinator_comm_error( - hass: HomeAssistant, mock_device: AsyncMock -) -> None: - """Test handling of an error parsing or reading raw device data.""" - entry = create_mock_entry() - coordinator = RAVEnDataCoordinator(hass, entry) - - mock_device.synchronize.side_effect = RAVEnConnectionError - with pytest.raises(ConfigEntryNotReady): - await coordinator.async_config_entry_first_refresh() diff --git a/tests/components/rainforest_raven/test_diagnostics.py 
b/tests/components/rainforest_raven/test_diagnostics.py index 93cf12b434f6dc..ae231b3c8c246a 100644 --- a/tests/components/rainforest_raven/test_diagnostics.py +++ b/tests/components/rainforest_raven/test_diagnostics.py @@ -1,22 +1,24 @@ """Test the Rainforest Eagle diagnostics.""" -from dataclasses import asdict +from unittest.mock import AsyncMock import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props -from homeassistant.components.diagnostics import REDACTED -from homeassistant.const import CONF_MAC from homeassistant.core import HomeAssistant from . import create_mock_entry -from .const import DEMAND, NETWORK_INFO, PRICE_CLUSTER, SUMMATION +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @pytest.fixture -async def mock_entry_no_meters(hass: HomeAssistant, mock_device): +async def mock_entry_no_meters( + hass: HomeAssistant, mock_device: AsyncMock +) -> MockConfigEntry: """Mock a RAVEn config entry with no meters.""" mock_entry = create_mock_entry(True) mock_entry.add_to_hass(hass) @@ -28,61 +30,23 @@ async def mock_entry_no_meters(hass: HomeAssistant, mock_device): async def test_entry_diagnostics_no_meters( hass: HomeAssistant, hass_client: ClientSessionGenerator, - mock_device, - mock_entry_no_meters, + mock_entry_no_meters: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: """Test RAVEn diagnostics before the coordinator has updated.""" result = await get_diagnostics_for_config_entry( hass, hass_client, mock_entry_no_meters ) - - config_entry_dict = mock_entry_no_meters.as_dict() - config_entry_dict["data"][CONF_MAC] = REDACTED - - assert result == { - "config_entry": config_entry_dict | {"discovery_keys": {}}, - "data": { - "Meters": {}, - "NetworkInfo": {**asdict(NETWORK_INFO), "device_mac_id": REDACTED}, - }, - } + assert result == snapshot(exclude=props("created_at", "modified_at")) async def test_entry_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_device, mock_entry + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_entry: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: """Test RAVEn diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) - config_entry_dict = mock_entry.as_dict() - config_entry_dict["data"][CONF_MAC] = REDACTED - - assert result == { - "config_entry": config_entry_dict | {"discovery_keys": {}}, - "data": { - "Meters": { - "**REDACTED0**": { - "CurrentSummationDelivered": { - **asdict(SUMMATION), - "device_mac_id": REDACTED, - "meter_mac_id": REDACTED, - }, - "InstantaneousDemand": { - **asdict(DEMAND), - "device_mac_id": REDACTED, - "meter_mac_id": REDACTED, - }, - "PriceCluster": { - **asdict(PRICE_CLUSTER), - "device_mac_id": REDACTED, - "meter_mac_id": REDACTED, - "currency": { - "__type": str(type(PRICE_CLUSTER.currency)), - "repr": repr(PRICE_CLUSTER.currency), - }, - }, - }, - }, - "NetworkInfo": {**asdict(NETWORK_INFO), "device_mac_id": REDACTED}, - }, - } + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/rainforest_raven/test_init.py b/tests/components/rainforest_raven/test_init.py index 974c45150a6857..acd1f606a078a0 100644 --- a/tests/components/rainforest_raven/test_init.py +++ b/tests/components/rainforest_raven/test_init.py @@ -1,8 +1,19 @@ """Tests for the Rainforest RAVEn component initialisation.""" +from unittest.mock import 
AsyncMock + +from aioraven.data import DeviceInfo as RAVenDeviceInfo +from aioraven.device import RAVEnConnectionError +import pytest +from syrupy.assertion import SnapshotAssertion + from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import create_mock_entry +from .const import DEVICE_INFO from tests.common import MockConfigEntry @@ -18,4 +29,55 @@ async def test_load_unload_entry( await hass.async_block_till_done() assert mock_entry.state is ConfigEntryState.NOT_LOADED - assert not hass.data.get(DOMAIN) + + +@pytest.mark.parametrize( + ("device_info", "device_count"), + [(DEVICE_INFO, 1), (None, 0)], +) +async def test_device_registry( + hass: HomeAssistant, + mock_device: AsyncMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + device_info: RAVenDeviceInfo | None, + device_count: int, +) -> None: + """Test device registry, including if get_device_info returns None.""" + mock_device.get_device_info.return_value = device_info + entry = create_mock_entry() + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.LOADED + + assert len(hass.states.async_all()) == 5 + + entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) + assert len(entries) == device_count + assert entries == snapshot + + +async def test_synchronize_error(hass: HomeAssistant, mock_device: AsyncMock) -> None: + """Test handling of an error parsing or reading raw device data.""" + entry = create_mock_entry() + entry.add_to_hass(hass) + + mock_device.synchronize.side_effect = RAVEnConnectionError + + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_get_network_info_error( + hass: HomeAssistant, mock_device: AsyncMock +) -> None: + """Test handling of a device error during initialization.""" + entry = create_mock_entry() + entry.add_to_hass(hass) + + mock_device.get_network_info.side_effect = RAVEnConnectionError + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/rainforest_raven/test_sensor.py b/tests/components/rainforest_raven/test_sensor.py index 3b859621cb46a1..2319b628374397 100644 --- a/tests/components/rainforest_raven/test_sensor.py +++ b/tests/components/rainforest_raven/test_sensor.py @@ -1,36 +1,102 @@ """Tests for the Rainforest RAVEn sensors.""" +from datetime import timedelta +from unittest.mock import AsyncMock + +from aioraven.device import RAVEnConnectionError +from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy.assertion import SnapshotAssertion +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .const import NETWORK_INFO + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("mock_entry") -async def test_sensors(hass: HomeAssistant) -> None: +async def test_sensors( + hass: HomeAssistant, + mock_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: """Test the sensors.""" assert len(hass.states.async_all()) == 5 - demand = hass.states.get("sensor.raven_device_meter_power_demand") - assert demand is not 
None - assert demand.state == "1.2345" - assert demand.attributes["unit_of_measurement"] == "kW" - - delivered = hass.states.get("sensor.raven_device_total_meter_energy_delivered") - assert delivered is not None - assert delivered.state == "23456.7890" - assert delivered.attributes["unit_of_measurement"] == "kWh" - - received = hass.states.get("sensor.raven_device_total_meter_energy_received") - assert received is not None - assert received.state == "00000.0000" - assert received.attributes["unit_of_measurement"] == "kWh" - - price = hass.states.get("sensor.raven_device_meter_price") - assert price is not None - assert price.state == "0.10" - assert price.attributes["unit_of_measurement"] == "USD/kWh" - - signal = hass.states.get("sensor.raven_device_meter_signal_strength") - assert signal is not None - assert signal.state == "100" - assert signal.attributes["unit_of_measurement"] == "%" + await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) + + +@pytest.mark.usefixtures("mock_entry") +async def test_device_update_error( + hass: HomeAssistant, + mock_device: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handling of a device error during an update.""" + mock_device.get_network_info.side_effect = (RAVEnConnectionError, NETWORK_INFO) + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state == STATE_UNAVAILABLE for state in states) + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) + + +@pytest.mark.usefixtures("mock_entry") +async def test_device_update_timeout( + hass: HomeAssistant, mock_device: AsyncMock, freezer: FrozenDateTimeFactory +) -> None: + """Test handling of a device timeout during an update.""" + mock_device.get_network_info.side_effect = (TimeoutError, NETWORK_INFO) + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state == STATE_UNAVAILABLE for state in states) + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + + states = hass.states.async_all() + assert len(states) == 5 + assert all(state.state != STATE_UNAVAILABLE for state in states) + + +@pytest.mark.usefixtures("mock_entry") +async def test_device_cache( + hass: HomeAssistant, mock_device: AsyncMock, freezer: FrozenDateTimeFactory +) -> None: + """Test that the device isn't re-opened for subsequent refreshes.""" + assert mock_device.get_network_info.call_count == 1 + assert mock_device.open.call_count == 1 + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_device.get_network_info.call_count == 2 + assert mock_device.open.call_count == 1 diff --git a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py index a2cf41578c7115..9e287d13594d8b 100644 --- 
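# --- Illustrative note, not part of the diff above ---
# The deleted rainforest_raven/test_coordinator.py cases are superseded by the
# integration-level sensor tests shown here: rather than instantiating the
# coordinator directly, they advance mocked time with freezegun and fire the
# time-changed event so the coordinator refreshes on its normal schedule. A
# minimal sketch of that driving step follows; the helper name is an assumption
# and the 60 s step simply mirrors the interval these tests tick.
from datetime import timedelta

from freezegun.api import FrozenDateTimeFactory

from homeassistant.core import HomeAssistant

from tests.common import async_fire_time_changed


async def _drive_one_refresh(
    hass: HomeAssistant, freezer: FrozenDateTimeFactory
) -> None:
    """Advance mocked time far enough for the next coordinator poll to run."""
    freezer.tick(timedelta(seconds=60))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()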
a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py +++ b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py @@ -189,6 +189,9 @@ def get_statistics_meta(hass: HomeAssistant) -> list: patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_28, @@ -306,6 +309,9 @@ def get_statistics_meta(hass: HomeAssistant) -> list: patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_28, diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 18e58d9e572937..60168f5e6ef887 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -445,9 +445,8 @@ def old_db_schema(schema_version_postfix: str) -> Iterator[None]: with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 0e473b702efb06..14978bee5a949d 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -95,7 +95,13 @@ async def test_schema_update_calls( hass, engine, session_maker, - migration.SchemaValidationStatus(0, True, set(), 0), + migration.SchemaValidationStatus( + current_version=0, + migration_needed=True, + non_live_data_migration_needed=True, + schema_errors=set(), + start_version=0, + ), 42, ), call( @@ -103,7 +109,13 @@ async def test_schema_update_calls( hass, engine, session_maker, - migration.SchemaValidationStatus(42, True, set(), 0), + migration.SchemaValidationStatus( + current_version=42, + migration_needed=True, + non_live_data_migration_needed=True, + schema_errors=set(), + start_version=0, + ), db_schema.SCHEMA_VERSION, ), ] diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 8a54a752989d4f..dcf2d792407bac 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -49,6 +49,7 @@ async_recorder_block_till_done, async_wait_recording_done, ) +from .conftest import instrument_migration from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator @@ -105,9 +106,8 @@ def db_schema_32(): with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", 
old_db_schema.EventData), @@ -120,13 +120,13 @@ def db_schema_32(): yield +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_events_context_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -219,18 +219,28 @@ def _insert_events(): ) ) - await recorder_mock.async_add_executor_job(_insert_events) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventsContextIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_events) - await async_wait_recording_done(hass) - now = dt_util.utcnow() - expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[0:6] - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + now = dt_util.utcnow() + expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[ + 0:6 + ] + await _async_wait_migration_done(hass) - with freeze_time(now): - # This is a threadsafe way to add a task to the recorder - migrator = migration.EventsContextIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + await hass.async_stop() + await hass.async_block_till_done() def _object_as_dict(obj): return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} @@ -256,7 +266,38 @@ def _fetch_migrated_events(): assert len(events) == 6 return {event.event_type: _object_as_dict(event) for event in events} - events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) + # Run again with new schema, let migration run + async with async_test_home_assistant() as hass: + with freeze_time(now), instrument_migration(hass) as instrumented_migration: + async with async_test_recorder( + hass, wait_recorder=False, wait_recorder_setup=False + ) as instance: + # Check the context ID migrator is considered non-live + assert recorder.util.async_migration_is_live(hass) is False + instrumented_migration.migration_stall.set() + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + events_by_type = await instance.async_add_executor_job( + _fetch_migrated_events + ) + + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + # Check the index which will be removed by the migrator no longer exists + with session_scope(hass=hass) as session: + assert ( + get_index_by_name(session, "events", "ix_events_context_id") + is None + ) + + await hass.async_stop() + await hass.async_block_till_done() old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] assert old_uuid_context_id_event["context_id"] is None @@ -327,18 +368,11 @@ def _fetch_migrated_events(): 
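# --- Illustrative sketch, not part of the diff above ---
# The recurring change across these recorder tests is one extra patch: alongside
# pinning SCHEMA_VERSION to the historical schema module, the new
# migration.non_live_data_migration_needed check is forced to False so the
# recorder can start against the old schema without waiting for non-live
# migrations. A helper capturing that patch stack might look as follows; the
# helper name and the ExitStack wrapper are assumptions, not code from this PR.
from contextlib import ExitStack, contextmanager
from unittest.mock import patch

from homeassistant.components import recorder
from homeassistant.components.recorder import core, migration


@contextmanager
def _pin_old_schema(old_db_schema):
    """Force the recorder to run against a historical schema module."""
    with ExitStack() as stack:
        stack.enter_context(patch.object(recorder, "db_schema", old_db_schema))
        stack.enter_context(
            patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION)
        )
        # New in this change set: skip the non-live data migration check.
        stack.enter_context(
            patch.object(
                migration, "non_live_data_migration_needed", return_value=False
            )
        )
        stack.enter_context(patch.object(core, "StatesMeta", old_db_schema.StatesMeta))
        stack.enter_context(patch.object(core, "EventTypes", old_db_schema.EventTypes))
        stack.enter_context(patch.object(core, "EventData", old_db_schema.EventData))
        yield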
event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None ) - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.EventsContextIDMigration.migration_id] == migration.EventsContextIDMigration.migration_version ) - # Check the index which will be removed by the migrator no longer exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "events", "ix_events_context_id") is None - @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) @@ -448,13 +482,13 @@ def _insert_migration(): await hass.async_block_till_done() +@pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) -@pytest.mark.usefixtures("db_schema_32") +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_states_context_ids( - hass: HomeAssistant, recorder_mock: Recorder + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -529,12 +563,24 @@ def _insert_states(): ) ) - await recorder_mock.async_add_executor_job(_insert_states) + # Create database with old schema + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.StatesContextIDMigration, "migrate_data"), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await instance.async_add_executor_job(_insert_states) - await async_wait_recording_done(hass) - migrator = migration.StatesContextIDMigration(None, None) - recorder_mock.queue_task(migrator.task(migrator)) - await _async_wait_migration_done(hass) + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + + await hass.async_stop() + await hass.async_block_till_done() def _object_as_dict(obj): return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} @@ -560,9 +606,38 @@ def _fetch_migrated_states(): assert len(events) == 6 return {state.entity_id: _object_as_dict(state) for state in events} - states_by_entity_id = await recorder_mock.async_add_executor_job( - _fetch_migrated_states - ) + # Run again with new schema, let migration run + async with async_test_home_assistant() as hass: + with instrument_migration(hass) as instrumented_migration: + async with async_test_recorder( + hass, wait_recorder=False, wait_recorder_setup=False + ) as instance: + # Check the context ID migrator is considered non-live + assert recorder.util.async_migration_is_live(hass) is False + instrumented_migration.migration_stall.set() + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + states_by_entity_id = await instance.async_add_executor_job( + _fetch_migrated_states + ) + + migration_changes = await instance.async_add_executor_job( + _get_migration_id, hass + ) + + # Check the index which will be removed by the migrator no longer exists + with session_scope(hass=hass) as session: + assert ( + get_index_by_name(session, 
"states", "ix_states_context_id") + is None + ) + + await hass.async_stop() + await hass.async_block_till_done() old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] assert old_uuid_context_id["context_id"] is None @@ -637,18 +712,11 @@ def _fetch_migrated_states(): == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" ) - migration_changes = await recorder_mock.async_add_executor_job( - _get_migration_id, hass - ) assert ( migration_changes[migration.StatesContextIDMigration.migration_id] == migration.StatesContextIDMigration.migration_version ) - # Check the index which will be removed by the migrator no longer exists - with session_scope(hass=hass) as session: - assert get_index_by_name(session, "states", "ix_states_context_id") is None - @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @@ -797,6 +865,7 @@ def _insert_events(): migrator = migration.EventTypeIDMigration(None, None) recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) + await _async_wait_migration_done(hass) def _fetch_migrated_events(): with session_scope(hass=hass, read_only=True) as session: @@ -887,6 +956,7 @@ def _insert_states(): migrator = migration.EntityIDMigration(old_db_schema.SCHEMA_VERSION, {}) recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) await _async_wait_migration_done(hass) + await _async_wait_migration_done(hass) def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -966,6 +1036,7 @@ def _insert_events(): migrator = migration.EntityIDPostMigration(None, None) recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) + await _async_wait_migration_done(hass) def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -1024,6 +1095,7 @@ def _insert_states(): migrator = migration.EntityIDMigration(old_db_schema.SCHEMA_VERSION, {}) recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) await _async_wait_migration_done(hass) + await _async_wait_migration_done(hass) def _fetch_migrated_states(): with session_scope(hass=hass, read_only=True) as session: @@ -1108,6 +1180,7 @@ def _insert_events(): migrator = migration.EventTypeIDMigration(None, None) recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) + await _async_wait_migration_done(hass) def _fetch_migrated_events(): with session_scope(hass=hass, read_only=True) as session: @@ -1763,6 +1836,7 @@ def _get_index_names(table): with ( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): async with ( diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index 880e4d6d61e80a..93fa16b8364cad 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -94,9 +94,8 @@ async def test_migration_changes_prevent_trying_to_migrate_again( # Start with db schema that needs migration (version 32) with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + 
patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), diff --git a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index 53c59635e8c376..1f9be0cabeed15 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -168,6 +168,9 @@ async def test_delete_duplicates( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -352,6 +355,9 @@ async def test_delete_duplicates_many( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -515,6 +521,9 @@ async def test_delete_duplicates_non_identical( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( @@ -638,6 +647,9 @@ async def test_delete_duplicates_short_term( patch.object( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), + patch.object( + recorder.migration, "non_live_data_migration_needed", return_value=False + ), patch( CREATE_ENGINE_TARGET, new=partial( diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index ad68e415df5d6a..4904bdecc4d9a9 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -1134,19 +1134,32 @@ async def test_resolve_period(hass: HomeAssistant) -> None: @pytest.mark.parametrize( - ("side_effect", "dialect", "expected_result", "num_calls"), + ("side_effect", "dialect", "retval", "expected_result", "num_calls"), [ - (None, SupportedDialect.MYSQL, does_not_raise(), 1), - (ValueError, SupportedDialect.MYSQL, pytest.raises(ValueError), 1), - (NonRetryable, SupportedDialect.MYSQL, pytest.raises(OperationalError), 1), - (Retryable, SupportedDialect.MYSQL, pytest.raises(OperationalError), 5), - (NonRetryable, SupportedDialect.SQLITE, pytest.raises(OperationalError), 1), - (Retryable, SupportedDialect.SQLITE, pytest.raises(OperationalError), 1), + (None, SupportedDialect.MYSQL, None, does_not_raise(), 1), + (ValueError, SupportedDialect.MYSQL, None, pytest.raises(ValueError), 1), + ( + NonRetryable, + SupportedDialect.MYSQL, + None, + pytest.raises(OperationalError), + 1, + ), + (Retryable, SupportedDialect.MYSQL, None, pytest.raises(OperationalError), 5), + ( + NonRetryable, + SupportedDialect.SQLITE, + None, + pytest.raises(OperationalError), + 1, + ), + (Retryable, SupportedDialect.SQLITE, None, pytest.raises(OperationalError), 1), ], ) def test_database_job_retry_wrapper( side_effect: Any, dialect: str, + retval: Any, expected_result: AbstractContextManager, num_calls: int, ) -> None: @@ -1157,12 +1170,13 @@ def test_database_job_retry_wrapper( instance.engine.dialect.name = dialect mock_job = Mock(side_effect=side_effect) - @database_job_retry_wrapper(description="test") + 
@database_job_retry_wrapper("test", 5) def job(instance, *args, **kwargs) -> None: mock_job() + return retval with expected_result: - job(instance) + assert job(instance) == retval assert len(mock_job.mock_calls) == num_calls diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 9a61695917495c..d59486b61f0da6 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -110,6 +110,7 @@ def _get_states_index_names(): with ( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(migration.EventsContextIDMigration, "migrate_data"), patch.object(migration.StatesContextIDMigration, "migrate_data"), @@ -266,6 +267,7 @@ def _get_states_index_names(): patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), @@ -385,6 +387,7 @@ def _get_states_index_names(): patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), @@ -517,6 +520,7 @@ def _get_states_index_names(): patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), @@ -694,6 +698,7 @@ def _get_states_index_names(): patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "non_live_data_migration_needed", return_value=False), patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), diff --git a/tests/components/renault/test_config_flow.py b/tests/components/renault/test_config_flow.py index 69bfdf0842e336..234d1dca069314 100644 --- a/tests/components/renault/test_config_flow.py +++ b/tests/components/renault/test_config_flow.py @@ -13,7 +13,7 @@ CONF_LOCALE, DOMAIN, ) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from 
homeassistant.helpers import aiohttp_client @@ -224,7 +224,10 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["description_placeholders"] == {CONF_USERNAME: "email@test.com"} + assert result["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "email@test.com", + } assert result["errors"] == {} # Failed credentials @@ -238,7 +241,10 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non ) assert result2["type"] is FlowResultType.FORM - assert result2["description_placeholders"] == {CONF_USERNAME: "email@test.com"} + assert result2["description_placeholders"] == { + CONF_NAME: "Mock Title", + CONF_USERNAME: "email@test.com", + } assert result2["errors"] == {"base": "invalid_credentials"} # Valid credentials diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index f9b8504f14f318..94192c3502e081 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -1,10 +1,12 @@ """Setup the Reolink tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import pytest from reolink_aio.api import Chime +from reolink_aio.baichuan import Baichuan +from reolink_aio.exceptions import ReolinkError from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN @@ -118,6 +120,12 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] host_mock.auto_track_method.return_value = 3 host_mock.daynight_state.return_value = "Black&White" + + # Baichuan + host_mock.baichuan = create_autospec(Baichuan) + # Disable tcp push by default for tests + host_mock.baichuan.events_active = False + host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error") yield host_mock_class diff --git a/tests/components/reolink/snapshots/test_diagnostics.ambr b/tests/components/reolink/snapshots/test_diagnostics.ambr index 33e9c78c55022c..71c5397fbd1056 100644 --- a/tests/components/reolink/snapshots/test_diagnostics.ambr +++ b/tests/components/reolink/snapshots/test_diagnostics.ambr @@ -118,8 +118,8 @@ 'null': 2, }), 'GetPtzCurPos': dict({ - '0': 1, - 'null': 1, + '0': 2, + 'null': 2, }), 'GetPtzGuard': dict({ '0': 2, diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py index a2c5ba07aa8791..71318c27b25286 100644 --- a/tests/components/reolink/test_binary_sensor.py +++ b/tests/components/reolink/test_binary_sensor.py @@ -1,5 +1,6 @@ """Test the Reolink binary sensor platform.""" +from collections.abc import Callable from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory @@ -8,9 +9,8 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from .conftest import TEST_DUO_MODEL, TEST_NVR_NAME +from .conftest import TEST_DUO_MODEL, TEST_HOST_MODEL, TEST_NVR_NAME from tests.common import MockConfigEntry, async_fire_time_changed from tests.typing import ClientSessionGenerator @@ -22,7 +22,6 @@ async def test_motion_sensor( freezer: 
FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, - entity_registry: er.EntityRegistry, ) -> None: """Test binary sensor entity with motion sensor.""" reolink_connect.model = TEST_DUO_MODEL @@ -42,7 +41,7 @@ async def test_motion_sensor( assert hass.states.get(entity_id).state == STATE_OFF - # test webhook callback + # test ONVIF webhook callback reolink_connect.motion_detected.return_value = True reolink_connect.ONVIF_event_callback.return_value = [0] webhook_id = config_entry.runtime_data.host.webhook_id @@ -50,3 +49,43 @@ async def test_motion_sensor( await client.post(f"/api/webhook/{webhook_id}", data="test_data") assert hass.states.get(entity_id).state == STATE_ON + + +async def test_tcp_callback( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test tcp callback using motion sensor.""" + + class callback_mock_class: + callback_func = None + + def register_callback( + self, callback_id: str, callback: Callable[[], None], *args, **key_args + ) -> None: + if callback_id.endswith("_motion"): + self.callback_func = callback + + callback_mock = callback_mock_class() + + reolink_connect.model = TEST_HOST_MODEL + reolink_connect.baichuan.events_active = True + reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) + reolink_connect.baichuan.register_callback = callback_mock.register_callback + reolink_connect.motion_detected.return_value = True + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion" + assert hass.states.get(entity_id).state == STATE_ON + + # simulate a TCP push callback + reolink_connect.motion_detected.return_value = False + assert callback_mock.callback_func is not None + callback_mock.callback_func() + + assert hass.states.get(entity_id).state == STATE_OFF diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 9382d9f7901992..bb896428b9919b 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -7,7 +7,12 @@ from aiohttp import ClientSession from freezegun.api import FrozenDateTimeFactory import pytest -from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + LoginFirmwareError, + ReolinkError, +) from homeassistant import config_entries from homeassistant.components import dhcp @@ -171,6 +176,20 @@ async def test_config_flow_errors( assert result["step_id"] == "user" assert result["errors"] == {CONF_PASSWORD: "invalid_auth"} + reolink_connect.get_host_data.side_effect = LoginFirmwareError("Test error") + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_HOST: TEST_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "update_needed"} + reolink_connect.valid_password.return_value = False result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py index 77d156c9486696..2286ca5d266c6d 100644 --- 
a/tests/components/reolink/test_host.py +++ b/tests/components/reolink/test_host.py @@ -14,12 +14,14 @@ from homeassistant.components.reolink.host import ( FIRST_ONVIF_LONG_POLL_TIMEOUT, FIRST_ONVIF_TIMEOUT, + FIRST_TCP_PUSH_TIMEOUT, LONG_POLL_COOLDOWN, LONG_POLL_ERROR_COOLDOWN, POLL_INTERVAL_NO_PUSH, ) from homeassistant.components.webhook import async_handle_webhook from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -31,6 +33,56 @@ from tests.typing import ClientSessionGenerator +async def test_setup_with_tcp_push( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test successful setup of the integration with TCP push callbacks.""" + reolink_connect.baichuan.events_active = True + reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + freezer.tick(timedelta(seconds=FIRST_TCP_PUSH_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # ONVIF push subscription not called + assert not reolink_connect.subscribe.called + + reolink_connect.baichuan.events_active = False + reolink_connect.baichuan.subscribe_events.side_effect = ReolinkError("Test error") + + +async def test_unloading_with_tcp_push( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test successful unloading of the integration with TCP push callbacks.""" + reolink_connect.baichuan.events_active = True + reolink_connect.baichuan.subscribe_events.reset_mock(side_effect=True) + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + reolink_connect.baichuan.unsubscribe_events.side_effect = ReolinkError("Test error") + + # Unload the config entry + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + reolink_connect.baichuan.events_active = False + reolink_connect.baichuan.subscribe_events.side_effect = ReolinkError("Test error") + reolink_connect.baichuan.unsubscribe_events.reset_mock(side_effect=True) + + async def test_webhook_callback( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, @@ -402,3 +454,8 @@ async def test_diagnostics_event_connection( diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) assert diag["event connection"] == "ONVIF push" + + # set TCP push as active + reolink_connect.baichuan.events_active = True + diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert diag["event connection"] == "TCP push" diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 82cdbfa9139af1..67ac2db826270b 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -15,10 +15,10 @@ NUM_CRED_ERRORS, ) from 
homeassistant.components.reolink.const import DOMAIN -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform +from homeassistant.const import CONF_PORT, STATE_OFF, STATE_UNAVAILABLE, Platform from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import ( device_registry as dr, entity_registry as er, @@ -31,6 +31,7 @@ TEST_HOST_MODEL, TEST_MAC, TEST_NVR_NAME, + TEST_PORT, TEST_UID, TEST_UID_CAM, ) @@ -623,3 +624,18 @@ async def test_new_device_discovered( await hass.async_block_till_done() assert reolink_connect.logout.call_count == 1 + + +async def test_port_changed( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test config_entry port update when it has changed during initial login.""" + assert config_entry.data[CONF_PORT] == TEST_PORT + reolink_connect.port = 4567 + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.data[CONF_PORT] == 4567 diff --git a/tests/components/rest/test_init.py b/tests/components/rest/test_init.py index 02dfe6364ff153..c401362d60437d 100644 --- a/tests/components/rest/test_init.py +++ b/tests/components/rest/test_init.py @@ -12,6 +12,7 @@ from homeassistant.components.rest.const import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, + CONF_PACKAGES, SERVICE_RELOAD, STATE_UNAVAILABLE, UnitOfInformation, @@ -468,7 +469,7 @@ async def test_config_schema_via_packages(hass: HomeAssistant) -> None: "pack_11": {"rest": {"resource": "http://url1"}}, "pack_list": {"rest": [{"resource": "http://url2"}]}, } - config = {HOMEASSISTANT_DOMAIN: {hass_config.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} await hass_config.merge_packages_config(hass, config, packages) assert len(config) == 2 diff --git a/tests/components/rflink/test_cover.py b/tests/components/rflink/test_cover.py index af61cc698e0f97..578221c70514ae 100644 --- a/tests/components/rflink/test_cover.py +++ b/tests/components/rflink/test_cover.py @@ -7,14 +7,9 @@ import pytest +from homeassistant.components.cover import CoverState from homeassistant.components.rflink.entity import EVENT_BUTTON_PRESSED -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_CLOSE_COVER, - SERVICE_OPEN_COVER, - STATE_CLOSED, - STATE_OPEN, -) +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER from homeassistant.core import CoreState, HomeAssistant, State, callback from .test_init import mock_rflink @@ -53,7 +48,7 @@ async def test_default_setup( # test default state of cover loaded from config cover_initial = hass.states.get(f"{DOMAIN}.test") - assert cover_initial.state == STATE_CLOSED + assert cover_initial.state == CoverState.CLOSED assert cover_initial.attributes["assumed_state"] # cover should follow state of the hardware device by interpreting @@ -64,7 +59,7 @@ async def test_default_setup( await hass.async_block_till_done() cover_after_first_command = hass.states.get(f"{DOMAIN}.test") - assert cover_after_first_command.state == STATE_OPEN + assert cover_after_first_command.state == CoverState.OPEN # not sure why, but cover have always assumed_state=true assert cover_after_first_command.attributes.get("assumed_state") @@ -72,34 +67,34 @@ async def 
test_default_setup( event_callback({"id": "protocol_0_0", "command": "down"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # should respond to group command event_callback({"id": "protocol_0_0", "command": "allon"}) await hass.async_block_till_done() cover_after_first_command = hass.states.get(f"{DOMAIN}.test") - assert cover_after_first_command.state == STATE_OPEN + assert cover_after_first_command.state == CoverState.OPEN # should respond to group command event_callback({"id": "protocol_0_0", "command": "alloff"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test following aliases # mock incoming command event for this device alias event_callback({"id": "test_alias_0_0", "command": "up"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN # test changing state from HA propagates to RFLink await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: f"{DOMAIN}.test"} ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[0][0][0] == "protocol_0_0" assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN" @@ -107,7 +102,7 @@ async def test_default_setup( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: f"{DOMAIN}.test"} ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[1][0][1] == "UP" @@ -269,19 +264,19 @@ async def test_group_alias( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to group alias event_callback({"id": "test_group_0_0", "command": "allon"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN # test sending group command to group alias event_callback({"id": "test_group_0_0", "command": "down"}) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN async def test_nogroup_alias( @@ -304,19 +299,19 @@ async def test_nogroup_alias( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup alias event_callback({"id": "test_nogroup_0_0", "command": "allon"}) await hass.async_block_till_done() # should not affect state - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup alias event_callback({"id": "test_nogroup_0_0", "command": "up"}) await hass.async_block_till_done() # should affect state 
- assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN async def test_nogroup_device_id( @@ -334,19 +329,19 @@ async def test_nogroup_device_id( # setup mocking rflink module event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch) - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup event_callback({"id": "test_nogroup_0_0", "command": "allon"}) await hass.async_block_till_done() # should not affect state - assert hass.states.get(f"{DOMAIN}.test").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.CLOSED # test sending group command to nogroup event_callback({"id": "test_nogroup_0_0", "command": "up"}) await hass.async_block_till_done() # should affect state - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.test").state == CoverState.OPEN async def test_restore_state( @@ -367,7 +362,11 @@ async def test_restore_state( } mock_restore_cache( - hass, (State(f"{DOMAIN}.c1", STATE_OPEN), State(f"{DOMAIN}.c2", STATE_CLOSED)) + hass, + ( + State(f"{DOMAIN}.c1", CoverState.OPEN), + State(f"{DOMAIN}.c2", CoverState.CLOSED), + ), ) hass.set_state(CoreState.starting) @@ -377,20 +376,20 @@ async def test_restore_state( state = hass.states.get(f"{DOMAIN}.c1") assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN state = hass.states.get(f"{DOMAIN}.c2") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED state = hass.states.get(f"{DOMAIN}.c3") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # not cached cover must default values state = hass.states.get(f"{DOMAIN}.c4") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes["assumed_state"] @@ -435,7 +434,7 @@ async def test_inverted_cover( # test default state of cover loaded from config standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == STATE_CLOSED + assert standard_cover.state == CoverState.CLOSED assert standard_cover.attributes["assumed_state"] # mock incoming up command event for nonkaku_device_1 @@ -443,7 +442,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == STATE_OPEN + assert standard_cover.state == CoverState.OPEN assert standard_cover.attributes.get("assumed_state") # mock incoming up command event for nonkaku_device_2 @@ -451,7 +450,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_none") - assert standard_cover.state == STATE_OPEN + assert standard_cover.state == CoverState.OPEN assert standard_cover.attributes.get("assumed_state") # mock incoming up command event for nonkaku_device_3 @@ -460,7 +459,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_4 @@ -469,7 +468,7 @@ async def test_inverted_cover( await hass.async_block_till_done() 
inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_5 @@ -478,7 +477,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming up command event for newkaku_device_6 @@ -487,7 +486,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_1 @@ -496,7 +495,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_standard") - assert standard_cover.state == STATE_CLOSED + assert standard_cover.state == CoverState.CLOSED assert standard_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_2 @@ -505,7 +504,7 @@ async def test_inverted_cover( await hass.async_block_till_done() standard_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_none") - assert standard_cover.state == STATE_CLOSED + assert standard_cover.state == CoverState.CLOSED assert standard_cover.attributes.get("assumed_state") # mock incoming down command event for nonkaku_device_3 @@ -514,7 +513,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_4 @@ -523,7 +522,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_5 @@ -532,7 +531,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # mock incoming down command event for newkaku_device_6 @@ -541,7 +540,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED assert inverted_cover.attributes.get("assumed_state") # We are only testing the 'inverted' devices, the 'standard' devices @@ -553,7 +552,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "nonkaku_device_3", "command": "allon"}) @@ -561,7 +560,7 @@ async def test_inverted_cover( await 
hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.nonkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # should respond to group command event_callback({"id": "newkaku_device_4", "command": "alloff"}) @@ -569,7 +568,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "newkaku_device_4", "command": "allon"}) @@ -577,7 +576,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_standard") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # should respond to group command event_callback({"id": "newkaku_device_5", "command": "alloff"}) @@ -585,7 +584,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "newkaku_device_5", "command": "allon"}) @@ -593,7 +592,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_none") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # should respond to group command event_callback({"id": "newkaku_device_6", "command": "alloff"}) @@ -601,7 +600,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_CLOSED + assert inverted_cover.state == CoverState.CLOSED # should respond to group command event_callback({"id": "newkaku_device_6", "command": "allon"}) @@ -609,7 +608,7 @@ async def test_inverted_cover( await hass.async_block_till_done() inverted_cover = hass.states.get(f"{DOMAIN}.newkaku_type_inverted") - assert inverted_cover.state == STATE_OPEN + assert inverted_cover.state == CoverState.OPEN # Sending the close command from HA should result # in an 'DOWN' command sent to a non-newkaku device @@ -622,7 +621,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[0][0][0] == "nonkaku_device_1" assert protocol.send_command_ack.call_args_list[0][0][1] == "DOWN" @@ -637,7 +636,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_standard").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[1][0][0] == "nonkaku_device_1" assert protocol.send_command_ack.call_args_list[1][0][1] == "UP" @@ -650,7 +649,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[2][0][0] == "nonkaku_device_2" assert protocol.send_command_ack.call_args_list[2][0][1] == "DOWN" @@ 
-663,7 +662,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_none").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[3][0][0] == "nonkaku_device_2" assert protocol.send_command_ack.call_args_list[3][0][1] == "UP" @@ -678,7 +677,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[4][0][0] == "nonkaku_device_3" assert protocol.send_command_ack.call_args_list[4][0][1] == "UP" @@ -693,7 +692,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.nonkaku_type_inverted").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[5][0][0] == "nonkaku_device_3" assert protocol.send_command_ack.call_args_list[5][0][1] == "DOWN" @@ -708,7 +707,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[6][0][0] == "newkaku_device_4" assert protocol.send_command_ack.call_args_list[6][0][1] == "DOWN" @@ -723,7 +722,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_standard").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[7][0][0] == "newkaku_device_4" assert protocol.send_command_ack.call_args_list[7][0][1] == "UP" @@ -736,7 +735,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[8][0][0] == "newkaku_device_5" assert protocol.send_command_ack.call_args_list[8][0][1] == "UP" @@ -749,7 +748,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_none").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[9][0][0] == "newkaku_device_5" assert protocol.send_command_ack.call_args_list[9][0][1] == "DOWN" @@ -764,7 +763,7 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == STATE_CLOSED + assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == CoverState.CLOSED assert protocol.send_command_ack.call_args_list[10][0][0] == "newkaku_device_6" assert protocol.send_command_ack.call_args_list[10][0][1] == "UP" @@ -779,6 +778,6 @@ async def test_inverted_cover( await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == STATE_OPEN + assert hass.states.get(f"{DOMAIN}.newkaku_type_inverted").state == CoverState.OPEN assert protocol.send_command_ack.call_args_list[11][0][0] == "newkaku_device_6" assert protocol.send_command_ack.call_args_list[11][0][1] == 
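The rflink cover changes in this hunk replace the STATE_OPEN and STATE_CLOSED constants with the CoverState enum while still comparing against hass.states.get(...).state, which is a plain string. That works because the enum members are string-valued; the sketch below demonstrates the equality behaviour with Python's StrEnum, assuming CoverState follows the same shape (the real definition in homeassistant.components.cover also carries opening and closing members):

from enum import StrEnum


class CoverState(StrEnum):
    """Assumed shape: members compare equal to, and format as, their string values."""

    OPEN = "open"
    CLOSED = "closed"


# The state machine stores plain strings, so either side of the comparison works.
assert CoverState.OPEN == "open"
assert "closed" == CoverState.CLOSED
assert f"cover is {CoverState.OPEN}" == "cover is open"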
"DOWN" diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index 90f2fd2a956b23..1296c2f58c5c42 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -8,7 +8,8 @@ import ring_doorbell from homeassistant.components.ring import DOMAIN -from homeassistant.const import CONF_USERNAME +from homeassistant.components.ring.const import CONF_CONFIG_ENTRY_MINOR_VERSION +from homeassistant.const import CONF_DEVICE_ID, CONF_USERNAME from homeassistant.core import HomeAssistant from .device_mocks import get_devices_data, get_mock_devices @@ -16,6 +17,8 @@ from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 +MOCK_HARDWARE_ID = "foo-bar" + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -116,10 +119,13 @@ def mock_config_entry() -> MockConfigEntry: title="Ring", domain=DOMAIN, data={ + CONF_DEVICE_ID: MOCK_HARDWARE_ID, CONF_USERNAME: "foo@bar.com", "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + version=1, + minor_version=CONF_CONFIG_ENTRY_MINOR_VERSION, ) diff --git a/tests/components/ring/snapshots/test_number.ambr b/tests/components/ring/snapshots/test_number.ambr index 9228589dc81c1f..0873319b837e84 100644 --- a/tests/components/ring/snapshots/test_number.ambr +++ b/tests/components/ring/snapshots/test_number.ambr @@ -1,1797 +1,5 @@ # serializer version: 1 -# name: test_states[number.downstairs_volume-2.0][number.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.downstairs_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '123456-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.downstairs_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Volume', - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.downstairs_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.front_door_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_door_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '987654-volume', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_states[number.downstairs_volume-2.0][number.front_door_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_door_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.front_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '765432-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.front_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.ingress_doorbell_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_doorbell_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Doorbell volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doorbell_volume', - 'unique_id': '185036587-doorbell_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.ingress_doorbell_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Doorbell volume', - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_doorbell_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.0', - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.ingress_mic_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_mic_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 
'original_icon': None, - 'original_name': 'Mic volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mic_volume', - 'unique_id': '185036587-mic_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.ingress_mic_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Mic volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_mic_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.ingress_voice_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_voice_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Voice volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voice_volume', - 'unique_id': '185036587-voice_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.ingress_voice_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Voice volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_voice_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.internal_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.internal_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '345678-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-2.0][number.internal_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.internal_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': 
None, - 'entity_id': 'number.downstairs_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '123456-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.downstairs_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.downstairs_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.downstairs_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '123456-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.downstairs_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Volume', - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.downstairs_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.front_door_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_door_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '987654-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.front_door_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_door_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.front_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, 
- 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '765432-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.front_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.ingress_doorbell_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_doorbell_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Doorbell volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doorbell_volume', - 'unique_id': '185036587-doorbell_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.ingress_doorbell_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Doorbell volume', - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_doorbell_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.0', - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.ingress_mic_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_mic_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mic volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mic_volume', - 'unique_id': '185036587-mic_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.ingress_mic_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Mic volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_mic_volume', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.ingress_voice_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_voice_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Voice volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voice_volume', - 'unique_id': '185036587-voice_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.ingress_voice_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Voice volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_voice_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.internal_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.internal_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '345678-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-1.0][number.internal_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.internal_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.front_door_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_door_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '987654-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_door_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', 
- 'friendly_name': 'Front Door Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_door_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_states[number.front_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '765432-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.front_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.downstairs_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '123456-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.downstairs_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Volume', - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.downstairs_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.front_door_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_door_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '987654-volume', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_states[number.ingress_doorbell_volume-8.0][number.front_door_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_door_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.front_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '765432-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.front_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.ingress_doorbell_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_doorbell_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Doorbell volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doorbell_volume', - 'unique_id': '185036587-doorbell_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.ingress_doorbell_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Doorbell volume', - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_doorbell_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.ingress_mic_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_mic_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mic volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mic_volume', - 'unique_id': '185036587-mic_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.ingress_mic_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Mic volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_mic_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.ingress_voice_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_voice_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Voice volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voice_volume', - 'unique_id': '185036587-voice_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.ingress_voice_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Voice volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_voice_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.internal_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.internal_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '345678-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-8.0][number.internal_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.internal_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_doorbell_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': 
, - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_doorbell_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Doorbell volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doorbell_volume', - 'unique_id': '185036587-doorbell_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_doorbell_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Doorbell volume', - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_doorbell_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.downstairs_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '123456-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.downstairs_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Downstairs Volume', - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.downstairs_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.front_door_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_door_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '987654-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.front_door_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Door Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_door_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.0', - }) -# --- -# name: 
test_states[number.ingress_mic_volume-11.0][number.front_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.front_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '765432-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.front_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Front Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.front_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.ingress_doorbell_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_doorbell_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Doorbell volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doorbell_volume', - 'unique_id': '185036587-doorbell_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.ingress_doorbell_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Doorbell volume', - 'max': 8, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_doorbell_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.ingress_mic_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_mic_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mic volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mic_volume', - 'unique_id': '185036587-mic_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.ingress_mic_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data 
provided by Ring.com', - 'friendly_name': 'Ingress Mic volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_mic_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.ingress_voice_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_voice_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Voice volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voice_volume', - 'unique_id': '185036587-voice_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.ingress_voice_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Voice volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_voice_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.internal_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.internal_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '345678-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_mic_volume-11.0][number.internal_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.internal_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_mic_volume-entry] +# name: test_states[number.downstairs_volume-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1808,62 +16,6 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.ingress_mic_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Mic volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mic_volume', - 'unique_id': '185036587-mic_volume', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_states[number.ingress_mic_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Mic volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_mic_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_voice_volume-11.0][number.downstairs_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 10, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, 'entity_id': 'number.downstairs_volume', 'has_entity_name': True, 'hidden_by': None, @@ -1885,12 +37,12 @@ 'unit_of_measurement': None, }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.downstairs_volume-state] +# name: test_states[number.downstairs_volume-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Ring.com', 'friendly_name': 'Downstairs Volume', - 'max': 10, + 'max': 11, 'min': 0, 'mode': , 'step': 1, @@ -1903,7 +55,7 @@ 'state': '2.0', }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.front_door_volume-entry] +# name: test_states[number.front_door_volume-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1941,7 +93,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.front_door_volume-state] +# name: test_states[number.front_door_volume-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Ring.com', @@ -1959,7 +111,7 @@ 'state': '1.0', }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.front_volume-entry] +# name: test_states[number.front_volume-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1997,7 +149,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.front_volume-state] +# name: test_states[number.front_volume-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Ring.com', @@ -2015,7 +167,7 @@ 'state': '11.0', }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.ingress_doorbell_volume-entry] +# name: test_states[number.ingress_doorbell_volume-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2053,7 +205,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.ingress_doorbell_volume-state] +# name: test_states[number.ingress_doorbell_volume-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Ring.com', @@ -2071,7 +223,7 @@ 'state': '8.0', }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.ingress_mic_volume-entry] +# name: test_states[number.ingress_mic_volume-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2109,7 +261,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.ingress_mic_volume-state] +# name: test_states[number.ingress_mic_volume-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Ring.com', @@ -2127,118 +279,6 @@ 'state': '11.0', }) # --- -# name: test_states[number.ingress_voice_volume-11.0][number.ingress_voice_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': 
None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.ingress_voice_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Voice volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voice_volume', - 'unique_id': '185036587-voice_volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_voice_volume-11.0][number.ingress_voice_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Ingress Voice volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.ingress_voice_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- -# name: test_states[number.ingress_voice_volume-11.0][number.internal_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': None, - 'entity_id': 'number.internal_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'volume', - 'unique_id': '345678-volume', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[number.ingress_voice_volume-11.0][number.internal_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Ring.com', - 'friendly_name': 'Internal Volume', - 'max': 11, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.internal_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.0', - }) -# --- # name: test_states[number.ingress_voice_volume-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ring/snapshots/test_sensor.ambr b/tests/components/ring/snapshots/test_sensor.ambr index 063675ce2140ad..9fd1ac7ba846fc 100644 --- a/tests/components/ring/snapshots/test_sensor.ambr +++ b/tests/components/ring/snapshots/test_sensor.ambr @@ -341,39 +341,6 @@ 'state': '11', }) # --- -# name: test_states[sensor.front_door_wi_fi_signal_category-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.front_door_wi_fi_signal_category', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Wi-Fi signal category', - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wifi_signal_category', - 'unique_id': 
'987654-wifi_signal_category', - 'unit_of_measurement': None, - }) -# --- # name: test_states[sensor.front_door_wifi_signal_category-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ring/test_config_flow.py b/tests/components/ring/test_config_flow.py index f947a968cf31e4..409cdac55aaa81 100644 --- a/tests/components/ring/test_config_flow.py +++ b/tests/components/ring/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Ring config flow.""" -from unittest.mock import AsyncMock, Mock +from unittest.mock import AsyncMock, Mock, patch import pytest import ring_doorbell @@ -8,11 +8,13 @@ from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.ring import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_DEVICE_ID, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr +from .conftest import MOCK_HARDWARE_ID + from tests.common import MockConfigEntry @@ -29,17 +31,19 @@ async def test_form( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "hello@home-assistant.io", "password": "test-password"}, - ) - await hass.async_block_till_done() + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "hello@home-assistant.io", "password": "test-password"}, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "hello@home-assistant.io" assert result2["data"] == { - "username": "hello@home-assistant.io", - "token": {"access_token": "mock-token"}, + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "hello@home-assistant.io", + CONF_TOKEN: {"access_token": "mock-token"}, } assert len(mock_setup_entry.mock_calls) == 1 @@ -82,13 +86,14 @@ async def test_form_2fa( assert result["errors"] == {} mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "foo@bar.com", - CONF_PASSWORD: "fake-password", - }, - ) + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "foo@bar.com", + CONF_PASSWORD: "fake-password", + }, + ) await hass.async_block_till_done() mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", None @@ -109,8 +114,9 @@ async def test_form_2fa( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "foo@bar.com" assert result3["data"] == { - "username": "foo@bar.com", - "token": "new-foobar", + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 @@ -156,8 +162,9 @@ async def test_reauth( assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" assert mock_added_config_entry.data == { - "username": "foo@bar.com", - "token": "new-foobar", + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 @@ -218,8 +225,9 @@ async def test_reauth_error( 
assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" assert mock_added_config_entry.data == { - "username": "foo@bar.com", - "token": "new-foobar", + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 @@ -268,15 +276,17 @@ async def test_dhcp_discovery( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} assert result["step_id"] == "user" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": username, "password": "test-password"}, - ) + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": username, "password": "test-password"}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "hello@home-assistant.io" assert result["data"] == { - "username": username, - "token": {"access_token": "mock-token"}, + CONF_DEVICE_ID: MOCK_HARDWARE_ID, + CONF_USERNAME: username, + CONF_TOKEN: {"access_token": "mock-token"}, } config_entry = hass.config_entries.async_entry_for_domain_unique_id( @@ -298,3 +308,102 @@ async def test_dhcp_discovery( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reconfigure( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ring_client: Mock, + mock_added_config_entry: MockConfigEntry, +) -> None: + """Test the reconfigure config flow.""" + + assert mock_added_config_entry.data[CONF_DEVICE_ID] == MOCK_HARDWARE_ID + + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + with patch("uuid.uuid4", return_value="new-hardware-id"): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"password": "test-password"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert mock_added_config_entry.data[CONF_DEVICE_ID] == "new-hardware-id" + + +@pytest.mark.parametrize( + ("error_type", "errors_msg"), + [ + (ring_doorbell.AuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], + ids=["invalid-auth", "unknown-error"], +) +async def test_reconfigure_errors( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_ring_auth: Mock, + error_type, + errors_msg, +) -> None: + """Test errors during the reconfigure config flow.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + mock_ring_auth.async_fetch_token.side_effect = error_type + with patch("uuid.uuid4", return_value="new-hardware-id"): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "error_fake_password", + }, + ) + await hass.async_block_till_done() + mock_ring_auth.async_fetch_token.assert_called_with( + "foo@bar.com", "error_fake_password", None + ) + mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_PASSWORD: "other_fake_password", + }, + ) + + mock_ring_auth.async_fetch_token.assert_called_with( + 
"foo@bar.com", "other_fake_password", None + ) + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "2fa" + + # Now test reconfigure can go on to succeed + mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" + + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"2fa": "123456"}, + ) + + mock_ring_auth.async_fetch_token.assert_called_with( + "foo@bar.com", "other_fake_password", "123456" + ) + + assert result4["type"] is FlowResultType.ABORT + assert result4["reason"] == "reconfigure_successful" + assert mock_added_config_entry.data == { + CONF_DEVICE_ID: "new-hardware-id", + CONF_USERNAME: "foo@bar.com", + CONF_TOKEN: "new-foobar", + } + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 5ac9e444ccae42..1b5ee68c659c31 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -1,5 +1,7 @@ """The tests for the Ring component.""" +from unittest.mock import AsyncMock, patch + from freezegun.api import FrozenDateTimeFactory import pytest from ring_doorbell import AuthenticationError, Ring, RingError, RingTimeout @@ -12,11 +14,12 @@ from homeassistant.components.ring.const import CONF_LISTEN_CREDENTIALS, SCAN_INTERVAL from homeassistant.components.ring.coordinator import RingEventListener from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import CONF_TOKEN, CONF_USERNAME +from homeassistant.const import CONF_DEVICE_ID, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component +from .conftest import MOCK_HARDWARE_ID from .device_mocks import FRONT_DOOR_DEVICE_ID from tests.common import MockConfigEntry, async_fire_time_changed @@ -450,3 +453,32 @@ async def test_no_listen_start( assert "Ring event listener failed to start after 10 seconds" in [ record.message for record in caplog.records if record.levelname == "WARNING" ] + + +async def test_migrate_create_device_id( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test migration creates new device id created.""" + entry = MockConfigEntry( + title="Ring", + domain=DOMAIN, + data={ + CONF_USERNAME: "foo@bar.com", + "token": {"access_token": "mock-token"}, + }, + unique_id="foo@bar.com", + version=1, + minor_version=1, + ) + entry.add_to_hass(hass) + with patch("uuid.uuid4", return_value=MOCK_HARDWARE_ID): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.minor_version == 2 + assert CONF_DEVICE_ID in entry.data + assert entry.data[CONF_DEVICE_ID] == MOCK_HARDWARE_ID + + assert "Migration to version 1.2 complete" in caplog.text diff --git a/tests/components/risco/test_alarm_control_panel.py b/tests/components/risco/test_alarm_control_panel.py index 9b554ddbf283c8..8caef1fbfc457b 100644 --- a/tests/components/risco/test_alarm_control_panel.py +++ b/tests/components/risco/test_alarm_control_panel.py @@ -9,6 +9,7 @@ from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_DOMAIN, AlarmControlPanelEntityFeature, + AlarmControlPanelState, ) from homeassistant.components.risco import CannotConnectError, UnauthorizedError from homeassistant.components.risco.const import DOMAIN @@ -18,13 +19,6 @@ 
SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -42,25 +36,25 @@ CODES_REQUIRED_OPTIONS = {"code_arm_required": True, "code_disarm_required": True} TEST_RISCO_TO_HA = { - "arm": STATE_ALARM_ARMED_AWAY, - "partial_arm": STATE_ALARM_ARMED_HOME, - "A": STATE_ALARM_ARMED_HOME, - "B": STATE_ALARM_ARMED_HOME, - "C": STATE_ALARM_ARMED_NIGHT, - "D": STATE_ALARM_ARMED_NIGHT, + "arm": AlarmControlPanelState.ARMED_AWAY, + "partial_arm": AlarmControlPanelState.ARMED_HOME, + "A": AlarmControlPanelState.ARMED_HOME, + "B": AlarmControlPanelState.ARMED_HOME, + "C": AlarmControlPanelState.ARMED_NIGHT, + "D": AlarmControlPanelState.ARMED_NIGHT, } TEST_FULL_RISCO_TO_HA = { **TEST_RISCO_TO_HA, - "D": STATE_ALARM_ARMED_CUSTOM_BYPASS, + "D": AlarmControlPanelState.ARMED_CUSTOM_BYPASS, } TEST_HA_TO_RISCO = { - STATE_ALARM_ARMED_AWAY: "arm", - STATE_ALARM_ARMED_HOME: "partial_arm", - STATE_ALARM_ARMED_NIGHT: "C", + AlarmControlPanelState.ARMED_AWAY: "arm", + AlarmControlPanelState.ARMED_HOME: "partial_arm", + AlarmControlPanelState.ARMED_NIGHT: "C", } TEST_FULL_HA_TO_RISCO = { **TEST_HA_TO_RISCO, - STATE_ALARM_ARMED_CUSTOM_BYPASS: "D", + AlarmControlPanelState.ARMED_CUSTOM_BYPASS: "D", } CUSTOM_MAPPING_OPTIONS = { "risco_states_to_ha": TEST_RISCO_TO_HA, @@ -210,7 +204,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "triggered", - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.TRIGGERED, entity_id, partition_id, ) @@ -218,7 +212,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "arming", - STATE_ALARM_ARMING, + AlarmControlPanelState.ARMING, entity_id, partition_id, ) @@ -226,7 +220,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "armed", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, entity_id, partition_id, ) @@ -234,7 +228,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "partially_armed", - STATE_ALARM_ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, entity_id, partition_id, ) @@ -242,7 +236,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "disarmed", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, entity_id, partition_id, ) @@ -257,7 +251,7 @@ async def test_cloud_states( hass, two_part_cloud_alarm, "partially_armed", - STATE_ALARM_ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, entity_id, partition_id, ) @@ -595,7 +589,7 @@ async def test_local_states( hass, two_part_local_alarm, "triggered", - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.TRIGGERED, entity_id, partition_id, callback, @@ -604,7 +598,7 @@ async def test_local_states( hass, two_part_local_alarm, "arming", - STATE_ALARM_ARMING, + AlarmControlPanelState.ARMING, entity_id, partition_id, callback, @@ -613,7 +607,7 @@ async def test_local_states( hass, two_part_local_alarm, "armed", - STATE_ALARM_ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, entity_id, partition_id, callback, @@ -622,7 +616,7 @@ async def test_local_states( hass, two_part_local_alarm, "partially_armed", - STATE_ALARM_ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, entity_id, partition_id, callback, @@ -631,7 +625,7 @@ async def test_local_states( hass, two_part_local_alarm, "disarmed", - STATE_ALARM_DISARMED, + AlarmControlPanelState.DISARMED, entity_id, partition_id, callback, @@ -647,7 +641,7 @@ async def 
test_local_states( hass, two_part_local_alarm, "partially_armed", - STATE_ALARM_ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, entity_id, partition_id, callback, diff --git a/tests/components/roborock/snapshots/test_diagnostics.ambr b/tests/components/roborock/snapshots/test_diagnostics.ambr index 805a498041a63b..26ecb729312e0f 100644 --- a/tests/components/roborock/snapshots/test_diagnostics.ambr +++ b/tests/components/roborock/snapshots/test_diagnostics.ambr @@ -102,6 +102,7 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -109,6 +110,7 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -116,6 +118,7 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -123,6 +126,7 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -130,6 +134,7 @@ 'id': '124', 'mode': 'rw', 'name': '拖地模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -137,6 +142,7 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -144,6 +150,7 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -151,6 +158,7 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -381,6 +389,7 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -388,6 +397,7 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -395,6 +405,7 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -402,6 +413,7 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -409,6 +421,7 @@ 'id': '124', 'mode': 'rw', 'name': '拖地模式', + 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -416,6 +429,7 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -423,6 +437,7 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -430,6 +445,7 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', + 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ diff --git a/tests/components/roku/test_config_flow.py b/tests/components/roku/test_config_flow.py index 3cf5627f342e98..7144c77cad9433 100644 --- a/tests/components/roku/test_config_flow.py +++ b/tests/components/roku/test_config_flow.py @@ -6,7 +6,7 @@ import pytest from rokuecp import RokuConnectionError -from homeassistant.components.roku.const import DOMAIN +from homeassistant.components.roku.const import CONF_PLAY_MEDIA_APP_ID, DOMAIN from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE from homeassistant.core import HomeAssistant @@ -254,3 +254,25 @@ async def test_ssdp_discovery( assert result["data"] assert result["data"][CONF_HOST] == HOST assert result["data"][CONF_NAME] == UPNP_FRIENDLY_NAME + + +async def test_options_flow( + hass: HomeAssistant, 
mock_config_entry: MockConfigEntry +) -> None: + """Test options config flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "init" + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_PLAY_MEDIA_APP_ID: "782875"}, + ) + + assert result2.get("type") is FlowResultType.CREATE_ENTRY + assert result2.get("data") == { + CONF_PLAY_MEDIA_APP_ID: "782875", + } diff --git a/tests/components/roku/test_media_player.py b/tests/components/roku/test_media_player.py index 9aff8f581d7cfb..5f8a41d16aca9a 100644 --- a/tests/components/roku/test_media_player.py +++ b/tests/components/roku/test_media_player.py @@ -32,12 +32,12 @@ ATTR_FORMAT, ATTR_KEYWORD, ATTR_MEDIA_TYPE, + DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN, SERVICE_SEARCH, ) from homeassistant.components.stream import FORMAT_CONTENT_TYPE, HLS_PROVIDER from homeassistant.components.websocket_api import TYPE_RESULT -from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, @@ -59,6 +59,7 @@ STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -495,7 +496,7 @@ async def test_services_play_media( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 0 + assert mock_roku.launch.call_count == 0 await hass.services.async_call( MP_DOMAIN, @@ -509,7 +510,7 @@ async def test_services_play_media( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 0 + assert mock_roku.launch.call_count == 0 @pytest.mark.parametrize( @@ -546,9 +547,10 @@ async def test_services_play_media_audio( }, blocking=True, ) - mock_roku.play_on_roku.assert_called_once_with( - content_id, + mock_roku.launch.assert_called_once_with( + DEFAULT_PLAY_MEDIA_APP_ID, { + "u": content_id, "t": "a", "songName": resolved_name, "songFormat": resolved_format, @@ -591,9 +593,11 @@ async def test_services_play_media_video( }, blocking=True, ) - mock_roku.play_on_roku.assert_called_once_with( - content_id, + mock_roku.launch.assert_called_once_with( + DEFAULT_PLAY_MEDIA_APP_ID, { + "u": content_id, + "t": "v", "videoName": resolved_name, "videoFormat": resolved_format, }, @@ -617,10 +621,12 @@ async def test_services_camera_play_stream( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 1 - mock_roku.play_on_roku.assert_called_with( - "https://awesome.tld/api/hls/api_token/master_playlist.m3u8", + assert mock_roku.launch.call_count == 1 + mock_roku.launch.assert_called_with( + DEFAULT_PLAY_MEDIA_APP_ID, { + "u": "https://awesome.tld/api/hls/api_token/master_playlist.m3u8", + "t": "v", "videoName": "Camera Stream", "videoFormat": "hls", }, @@ -653,14 +659,21 @@ async def test_services_play_media_local_source( blocking=True, ) - assert mock_roku.play_on_roku.call_count == 1 - assert mock_roku.play_on_roku.call_args - call_args = mock_roku.play_on_roku.call_args.args - assert "/local/Epic%20Sax%20Guy%2010%20Hours.mp4?authSig=" in call_args[0] - assert call_args[1] == { - "videoFormat": "mp4", - "videoName": "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4", - } + assert mock_roku.launch.call_count == 1 + assert 
mock_roku.launch.call_args + call_args = mock_roku.launch.call_args.args + assert call_args[0] == DEFAULT_PLAY_MEDIA_APP_ID + assert "u" in call_args[1] + assert "/local/Epic%20Sax%20Guy%2010%20Hours.mp4?authSig=" in call_args[1]["u"] + assert "t" in call_args[1] + assert call_args[1]["t"] == "v" + assert "videoFormat" in call_args[1] + assert call_args[1]["videoFormat"] == "mp4" + assert "videoName" in call_args[1] + assert ( + call_args[1]["videoName"] + == "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4" + ) @pytest.mark.parametrize("mock_device", ["roku/rokutv-7820x.json"], indirect=True) diff --git a/tests/components/roomba/test_config_flow.py b/tests/components/roomba/test_config_flow.py index 8139e42d43d4f5..dedccc14249582 100644 --- a/tests/components/roomba/test_config_flow.py +++ b/tests/components/roomba/test_config_flow.py @@ -8,7 +8,12 @@ from homeassistant.components import dhcp, zeroconf from homeassistant.components.roomba import config_flow -from homeassistant.components.roomba.const import CONF_BLID, CONF_CONTINUOUS, DOMAIN +from homeassistant.components.roomba.const import ( + CONF_BLID, + CONF_CONTINUOUS, + DEFAULT_DELAY, + DOMAIN, +) from homeassistant.config_entries import ( SOURCE_DHCP, SOURCE_IGNORE, @@ -206,7 +211,7 @@ async def test_form_user_discovery_and_password_fetch(hass: HomeAssistant) -> No assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -331,7 +336,7 @@ async def test_form_user_discovery_manual_and_auto_password_fetch( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -468,7 +473,7 @@ async def test_form_user_discovery_no_devices_found_and_auto_password_fetch( assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -541,7 +546,7 @@ async def test_form_user_discovery_no_devices_found_and_password_fetch_fails( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -677,7 +682,7 @@ async def test_form_user_discovery_and_password_fetch_gets_connection_refused( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -738,7 +743,7 @@ async def test_dhcp_discovery_and_roomba_discovery_finds( assert result2["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -816,7 +821,7 @@ async def test_dhcp_discovery_falls_back_to_manual( assert result4["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -886,7 +891,7 @@ async def test_dhcp_discovery_no_devices_falls_back_to_manual( assert result3["data"] == { CONF_BLID: "BLID", CONF_CONTINUOUS: True, - CONF_DELAY: 1, + CONF_DELAY: DEFAULT_DELAY, CONF_HOST: MOCK_IP, CONF_PASSWORD: "password", } @@ -1119,10 +1124,10 @@ async def test_options_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={CONF_CONTINUOUS: True, CONF_DELAY: 1}, + user_input={CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY}, ) await 
hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {CONF_CONTINUOUS: True, CONF_DELAY: 1} - assert config_entry.options == {CONF_CONTINUOUS: True, CONF_DELAY: 1} + assert result["data"] == {CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY} + assert config_entry.options == {CONF_CONTINUOUS: True, CONF_DELAY: DEFAULT_DELAY} diff --git a/tests/components/rtsp_to_webrtc/test_config_flow.py b/tests/components/rtsp_to_webrtc/test_config_flow.py index 5daf9400396825..d3afa80b0b47e7 100644 --- a/tests/components/rtsp_to_webrtc/test_config_flow.py +++ b/tests/components/rtsp_to_webrtc/test_config_flow.py @@ -7,11 +7,11 @@ import rtsp_to_webrtc from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.rtsp_to_webrtc import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from .conftest import ComponentSetup diff --git a/tests/components/rtsp_to_webrtc/test_init.py b/tests/components/rtsp_to_webrtc/test_init.py index cb4d5f7a1316ac..85155855a099b9 100644 --- a/tests/components/rtsp_to_webrtc/test_init.py +++ b/tests/components/rtsp_to_webrtc/test_init.py @@ -86,12 +86,11 @@ async def test_setup_communication_failure( assert entries[0].state is ConfigEntryState.SETUP_RETRY +@pytest.mark.usefixtures("mock_camera", "rtsp_to_webrtc_client") async def test_offer_for_stream_source( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - mock_camera: Any, - rtsp_to_webrtc_client: Any, setup_integration: ComponentSetup, ) -> None: """Test successful response from RTSPtoWebRTC server.""" @@ -103,21 +102,33 @@ async def test_offer_for_stream_source( ) client = await hass_ws_client(hass) - await client.send_json( + await client.send_json_auto_id( { - "id": 1, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.demo_camera", "offer": OFFER_SDP, } ) + + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id response = await client.receive_json() - assert response.get("id") == 1 - assert response.get("type") == TYPE_RESULT - assert response.get("success") - assert "result" in response - assert response["result"].get("answer") == ANSWER_SDP - assert "error" not in response + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "answer", + "answer": ANSWER_SDP, + } # Validate request parameters were sent correctly assert len(aioclient_mock.mock_calls) == 1 @@ -127,12 +138,11 @@ async def test_offer_for_stream_source( } +@pytest.mark.usefixtures("mock_camera", "rtsp_to_webrtc_client") async def test_offer_failure( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - mock_camera: Any, - rtsp_to_webrtc_client: Any, setup_integration: ComponentSetup, ) -> None: """Test a transient failure talking to RTSPtoWebRTC server.""" @@ -144,20 +154,31 @@ async def test_offer_failure( ) client = await hass_ws_client(hass) - await client.send_json( 
+ await client.send_json_auto_id( { - "id": 2, - "type": "camera/web_rtc_offer", + "type": "camera/webrtc/offer", "entity_id": "camera.demo_camera", "offer": OFFER_SDP, } ) + + response = await client.receive_json() + assert response["type"] == TYPE_RESULT + assert response["success"] + subscription_id = response["id"] + + # Session id response = await client.receive_json() - assert response.get("id") == 2 - assert response.get("type") == TYPE_RESULT - assert "success" in response - assert not response.get("success") - assert "error" in response - assert response["error"].get("code") == "web_rtc_offer_failed" - assert "message" in response["error"] - assert "RTSPtoWebRTC server communication failure" in response["error"]["message"] + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"]["type"] == "session" + + # Answer + response = await client.receive_json() + assert response["id"] == subscription_id + assert response["type"] == "event" + assert response["event"] == { + "type": "error", + "code": "webrtc_offer_failed", + "message": "RTSPtoWebRTC server communication failure: ", + } diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py index 96171071907eb0..d0e6d77f1ee149 100644 --- a/tests/components/russound_rio/__init__.py +++ b/tests/components/russound_rio/__init__.py @@ -1 +1,13 @@ """Tests for the Russound RIO integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 91d009f13f4734..09cccd7d83fc32 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -1,16 +1,19 @@ """Test fixtures for Russound RIO integration.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch +from aiorussound import Controller, RussoundTcpConnectionHandler, Source +from aiorussound.rio import ZoneControlSurface +from aiorussound.util import controller_device_str, zone_device_str import pytest from homeassistant.components.russound_rio.const import DOMAIN from homeassistant.core import HomeAssistant -from .const import HARDWARE_MAC, MOCK_CONFIG, MOCK_CONTROLLERS, MODEL +from .const import HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -25,15 +28,13 @@ def mock_setup_entry(): @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a Russound RIO config entry.""" - entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL ) - entry.add_to_hass(hass) - return entry @pytest.fixture -def mock_russound() -> Generator[AsyncMock]: +def mock_russound_client() -> Generator[AsyncMock]: """Mock the Russound RIO client.""" with ( patch( @@ -41,8 +42,32 @@ def mock_russound() -> Generator[AsyncMock]: ) as mock_client, patch( "homeassistant.components.russound_rio.config_flow.RussoundClient", - return_value=mock_client, + new=mock_client, ), ): - 
mock_client.controllers = MOCK_CONTROLLERS - yield mock_client + client = mock_client.return_value + zones = { + int(k): ZoneControlSurface.from_dict(v) + for k, v in load_json_object_fixture("get_zones.json", DOMAIN).items() + } + client.sources = { + int(k): Source.from_dict(v) + for k, v in load_json_object_fixture("get_sources.json", DOMAIN).items() + } + for k, v in zones.items(): + v.device_str = zone_device_str(1, k) + v.fetch_current_source = Mock( + side_effect=lambda current_source=v.current_source: client.sources.get( + int(current_source) + ) + ) + + client.controllers = { + 1: Controller( + 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones + ) + } + client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) + client.is_connected = Mock(return_value=True) + client.unregister_state_update_callbacks.return_value = True + yield client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 527f4fe337739d..3d2924693d2764 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -2,6 +2,8 @@ from collections import namedtuple +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN + HOST = "127.0.0.1" PORT = 9621 MODEL = "MCA-C5" @@ -14,3 +16,7 @@ _CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} + +DEVICE_NAME = "mca_c5" +NAME_ZONE_1 = "backyard" +ENTITY_ID_ZONE_1 = f"{MP_DOMAIN}.{DEVICE_NAME}_{NAME_ZONE_1}" diff --git a/tests/components/russound_rio/fixtures/get_sources.json b/tests/components/russound_rio/fixtures/get_sources.json new file mode 100644 index 00000000000000..e39d702b8a1cf5 --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_sources.json @@ -0,0 +1,10 @@ +{ + "1": { + "name": "Aux", + "type": "Miscellaneous Audio" + }, + "2": { + "name": "Spotify", + "type": "Russound Media Streamer" + } +} diff --git a/tests/components/russound_rio/fixtures/get_zones.json b/tests/components/russound_rio/fixtures/get_zones.json new file mode 100644 index 00000000000000..396310339b3cec --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_zones.json @@ -0,0 +1,22 @@ +{ + "1": { + "name": "Backyard", + "volume": "10", + "status": "ON", + "enabled": "True", + "current_source": "1" + }, + "2": { + "name": "Kitchen", + "volume": "50", + "status": "OFF", + "enabled": "True", + "current_source": "2" + }, + "3": { + "name": "Bedroom", + "volume": "10", + "status": "OFF", + "enabled": "False" + } +} diff --git a/tests/components/russound_rio/snapshots/test_init.ambr b/tests/components/russound_rio/snapshots/test_init.ambr new file mode 100644 index 00000000000000..fcd59dd06f7054 --- /dev/null +++ b/tests/components/russound_rio/snapshots/test_init.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.1', + 'connections': set({ + tuple( + 'mac', + '00:11:22:33:44:55', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'russound_rio', + '00:11:22:33:44:55', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Russound', + 'model': 'MCA-C5', + 'model_id': None, + 'name': 'MCA-C5', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, 
+ 'via_device_id': None, + }) +# --- diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index 9461fe1d5bee1e..cf754852731e39 100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -11,7 +11,7 @@ async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -32,13 +32,13 @@ async def test_form( async def test_form_cannot_connect( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - mock_russound.connect.side_effect = TimeoutError + mock_russound_client.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -48,7 +48,7 @@ async def test_form_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} # Recover with correct information - mock_russound.connect.side_effect = None + mock_russound_client.connect.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -61,7 +61,7 @@ async def test_form_cannot_connect( async def test_import( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we import a config entry.""" result = await hass.config_entries.flow.async_init( @@ -77,10 +77,10 @@ async def test_import( async def test_import_cannot_connect( - hass: HomeAssistant, mock_russound: AsyncMock + hass: HomeAssistant, mock_russound_client: AsyncMock ) -> None: """Test we handle import cannot connect error.""" - mock_russound.connect.side_effect = TimeoutError + mock_russound_client.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py new file mode 100644 index 00000000000000..6787ee37c79ce4 --- /dev/null +++ b/tests/components/russound_rio/test_init.py @@ -0,0 +1,44 @@ +"""Tests for the Russound RIO integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test the Russound RIO configuration entry not ready.""" + mock_russound_client.connect.side_effect = TimeoutError + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + mock_russound_client.connect = AsyncMock(return_value=True) + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py new file mode 100644 index 00000000000000..e720e2c7f657ab --- /dev/null +++ b/tests/components/russound_rio/test_media_player.py @@ -0,0 +1,58 @@ +"""Tests for the Russound RIO media player.""" + +from unittest.mock import AsyncMock + +from aiorussound.models import CallbackType, PlayStatus +import pytest + +from homeassistant.const import ( + STATE_BUFFERING, + STATE_IDLE, + STATE_OFF, + STATE_ON, + STATE_PAUSED, + STATE_PLAYING, +) +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .const import ENTITY_ID_ZONE_1 + +from tests.common import MockConfigEntry + + +async def mock_state_update(client: AsyncMock) -> None: + """Trigger a callback in the media player.""" + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, CallbackType.STATE) + + +@pytest.mark.parametrize( + ("zone_status", "source_play_status", "media_player_state"), + [ + (True, None, STATE_ON), + (True, PlayStatus.PLAYING, STATE_PLAYING), + (True, PlayStatus.PAUSED, STATE_PAUSED), + (True, PlayStatus.TRANSITIONING, STATE_BUFFERING), + (True, PlayStatus.STOPPED, STATE_IDLE), + (False, None, STATE_OFF), + (False, PlayStatus.STOPPED, STATE_OFF), + ], +) +async def test_entity_state( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + zone_status: bool, + source_play_status: PlayStatus | None, + media_player_state: str, +) -> None: + """Test media player state.""" + await setup_integration(hass, mock_config_entry) + mock_russound_client.controllers[1].zones[1].status = zone_status + mock_russound_client.sources[1].play_status = source_play_status + await mock_state_update(mock_russound_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID_ZONE_1) + assert state.state == media_player_state diff --git a/tests/components/samsungtv/test_device_trigger.py b/tests/components/samsungtv/test_device_trigger.py index acc7ecb904db86..fa6efd0807667e 100644 --- a/tests/components/samsungtv/test_device_trigger.py +++ b/tests/components/samsungtv/test_device_trigger.py @@ -7,7 +7,8 @@ from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) -from homeassistant.components.samsungtv import DOMAIN, device_trigger +from homeassistant.components.samsungtv import device_trigger +from homeassistant.components.samsungtv.const import DOMAIN from
homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError diff --git a/tests/components/samsungtv/test_trigger.py b/tests/components/samsungtv/test_trigger.py index 8076ceb2807815..e1d26043bb070c 100644 --- a/tests/components/samsungtv/test_trigger.py +++ b/tests/components/samsungtv/test_trigger.py @@ -5,7 +5,7 @@ import pytest from homeassistant.components import automation -from homeassistant.components.samsungtv import DOMAIN +from homeassistant.components.samsungtv.const import DOMAIN from homeassistant.const import SERVICE_RELOAD, SERVICE_TURN_ON from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr diff --git a/tests/components/schlage/conftest.py b/tests/components/schlage/conftest.py index 5ff8d0456061a1..f774b8cfb89181 100644 --- a/tests/components/schlage/conftest.py +++ b/tests/components/schlage/conftest.py @@ -91,6 +91,7 @@ def mock_lock_attrs() -> dict[str, Any]: "is_locked": False, "is_jammed": False, "battery_level": 20, + "auto_lock_time": 15, "firmware_version": "1.0", "lock_and_leave_enabled": True, "beeper_enabled": True, diff --git a/tests/components/schlage/test_config_flow.py b/tests/components/schlage/test_config_flow.py index 15ef3858c0ceac..7f4a40f9b53ba5 100644 --- a/tests/components/schlage/test_config_flow.py +++ b/tests/components/schlage/test_config_flow.py @@ -15,8 +15,18 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + "username", + [ + "test-username", + "TEST-USERNAME", + ], +) async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_pyschlage_auth: Mock + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_pyschlage_auth: Mock, + username: str, ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -28,7 +38,7 @@ async def test_form( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", + "username": username, "password": "test-password", }, ) diff --git a/tests/components/schlage/test_select.py b/tests/components/schlage/test_select.py new file mode 100644 index 00000000000000..c27fd4c8813411 --- /dev/null +++ b/tests/components/schlage/test_select.py @@ -0,0 +1,31 @@ +"""Test Schlage select.""" + +from unittest.mock import Mock + +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + + +async def test_select( + hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry +) -> None: + """Test the auto-lock time select entity.""" + entity_id = "select.vault_door_auto_lock_time" + + select = hass.states.get(entity_id) + assert select is not None + assert select.state == "15" + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "30"}, + blocking=True, + ) + mock_lock.set_auto_lock_time.assert_called_once_with(30) diff --git a/tests/components/sense/__init__.py b/tests/components/sense/__init__.py index bf0a87737b9762..d604bcba737d3a 100644 --- a/tests/components/sense/__init__.py +++ b/tests/components/sense/__init__.py @@ -1 +1,23 @@ """Tests for the Sense integration.""" + +from unittest.mock import patch + +from 
homeassistant.components.sense.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platform: Platform +) -> MockConfigEntry: + """Set up the Sense platform.""" + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.sense.PLATFORMS", [platform]): + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/sense/conftest.py b/tests/components/sense/conftest.py new file mode 100644 index 00000000000000..7cf1626f40e93d --- /dev/null +++ b/tests/components/sense/conftest.py @@ -0,0 +1,84 @@ +"""Common methods for Sense.""" + +from __future__ import annotations + +from collections.abc import Generator +import datetime +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch + +import pytest +from sense_energy import Scale + +from homeassistant.components.sense.binary_sensor import SenseDevice +from homeassistant.components.sense.const import DOMAIN + +from .const import ( + DEVICE_1_DAY_ENERGY, + DEVICE_1_ID, + DEVICE_1_NAME, + DEVICE_1_POWER, + DEVICE_2_DAY_ENERGY, + DEVICE_2_ID, + DEVICE_2_NAME, + DEVICE_2_POWER, + MOCK_CONFIG, + MONITOR_ID, +) + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.sense.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def config_entry() -> MockConfigEntry: + """Mock sense config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG, + unique_id="test-email", + ) + + +@pytest.fixture +def mock_sense() -> Generator[MagicMock]: + """Mock an ASyncSenseable object with a split foundation.""" + with patch("homeassistant.components.sense.ASyncSenseable", autospec=True) as mock: + gateway = mock.return_value + gateway.sense_monitor_id = MONITOR_ID + gateway.get_monitor_data.return_value = None + gateway.update_realtime.return_value = None + gateway.fetch_devices.return_value = None + gateway.update_trend_data.return_value = None + + type(gateway).active_power = PropertyMock(return_value=100) + type(gateway).active_solar_power = PropertyMock(return_value=500) + type(gateway).active_voltage = PropertyMock(return_value=[120, 240]) + gateway.get_stat.return_value = 15 + gateway.trend_start.return_value = datetime.datetime.fromisoformat( + "2024-01-01 01:01:00+00:00" + ) + + device_1 = SenseDevice(DEVICE_1_ID) + device_1.name = DEVICE_1_NAME + device_1.icon = "car" + device_1.is_on = False + device_1.power_w = DEVICE_1_POWER + device_1.energy_kwh[Scale.DAY] = DEVICE_1_DAY_ENERGY + + device_2 = SenseDevice(DEVICE_2_ID) + device_2.name = DEVICE_2_NAME + device_2.icon = "stove" + device_2.is_on = False + device_2.power_w = DEVICE_2_POWER + device_2.energy_kwh[Scale.DAY] = DEVICE_2_DAY_ENERGY + type(gateway).devices = PropertyMock(return_value=[device_1, device_2]) + + yield gateway diff --git a/tests/components/sense/const.py b/tests/components/sense/const.py new file mode 100644 index 00000000000000..d040c0bc38c3eb --- /dev/null +++ b/tests/components/sense/const.py @@ -0,0 +1,29 @@ +"""Constants for the Sense integration tests.""" + +MONITOR_ID = "456" + +MOCK_CONFIG = { + "timeout": 6, + "email": "test-email",
+ "password": "test-password", + "access_token": "ABC", + "user_id": "123", + "monitor_id": MONITOR_ID, + "device_id": "789", + "refresh_token": "XYZ", +} + + +DEVICE_1_NAME = "Car" +DEVICE_1_ID = "abc123" +DEVICE_1_ICON = "car-electric" +DEVICE_1_POWER = 100.0 +DEVICE_1_DAY_ENERGY = 500 + +DEVICE_2_NAME = "Oven" +DEVICE_2_ID = "def456" +DEVICE_2_ICON = "stove" +DEVICE_2_POWER = 50.0 +DEVICE_2_DAY_ENERGY = 42 + +MONITOR_ID = "12345" diff --git a/tests/components/sense/snapshots/test_binary_sensor.ambr b/tests/components/sense/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000000..339830b16d3714 --- /dev/null +++ b/tests/components/sense/snapshots/test_binary_sensor.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.car_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.car_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-abc123', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.car_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Car Power', + 'icon': 'mdi:car-electric', + }), + 'context': , + 'entity_id': 'binary_sensor.car_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.oven_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-def456', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.oven_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Oven Power', + 'icon': 'mdi:stove', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sense/snapshots/test_sensor.ambr b/tests/components/sense/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..4a3507880a16b8 --- /dev/null +++ b/tests/components/sense/snapshots/test_sensor.ambr @@ -0,0 +1,2680 @@ +# serializer version: 1 +# name: test_sensors[sensor.car_bill_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_bill_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Bill energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bill_energy', + 'unique_id': '12345-abc123-bill-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_bill_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Bill energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_bill_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.car_daily_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_daily_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Daily energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_energy', + 'unique_id': '12345-abc123-daily-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_daily_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Daily energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_daily_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_sensors[sensor.car_monthly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_monthly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Monthly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'monthly_energy', + 'unique_id': '12345-abc123-monthly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_monthly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Monthly energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_monthly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.car_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-abc123-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Car Power', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_sensors[sensor.car_weekly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_weekly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Weekly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'weekly_energy', + 'unique_id': '12345-abc123-weekly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.car_weekly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Weekly energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_weekly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.car_yearly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.car_yearly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:car-electric', + 'original_name': 'Yearly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yearly_energy', + 'unique_id': '12345-abc123-yearly-energy', + 'unit_of_measurement': , + }) +# --- +# 
name: test_sensors[sensor.car_yearly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Car Yearly energy', + 'icon': 'mdi:car-electric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.car_yearly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_bill_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_bill_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Bill energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bill_energy', + 'unique_id': '12345-def456-bill-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_bill_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Bill energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_bill_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_daily_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_daily_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Daily energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_energy', + 'unique_id': '12345-def456-daily-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_daily_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Daily energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_daily_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '42', + }) +# --- +# name: test_sensors[sensor.oven_monthly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_monthly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Monthly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'monthly_energy', + 'unique_id': '12345-def456-monthly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_monthly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Monthly energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_monthly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Power', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-def456-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Oven Power', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.0', + }) +# --- +# name: test_sensors[sensor.oven_weekly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_weekly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Weekly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'weekly_energy', + 'unique_id': '12345-def456-weekly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_weekly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Weekly energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_weekly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.oven_yearly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.oven_yearly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:stove', + 'original_name': 'Yearly energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yearly_energy', + 'unique_id': '12345-def456-yearly-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.oven_yearly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Oven Yearly energy', + 'icon': 'mdi:stove', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.oven_yearly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Bill Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Bill Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data 
provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Bill Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Bill Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_bill_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bill To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-bill-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_bill_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Bill To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_bill_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.sense_12345_daily_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Daily Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Daily Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: 
test_sensors[sensor.sense_12345_daily_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_daily_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-daily-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_daily_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Daily To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_daily_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-active-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Sense 12345 Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[sensor.sense_12345_l1_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_l1_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'L1 Voltage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-L1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_l1_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'voltage', + 'friendly_name': 'Sense 12345 L1 Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.sense_12345_l1_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120', + }) +# --- +# name: test_sensors[sensor.sense_12345_l2_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_l2_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'L2 Voltage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-L2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_l2_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'voltage', + 'friendly_name': 'Sense 12345 L2 Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_l2_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '240', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data 
provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Monthly Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Monthly Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Monthly Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Monthly Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Monthly Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_monthly_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-monthly-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_monthly_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Monthly To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_monthly_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-active-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'power', + 'friendly_name': 'Sense 12345 Production', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Weekly Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Weekly Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_production-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Weekly Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Weekly Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_weekly_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-weekly-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_weekly_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Weekly To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_weekly_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Yearly Energy', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly Energy', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly From Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-from_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly From Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_net_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly Net Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly Net Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_net_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_net_production_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yearly Net Production Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-production_pct', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_net_production_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Yearly Net Production Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_net_production_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly Production', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly Production', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_solar_powered_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_solar_powered_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yearly Solar Powered Percentage', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-solar_powered', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_solar_powered_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'friendly_name': 'Sense 12345 Yearly Solar Powered Percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 
'sensor.sense_12345_yearly_solar_powered_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_to_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sense_12345_yearly_to_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Yearly To Grid', + 'platform': 'sense', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345-yearly-to_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.sense_12345_yearly_to_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Sense.com', + 'device_class': 'energy', + 'friendly_name': 'Sense 12345 Yearly To Grid', + 'last_reset': '2024-01-01T01:01:00+00:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sense_12345_yearly_to_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- diff --git a/tests/components/sense/test_binary_sensor.py b/tests/components/sense/test_binary_sensor.py new file mode 100644 index 00000000000000..ae91b7a9a2191c --- /dev/null +++ b/tests/components/sense/test_binary_sensor.py @@ -0,0 +1,68 @@ +"""The tests for Sense binary sensor platform.""" + +from datetime import timedelta +from unittest.mock import MagicMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.sense.const import ACTIVE_UPDATE_RATE +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util.dt import utcnow + +from . 
import setup_platform +from .const import DEVICE_1_NAME, DEVICE_2_NAME + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_binary_sensors( + hass: HomeAssistant, + mock_sense: MagicMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test Sensor.""" + await setup_platform(hass, config_entry, Platform.BINARY_SENSOR) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +async def test_on_off_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense binary sensors.""" + await setup_platform(hass, config_entry, BINARY_SENSOR_DOMAIN) + device_1, device_2 = mock_sense.devices + + state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == STATE_OFF + + state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == STATE_OFF + + device_1.is_on = True + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == STATE_ON + + state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == STATE_OFF + + device_1.is_on = False + device_2.is_on = True + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"binary_sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == STATE_OFF + + state = hass.states.get(f"binary_sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == STATE_ON diff --git a/tests/components/sense/test_config_flow.py b/tests/components/sense/test_config_flow.py index 0ba8d94e17b0e3..acef82dd0ba562 100644 --- a/tests/components/sense/test_config_flow.py +++ b/tests/components/sense/test_config_flow.py @@ -16,18 +16,9 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry +from .const import MOCK_CONFIG -MOCK_CONFIG = { - "timeout": 6, - "email": "test-email", - "password": "test-password", - "access_token": "ABC", - "user_id": "123", - "monitor_id": "456", - "device_id": "789", - "refresh_token": "XYZ", -} +from tests.common import MockConfigEntry @pytest.fixture(name="mock_sense") diff --git a/tests/components/sense/test_sensor.py b/tests/components/sense/test_sensor.py new file mode 100644 index 00000000000000..d43b422ec38c30 --- /dev/null +++ b/tests/components/sense/test_sensor.py @@ -0,0 +1,234 @@ +"""The tests for Sense sensor platform.""" + +from datetime import timedelta +from unittest.mock import MagicMock, PropertyMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from sense_energy import Scale +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.sense.const import ACTIVE_UPDATE_RATE, TREND_UPDATE_RATE +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util.dt import utcnow + +from . 
import setup_platform +from .const import ( + DEVICE_1_DAY_ENERGY, + DEVICE_1_NAME, + DEVICE_2_DAY_ENERGY, + DEVICE_2_NAME, + DEVICE_2_POWER, + MONITOR_ID, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + mock_sense: MagicMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Sense sensors.""" + await setup_platform(hass, config_entry, Platform.SENSOR) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +async def test_device_power_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense device power sensors.""" + device_1, device_2 = mock_sense.devices + device_1.power_w = 0 + device_2.power_w = 0 + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + device_1, device_2 = mock_sense.devices + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == "0" + + device_2.power_w = DEVICE_2_POWER + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_power") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_power") + assert state.state == f"{DEVICE_2_POWER:.1f}" + + +async def test_device_energy_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Sense device energy sensors.""" + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + device_1, device_2 = mock_sense.devices + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") + assert state.state == f"{DEVICE_1_DAY_ENERGY:.0f}" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") + assert state.state == f"{DEVICE_2_DAY_ENERGY:.0f}" + + device_1.energy_kwh[Scale.DAY] = 0 + device_2.energy_kwh[Scale.DAY] = 0 + freezer.tick(timedelta(seconds=TREND_UPDATE_RATE)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") + assert state.state == "0" + + device_2.energy_kwh[Scale.DAY] = DEVICE_1_DAY_ENERGY + freezer.tick(timedelta(seconds=TREND_UPDATE_RATE)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.{DEVICE_1_NAME.lower()}_daily_energy") + assert state.state == "0" + + state = hass.states.get(f"sensor.{DEVICE_2_NAME.lower()}_daily_energy") + assert state.state == f"{DEVICE_1_DAY_ENERGY:.0f}" + + +async def test_voltage_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense voltage sensors.""" + + type(mock_sense).active_voltage = PropertyMock(return_value=[120, 121]) + + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l1_voltage") + assert state.state == "120" + + state = 
hass.states.get(f"sensor.sense_{MONITOR_ID}_l2_voltage") + assert state.state == "121" + + type(mock_sense).active_voltage = PropertyMock(return_value=[122, 123]) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l1_voltage") + assert state.state == "122" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_l2_voltage") + assert state.state == "123" + + +async def test_active_power_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense power sensors.""" + + type(mock_sense).active_power = PropertyMock(return_value=400) + type(mock_sense).active_solar_power = PropertyMock(return_value=500) + + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_energy") + assert state.state == "400" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_production") + assert state.state == "500" + + type(mock_sense).active_power = PropertyMock(return_value=600) + type(mock_sense).active_solar_power = PropertyMock(return_value=700) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=ACTIVE_UPDATE_RATE)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_energy") + assert state.state == "600" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_production") + assert state.state == "700" + + +async def test_trend_energy_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_sense: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test the Sense power sensors.""" + mock_sense.get_stat.side_effect = lambda sensor_type, variant: { + (Scale.DAY, "usage"): 100, + (Scale.DAY, "production"): 200, + (Scale.DAY, "from_grid"): 300, + (Scale.DAY, "to_grid"): 400, + (Scale.DAY, "net_production"): 500, + (Scale.DAY, "production_pct"): 600, + (Scale.DAY, "solar_powered"): 700, + }.get((sensor_type, variant), 0) + + await setup_platform(hass, config_entry, SENSOR_DOMAIN) + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_energy") + assert state.state == "100" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_production") + assert state.state == "200" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_from_grid") + assert state.state == "300" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_to_grid") + assert state.state == "400" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_net_production") + assert state.state == "500" + + mock_sense.get_stat.side_effect = lambda sensor_type, variant: { + (Scale.DAY, "usage"): 1000, + (Scale.DAY, "production"): 2000, + (Scale.DAY, "from_grid"): 3000, + (Scale.DAY, "to_grid"): 4000, + (Scale.DAY, "net_production"): 5000, + (Scale.DAY, "production_pct"): 6000, + (Scale.DAY, "solar_powered"): 7000, + }.get((sensor_type, variant), 0) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=600)) + await hass.async_block_till_done() + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_energy") + assert state.state == "1000" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_production") + assert state.state == "2000" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_from_grid") + assert state.state == "3000" + + state = hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_to_grid") + assert state.state == "4000" + + state = 
hass.states.get(f"sensor.sense_{MONITOR_ID}_daily_net_production") + assert state.state == "5000" diff --git a/tests/components/sensibo/test_config_flow.py b/tests/components/sensibo/test_config_flow.py index 3f53495f0f246f..d6edb1c7ae0a34 100644 --- a/tests/components/sensibo/test_config_flow.py +++ b/tests/components/sensibo/test_config_flow.py @@ -348,3 +348,171 @@ async def test_flow_reauth_no_username_or_device( assert result2["step_id"] == "reauth_confirm" assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": p_error} + + +async def test_reconfigure_flow(hass: HomeAssistant) -> None: + """Test a reconfigure flow.""" + entry = MockConfigEntry( + version=2, + domain=DOMAIN, + unique_id="username", + data={"api_key": "1234567890"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + ), + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_me", + return_value={"result": {"username": "username"}}, + ) as mock_sensibo, + patch( + "homeassistant.components.sensibo.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == {"api_key": "1234567891"} + + assert len(mock_sensibo.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("sideeffect", "p_error"), + [ + (aiohttp.ClientConnectionError, "cannot_connect"), + (TimeoutError, "cannot_connect"), + (AuthenticationError, "invalid_auth"), + (SensiboError, "cannot_connect"), + ], +) +async def test_reconfigure_flow_error( + hass: HomeAssistant, sideeffect: Exception, p_error: str +) -> None: + """Test a reconfigure flow with error.""" + entry = MockConfigEntry( + version=2, + domain=DOMAIN, + unique_id="username", + data={"api_key": "1234567890"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + side_effect=sideeffect, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567890"}, + ) + await hass.async_block_till_done() + + assert result2["step_id"] == "reconfigure" + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": p_error} + + with ( + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + return_value={"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + ), + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_me", + return_value={"result": {"username": "username"}}, + ), + patch( + "homeassistant.components.sensibo.async_setup_entry", + return_value=True, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == {"api_key": "1234567891"} + + 
+@pytest.mark.parametrize( + ("get_devices", "get_me", "p_error"), + [ + ( + {"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + {"result": {}}, + "no_username", + ), + ( + {"result": []}, + {"result": {"username": "username"}}, + "no_devices", + ), + ( + {"result": [{"id": "xyzxyz"}, {"id": "abcabc"}]}, + {"result": {"username": "username2"}}, + "incorrect_api_key", + ), + ], +) +async def test_flow_reconfigure_no_username_or_device( + hass: HomeAssistant, + get_devices: dict[str, Any], + get_me: dict[str, Any], + p_error: str, +) -> None: + """Test config flow get no username from api.""" + entry = MockConfigEntry( + version=2, + domain=DOMAIN, + unique_id="username", + data={"api_key": "1234567890"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + with ( + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", + return_value=get_devices, + ), + patch( + "homeassistant.components.sensibo.util.SensiboClient.async_get_me", + return_value=get_me, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_API_KEY: "1234567890", + }, + ) + await hass.async_block_till_done() + + assert result2["step_id"] == "reconfigure" + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": p_error} diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 04e0a1b7de8b7c..0e8c2a5e188e3c 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -4233,8 +4233,8 @@ def set_state(entity_id, state, **kwargs): @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4445,8 +4445,8 @@ async def test_validate_statistics_unit_ignore_device_class( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4682,6 +4682,65 @@ async def test_validate_statistics_state_class_removed( await assert_validation_result(hass, client, {}, {}) +@pytest.mark.parametrize( + ("units", "attributes", "unit"), + [ + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), + ], +) +async def test_validate_statistics_state_class_removed_issue_cleaned_up( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + units, + attributes, + unit, +) -> None: + """Test validate_statistics.""" + now = get_start_time(dt_util.utcnow()) + + hass.config.units = units + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + # No statistics, no state - empty response + await assert_validation_result(hass, client, {}, {}) + + # No statistics, valid state - empty response + hass.states.async_set( + 
"sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) + await hass.async_block_till_done() + await assert_validation_result(hass, client, {}, {}) + + # Statistics has run, empty response + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + # State update with invalid state class, expect error + _attributes = dict(attributes) + _attributes.pop("state_class") + hass.states.async_set( + "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() + ) + await hass.async_block_till_done() + expected = { + "sensor.test": [ + { + "data": {"statistic_id": "sensor.test"}, + "type": "state_class_removed", + } + ], + } + await assert_validation_result(hass, client, expected, {"state_class_removed"}) + + # Remove the statistics - empty response + get_instance(hass).async_clear_statistics(["sensor.test"]) + await async_recorder_block_till_done(hass) + await assert_validation_result(hass, client, {}, {}) + + @pytest.mark.parametrize( ("units", "attributes", "unit"), [ @@ -5371,3 +5430,51 @@ def _fetch_states() -> list[State]: assert len(states) == 1 assert ATTR_OPTIONS not in states[0].attributes assert ATTR_FRIENDLY_NAME in states[0].attributes + + +async def test_clean_up_repairs( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test cleaning up repairs.""" + await async_setup_component(hass, "sensor", {}) + issue_registry = ir.async_get(hass) + client = await hass_ws_client() + + # Create some issues + def create_issue(domain: str, issue_id: str, data: dict | None) -> None: + ir.async_create_issue( + hass, + domain, + issue_id, + data=data, + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key="", + ) + + create_issue("test", "test_issue", None) + create_issue(DOMAIN, "test_issue_1", None) + create_issue(DOMAIN, "test_issue_2", {"issue_type": "another_issue"}) + create_issue(DOMAIN, "test_issue_3", {"issue_type": "state_class_removed"}) + create_issue(DOMAIN, "test_issue_4", {"issue_type": "units_changed"}) + + # Check the issues + assert set(issue_registry.issues) == { + ("test", "test_issue"), + ("sensor", "test_issue_1"), + ("sensor", "test_issue_2"), + ("sensor", "test_issue_3"), + ("sensor", "test_issue_4"), + } + + # Request update of issues + await client.send_json_auto_id({"type": "recorder/update_statistics_issues"}) + response = await client.receive_json() + assert response["success"] + + # Check the issues + assert set(issue_registry.issues) == { + ("test", "test_issue"), + ("sensor", "test_issue_1"), + ("sensor", "test_issue_2"), + } diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr index 568acea33a5fe5..e172a2de5947dd 100644 --- a/tests/components/seventeentrack/snapshots/test_services.ambr +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -71,3 +71,32 @@ ]), }) # --- +# name: test_packages_with_none_timestamp + dict({ + 'packages': list([ + dict({ + 'destination_country': 'Belgium', + 'friendly_name': 'friendly name 1', + 'info_text': 'info text 1', + 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', + 'status': 'In Transit', + 'tracking_info_language': 'Unknown', + 'tracking_number': '456', + }), + dict({ + 'destination_country': 'Belgium', + 'friendly_name': 'friendly name 2', + 'info_text': 'info text 1', + 'location': 'location 1', + 'origin_country': 'Belgium', + 
'package_type': 'Registered Parcel', + 'status': 'Delivered', + 'timestamp': '2020-08-10T10:32:00+00:00', + 'tracking_info_language': 'Unknown', + 'tracking_number': '789', + }), + ]), + }) +# --- diff --git a/tests/components/seventeentrack/test_services.py b/tests/components/seventeentrack/test_services.py index 54c9349c121fdc..bbd5644ad63c7d 100644 --- a/tests/components/seventeentrack/test_services.py +++ b/tests/components/seventeentrack/test_services.py @@ -150,6 +150,28 @@ async def test_archive_package( ) +async def test_packages_with_none_timestamp( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the service returns all packages even when a timestamp is missing.""" + await _mock_invalid_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + service_response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PACKAGES, + { + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + + assert service_response == snapshot + + async def _mock_packages(mock_seventeentrack): package1 = get_package(status=10) package2 = get_package( @@ -167,3 +189,19 @@ async def _mock_packages(mock_seventeentrack): package2, package3, ] + + +async def _mock_invalid_packages(mock_seventeentrack): + package1 = get_package( + status=10, + timestamp=None, + ) + package2 = get_package( + tracking_number="789", + friendly_name="friendly name 2", + status=40, + ) + mock_seventeentrack.return_value.profile.packages.return_value = [ + package1, + package2, + ] diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 316f9794471845..93b3a46910c3bb 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -1364,7 +1364,7 @@ async def test_reconfigure_successful( result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch( "homeassistant.components.shelly.config_flow.get_info", @@ -1396,7 +1396,7 @@ async def test_reconfigure_unsuccessful( result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch( "homeassistant.components.shelly.config_flow.get_info", @@ -1433,7 +1433,7 @@ async def test_reconfigure_with_exception( result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" with patch("homeassistant.components.shelly.config_flow.get_info", side_effect=exc): result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/shelly/test_cover.py b/tests/components/shelly/test_cover.py index f2b8567f540c0d..40a364fd43583a 100644 --- a/tests/components/shelly/test_cover.py +++ b/tests/components/shelly/test_cover.py @@ -19,10 +19,7 @@ SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -59,7 +56,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == 
STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING await hass.services.async_call( COVER_DOMAIN, @@ -67,7 +64,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -75,7 +72,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED entry = entity_registry.async_get(entity_id) assert entry @@ -89,11 +86,11 @@ async def test_block_device_update( monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 0) await init_integration(hass, 1) - assert hass.states.get("cover.test_name").state == STATE_CLOSED + assert hass.states.get("cover.test_name").state == CoverState.CLOSED monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 100) mock_block_device.mock_update() - assert hass.states.get("cover.test_name").state == STATE_OPEN + assert hass.states.get("cover.test_name").state == CoverState.OPEN async def test_block_device_no_roller_blocks( @@ -134,7 +131,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "state", "closing" @@ -146,7 +143,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await hass.services.async_call( @@ -156,7 +153,7 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED entry = entity_registry.async_get(entity_id) assert entry @@ -178,11 +175,11 @@ async def test_rpc_device_update( """Test RPC device update.""" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == STATE_CLOSED + assert hass.states.get("cover.test_cover_0").state == CoverState.CLOSED mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "open") mock_rpc_device.mock_update() - assert hass.states.get("cover.test_cover_0").state == STATE_OPEN + assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN async def test_rpc_device_no_position_control( @@ -193,7 +190,7 @@ async def test_rpc_device_no_position_control( monkeypatch, mock_rpc_device, "cover:0", "pos_control", False ) await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == STATE_OPEN + assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN async def test_rpc_cover_tilt( diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index c891d1d7b2d896..5c7933afd7e8df 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -572,3 +572,62 @@ async def test_rpc_remove_virtual_switch_when_orphaned( entry = entity_registry.async_get(entity_id) assert not entry + 
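The script-switch test added below, like the other Shelly RPC tests in this file, leans on one recurring pattern: mutate the mocked device's status dict, fire mock_update(), and assert that the entity state follows. A minimal sketch of that pattern as a reusable helper is given here for orientation; it is illustrative only and not part of this patch, and the entity_id/key/attr arguments are placeholders for whatever the test under edit uses.

# Editorial sketch, not applied by this patch.
from unittest.mock import Mock

import pytest

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant


async def assert_rpc_state_follows_status(
    hass: HomeAssistant,
    mock_rpc_device: Mock,
    monkeypatch: pytest.MonkeyPatch,
    entity_id: str,
    key: str,
    attr: str,
) -> None:
    """Flip a boolean in the mocked RPC status and check the entity tracks it."""
    # Simulate the device reporting the attribute as off, then push the update.
    monkeypatch.setitem(mock_rpc_device.status[key], attr, False)
    mock_rpc_device.mock_update()
    assert hass.states.get(entity_id).state == STATE_OFF

    # Simulate the device reporting the attribute as on again.
    monkeypatch.setitem(mock_rpc_device.status[key], attr, True)
    mock_rpc_device.mock_update()
    assert hass.states.get(entity_id).state == STATE_ON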
+ +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_rpc_device_script_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test a script switch for RPC device.""" + config = deepcopy(mock_rpc_device.config) + key = "script:1" + script_name = "aioshelly_ble_integration" + entity_id = f"switch.test_name_{script_name}" + config[key] = { + "id": 1, + "name": script_name, + "enable": False, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status[key] = { + "running": True, + } + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{key}-script" + + monkeypatch.setitem(mock_rpc_device.status[key], "running", False) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + monkeypatch.setitem(mock_rpc_device.status[key], "running", True) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index a89dfcd1e717de..cd4cdf877a5829 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -16,6 +16,7 @@ ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, ATTR_RELEASE_URL, + ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, UpdateEntityFeature, @@ -64,6 +65,7 @@ async def test_block_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS @@ -80,6 +82,7 @@ async def test_block_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] == GEN1_RELEASE_URL monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0") @@ -90,6 +93,7 @@ async def test_block_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -117,6 +121,7 @@ async def test_block_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None monkeypatch.setitem( mock_block_device.status["update"], "beta_version", "2.0.0-beta" @@ -128,6 +133,7 @@ async def test_block_beta_update( 
assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] is None await hass.services.async_call( @@ -143,6 +149,7 @@ async def test_block_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0-beta") await mock_rest_update(hass, freezer) @@ -152,6 +159,7 @@ async def test_block_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -292,6 +300,7 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS @@ -309,6 +318,7 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL inject_rpc_device_event( @@ -326,7 +336,9 @@ async def test_rpc_update( }, ) - assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 0 + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 inject_rpc_device_event( monkeypatch, @@ -344,7 +356,9 @@ async def test_rpc_update( }, ) - assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 50 + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 inject_rpc_device_event( monkeypatch, @@ -368,6 +382,7 @@ async def test_rpc_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry @@ -406,6 +421,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL @@ -417,6 +433,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) entry = entity_registry.async_get(entity_id) @@ 
-456,6 +473,7 @@ async def test_rpc_restored_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) # Make device online @@ -472,6 +490,7 @@ async def test_rpc_restored_sleeping_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) @@ -522,6 +541,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) @@ -551,6 +571,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "1" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_RELEASE_URL] is None monkeypatch.setitem( @@ -568,6 +589,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None await hass.services.async_call( UPDATE_DOMAIN, @@ -596,7 +618,8 @@ async def test_rpc_beta_update( assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" - assert state.attributes[ATTR_IN_PROGRESS] == 0 + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 inject_rpc_device_event( monkeypatch, @@ -614,7 +637,9 @@ async def test_rpc_beta_update( }, ) - assert hass.states.get(entity_id).attributes[ATTR_IN_PROGRESS] == 40 + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 40 inject_rpc_device_event( monkeypatch, @@ -638,6 +663,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None entry = entity_registry.async_get(entity_id) assert entry diff --git a/tests/components/shopping_list/test_init.py b/tests/components/shopping_list/test_init.py index 4e758764e3d9bc..276602f794eab4 100644 --- a/tests/components/shopping_list/test_init.py +++ b/tests/components/shopping_list/test_init.py @@ -32,8 +32,10 @@ async def test_add_item(hass: HomeAssistant, sl_setup) -> None: """Test adding an item intent.""" response = await intent.async_handle( - hass, "test", "HassShoppingListAddItem", {"item": {"value": "beer"}} + hass, "test", "HassShoppingListAddItem", {"item": {"value": " beer "}} ) + assert len(hass.data[DOMAIN].items) == 1 + assert hass.data[DOMAIN].items[0]["name"] == "beer" # name was trimmed # Response text is now handled by default conversation agent assert response.response_type == 
intent.IntentResponseType.ACTION_DONE diff --git a/tests/components/simplefin/snapshots/test_binary_sensor.ambr b/tests/components/simplefin/snapshots/test_binary_sensor.ambr index be26ae1a03dd0e..44fe2a10b78008 100644 --- a/tests/components/simplefin/snapshots/test_binary_sensor.ambr +++ b/tests/components/simplefin/snapshots/test_binary_sensor.ambr @@ -47,54 +47,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.investments_dr_evil_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_dr_evil_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_dr_evil_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments Dr Evil Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_dr_evil_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.investments_my_checking_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -143,54 +95,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.investments_my_checking_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_my_checking_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_my_checking_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments My Checking Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_my_checking_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -239,54 +143,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Investments NerdCorp Series B Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -335,54 +191,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Mythical RandomSavings Castle Mortgage Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -431,54 +239,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 
'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Mythical RandomSavings Unicorn Pot Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -527,54 +287,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -623,54 +335,6 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'The Bank of Go PRIME SAVINGS Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', - 'last_changed': , 
- 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -719,51 +383,3 @@ 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'simplefin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'possible_error', - 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_possible_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by SimpleFIN API', - 'device_class': 'problem', - 'friendly_name': 'The Bank of Go The Bank Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 70fd9db07446d4..71a36c7885a1e4 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -38,7 +38,6 @@ STORAGE_KEY, STORAGE_VERSION, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import ( CONF_ACCESS_TOKEN, @@ -47,6 +46,7 @@ CONF_WEBHOOK_ID, ) from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/smartthings/test_config_flow.py b/tests/components/smartthings/test_config_flow.py index 49444e47780bde..3621e58bc3d614 100644 --- a/tests/components/smartthings/test_config_flow.py +++ b/tests/components/smartthings/test_config_flow.py @@ -16,9 +16,9 @@ CONF_LOCATION_ID, DOMAIN, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry diff --git a/tests/components/smartthings/test_cover.py b/tests/components/smartthings/test_cover.py index bb292b53ee80de..31443c12ab2cf4 100644 --- a/tests/components/smartthings/test_cover.py +++ b/tests/components/smartthings/test_cover.py @@ -13,10 +13,7 @@ SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE from homeassistant.config_entries import ConfigEntryState @@ -87,7 +84,7 @@ async def test_open(hass: HomeAssistant, device_factory) -> 
None: for entity_id in entity_ids: state = hass.states.get(entity_id) assert state is not None - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING async def test_close(hass: HomeAssistant, device_factory) -> None: @@ -112,7 +109,7 @@ async def test_close(hass: HomeAssistant, device_factory) -> None: for entity_id in entity_ids: state = hass.states.get(entity_id) assert state is not None - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING async def test_set_cover_position_switch_level( @@ -136,7 +133,7 @@ async def test_set_cover_position_switch_level( state = hass.states.get("cover.shade") # Result of call does not update state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_BATTERY_LEVEL] == 95 assert state.attributes[ATTR_CURRENT_POSITION] == 10 # Ensure API called @@ -167,7 +164,7 @@ async def test_set_cover_position(hass: HomeAssistant, device_factory) -> None: state = hass.states.get("cover.shade") # Result of call does not update state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING assert state.attributes[ATTR_BATTERY_LEVEL] == 95 assert state.attributes[ATTR_CURRENT_POSITION] == 10 # Ensure API called @@ -208,14 +205,14 @@ async def test_update_to_open_from_signal(hass: HomeAssistant, device_factory) - ) await setup_platform(hass, COVER_DOMAIN, devices=[device]) device.status.update_attribute_value(Attribute.door, "open") - assert hass.states.get("cover.garage").state == STATE_OPENING + assert hass.states.get("cover.garage").state == CoverState.OPENING # Act async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) # Assert await hass.async_block_till_done() state = hass.states.get("cover.garage") assert state is not None - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN async def test_update_to_closed_from_signal( @@ -228,14 +225,14 @@ async def test_update_to_closed_from_signal( ) await setup_platform(hass, COVER_DOMAIN, devices=[device]) device.status.update_attribute_value(Attribute.door, "closed") - assert hass.states.get("cover.garage").state == STATE_CLOSING + assert hass.states.get("cover.garage").state == CoverState.CLOSING # Act async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) # Assert await hass.async_block_till_done() state = hass.states.get("cover.garage") assert state is not None - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async def test_unload_config_entry(hass: HomeAssistant, device_factory) -> None: diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index fa30fa258cfa12..e518f84aecb8c8 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -23,8 +23,8 @@ PLATFORMS, SIGNAL_SMARTTHINGS_UPDATE, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_connect diff --git a/tests/components/smarty/__init__.py b/tests/components/smarty/__init__.py new file mode 100644 index 00000000000000..c5ae7f2d3829f8 --- /dev/null +++ b/tests/components/smarty/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Smarty integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common 
import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Set up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/smarty/conftest.py b/tests/components/smarty/conftest.py new file mode 100644 index 00000000000000..a9b518d88f42b1 --- /dev/null +++ b/tests/components/smarty/conftest.py @@ -0,0 +1,64 @@ +"""Smarty tests configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.smarty import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override integration setup.""" + with patch( + "homeassistant.components.smarty.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_smarty() -> Generator[AsyncMock]: + """Mock a Smarty client.""" + with ( + patch( + "homeassistant.components.smarty.coordinator.Smarty", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.smarty.config_flow.Smarty", + new=mock_client, + ), + ): + client = mock_client.return_value + client.update.return_value = True + client.fan_speed = 100 + client.warning = False + client.alarm = False + client.boost = False + client.enable_boost.return_value = True + client.disable_boost.return_value = True + client.supply_air_temperature = 20 + client.extract_air_temperature = 23 + client.outdoor_air_temperature = 24 + client.supply_fan_speed = 66 + client.extract_fan_speed = 100 + client.filter_timer = 31 + client.get_configuration_version.return_value = 111 + client.get_software_version.return_value = 127 + client.reset_filters_timer.return_value = True + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "192.168.0.2"}, + entry_id="01JAZ5DPW8C62D620DGYNG2R8H", + ) diff --git a/tests/components/smarty/snapshots/test_binary_sensor.ambr b/tests/components/smarty/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000000..2f943a2501249e --- /dev/null +++ b/tests/components/smarty/snapshots/test_binary_sensor.ambr @@ -0,0 +1,141 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.mock_title_alarm-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_title_alarm', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alarm', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_alarm-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Alarm', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_alarm', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_boost_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_title_boost_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Boost state', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_state', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_boost_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Boost state', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_boost_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_warning-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.mock_title_warning', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Warning', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'warning', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_warning', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_warning-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Warning', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_warning', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_button.ambr b/tests/components/smarty/snapshots/test_button.ambr new file mode 100644 index 00000000000000..38849bd2b2eedd --- /dev/null +++ b/tests/components/smarty/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.mock_title_reset_filters_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.mock_title_reset_filters_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filters timer', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filters_timer', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_reset_filters_timer', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.mock_title_reset_filters_timer-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Reset filters timer', + }), + 'context': , + 'entity_id': 'button.mock_title_reset_filters_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_fan.ambr b/tests/components/smarty/snapshots/test_fan.ambr new file mode 100644 index 00000000000000..8ca95beeb86129 --- /dev/null +++ b/tests/components/smarty/snapshots/test_fan.ambr @@ -0,0 +1,54 @@ +# serializer version: 1 +# name: test_all_entities[fan.mock_title-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.mock_title', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'fan', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[fan.mock_title-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title', + 'percentage': 0, + 'percentage_step': 33.333333333333336, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.mock_title', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_init.ambr b/tests/components/smarty/snapshots/test_init.ambr new file mode 100644 index 00000000000000..b25cdb9dc3a8ab --- /dev/null +++ b/tests/components/smarty/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 111, + 'id': , + 'identifiers': set({ + tuple( + 'smarty', + '01JAZ5DPW8C62D620DGYNG2R8H', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Salda', + 'model': None, + 'model_id': None, + 'name': 'Mock Title', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 127, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/smarty/snapshots/test_sensor.ambr b/tests/components/smarty/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..2f713db7f839fe --- /dev/null +++ b/tests/components/smarty/snapshots/test_sensor.ambr @@ -0,0 +1,286 @@ +# serializer version: 1 +# name: test_all_entities[sensor.mock_title_extract_air_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_extract_air_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extract air temperature', + 'platform': 'smarty', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'extract_air_temperature', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_extract_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.mock_title_extract_air_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Extract air temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_extract_air_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23', + }) +# --- +# name: test_all_entities[sensor.mock_title_extract_fan_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_extract_fan_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Extract fan speed', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'extract_fan_speed', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_extract_fan_speed', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_all_entities[sensor.mock_title_extract_fan_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Extract fan speed', + 'unit_of_measurement': 'rpm', + }), + 'context': , + 'entity_id': 'sensor.mock_title_extract_fan_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_all_entities[sensor.mock_title_filter_days_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_filter_days_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter days left', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_days_left', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_filter_days_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mock_title_filter_days_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Filter days left', + }), + 'context': , + 'entity_id': 'sensor.mock_title_filter_days_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-11-21T01:00:00+00:00', + }) +# --- +# name: test_all_entities[sensor.mock_title_outdoor_air_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_outdoor_air_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Outdoor air temperature', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outdoor_air_temperature', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_outdoor_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.mock_title_outdoor_air_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Outdoor air temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_outdoor_air_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_air_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_supply_air_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply air temperature', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_air_temperature', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_supply_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_air_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Supply air temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_supply_air_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_fan_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_supply_fan_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Supply fan speed', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_fan_speed', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_supply_fan_speed', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_all_entities[sensor.mock_title_supply_fan_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Supply fan speed', + 'unit_of_measurement': 'rpm', + }), + 'context': , + 'entity_id': 'sensor.mock_title_supply_fan_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '66', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_switch.ambr b/tests/components/smarty/snapshots/test_switch.ambr new file mode 100644 index 00000000000000..be1da7c696174c --- /dev/null +++ b/tests/components/smarty/snapshots/test_switch.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[switch.mock_title_boost-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Boost', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.mock_title_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Boost', + }), + 'context': , + 'entity_id': 'switch.mock_title_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smarty/test_binary_sensor.py b/tests/components/smarty/test_binary_sensor.py new file mode 100644 index 00000000000000..d28fb44e1cec54 --- /dev/null +++ b/tests/components/smarty/test_binary_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Smarty binary sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_button.py b/tests/components/smarty/test_button.py new file mode 100644 index 00000000000000..0a7b67f2be6a69 --- /dev/null +++ b/tests/components/smarty/test_button.py @@ -0,0 +1,45 @@ +"""Tests for the Smarty button platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + target={ATTR_ENTITY_ID: "button.mock_title_reset_filters_timer"}, + blocking=True, + ) + mock_smarty.reset_filters_timer.assert_called_once_with() diff --git a/tests/components/smarty/test_config_flow.py b/tests/components/smarty/test_config_flow.py new file mode 100644 index 00000000000000..fad4f27ca1c66f --- /dev/null +++ b/tests/components/smarty/test_config_flow.py @@ -0,0 +1,165 @@ +"""Test the smarty config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.smarty.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the full flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "192.168.0.2" + assert result["data"] == {CONF_HOST: "192.168.0.2"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_cannot_connect( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + + mock_smarty.update.return_value = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_smarty.update.return_value = True + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_unknown_error( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle unknown error.""" + + mock_smarty.update.side_effect = Exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + mock_smarty.update.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_existing_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test we handle existing entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow( + hass: HomeAssistant, mock_smarty: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the import flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Smarty" + assert result["data"] == {CONF_HOST: "192.168.0.2"} + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_cannot_connect( + hass: HomeAssistant, mock_smarty: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + + mock_smarty.update.return_value = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_import_unknown_error( + hass: HomeAssistant, mock_smarty: AsyncMock +) -> None: + """Test we handle unknown error.""" + + mock_smarty.update.side_effect = Exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "192.168.0.2", CONF_NAME: "Smarty"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" diff --git a/tests/components/smarty/test_fan.py b/tests/components/smarty/test_fan.py new file mode 100644 index 00000000000000..2c0135b7aa2df8 --- /dev/null +++ b/tests/components/smarty/test_fan.py @@ -0,0 +1,27 @@ +"""Tests for the Smarty fan platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.FAN]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_init.py b/tests/components/smarty/test_init.py new file mode 100644 index 00000000000000..0366ea9eade5bd --- /dev/null +++ b/tests/components/smarty/test_init.py @@ -0,0 +1,82 @@ +"""Tests for the Smarty component.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.smarty import DOMAIN +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import device_registry as dr, issue_registry as ir +from homeassistant.setup import async_setup_component + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_import_flow( + hass: HomeAssistant, + mock_smarty: AsyncMock, + issue_registry: ir.IssueRegistry, + mock_setup_entry: AsyncMock, +) -> None: + """Test import flow.""" + assert await async_setup_component( + hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml_smarty") in issue_registry.issues + + +async def test_import_flow_already_exists( + hass: HomeAssistant, + mock_smarty: AsyncMock, + issue_registry: ir.IssueRegistry, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test import flow when entry already exists.""" + mock_config_entry.add_to_hass(hass) + assert await async_setup_component( + hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml_smarty") in issue_registry.issues + + +async def test_import_flow_error( + hass: HomeAssistant, + mock_smarty: AsyncMock, + issue_registry: ir.IssueRegistry, + mock_setup_entry: AsyncMock, +) -> None: + """Test import flow when error occurs.""" + mock_smarty.update.return_value = False + assert await async_setup_component( + hass, DOMAIN, {DOMAIN: {CONF_HOST: "192.168.0.2", CONF_NAME: "smarty"}} + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + assert ( + DOMAIN, + "deprecated_yaml_import_issue_cannot_connect", + ) in issue_registry.issues + + +async def test_device( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device.""" + await setup_integration(hass, mock_config_entry) + device = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.entry_id)} + ) + assert device + assert device == snapshot diff --git a/tests/components/smarty/test_sensor.py b/tests/components/smarty/test_sensor.py new file mode 100644 index 00000000000000..a534a2ebb0fa9f --- /dev/null +++ b/tests/components/smarty/test_sensor.py @@ -0,0 
+1,29 @@ +"""Tests for the Smarty sensor platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.freeze_time("2023-10-21") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/smarty/test_switch.py b/tests/components/smarty/test_switch.py new file mode 100644 index 00000000000000..1a6748e2d23ede --- /dev/null +++ b/tests/components/smarty/test_switch.py @@ -0,0 +1,58 @@ +"""Tests for the Smarty switch platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, + blocking=True, + ) + mock_smarty.enable_boost.assert_called_once_with() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, + blocking=True, + ) + mock_smarty.disable_boost.assert_called_once_with() diff --git a/tests/components/smhi/common.py b/tests/components/smhi/common.py deleted file mode 100644 index 7339ba76ac1fb6..00000000000000 --- a/tests/components/smhi/common.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Common test utilities.""" - -from unittest.mock import Mock - - -class AsyncMock(Mock): - """Implements Mock async.""" - - async def __call__(self, *args, **kwargs): - """Hack for async support for Mock.""" - return super().__call__(*args, **kwargs) diff --git a/tests/components/smlight/snapshots/test_update.ambr b/tests/components/smlight/snapshots/test_update.ambr index 755c9bc7312109..ed0085dcdc8ea8 100644 --- a/tests/components/smlight/snapshots/test_update.ambr +++ b/tests/components/smlight/snapshots/test_update.ambr @@ -37,6 +37,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 
'entity_picture': 'https://brands.home-assistant.io/_/smlight/icon.png', 'friendly_name': 'Mock Title Core firmware', 'in_progress': False, @@ -47,6 +48,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_core_firmware', @@ -94,6 +96,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/smlight/icon.png', 'friendly_name': 'Mock Title Zigbee firmware', 'in_progress': False, @@ -104,6 +107,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.mock_title_zigbee_firmware', diff --git a/tests/components/smlight/test_update.py b/tests/components/smlight/test_update.py index 7bff12bb027a81..0bb2e34d7cabb7 100644 --- a/tests/components/smlight/test_update.py +++ b/tests/components/smlight/test_update.py @@ -1,6 +1,7 @@ """Tests for the SMLIGHT update platform.""" -from unittest.mock import MagicMock +from datetime import timedelta +from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory from pysmlight import Firmware, Info @@ -14,6 +15,7 @@ ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, + ATTR_UPDATE_PERCENTAGE, DOMAIN as PLATFORM, SERVICE_INSTALL, ) @@ -87,7 +89,9 @@ async def test_update_setup( await hass.config_entries.async_unload(entry.entry_id) +@patch("homeassistant.components.smlight.update.asyncio.sleep", return_value=None) async def test_update_firmware( + mock_sleep: MagicMock, hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_config_entry: MockConfigEntry, @@ -114,7 +118,8 @@ async def test_update_firmware( event_function(MOCK_FIRMWARE_PROGRESS) state = hass.states.get(entity_id) - assert state.attributes[ATTR_IN_PROGRESS] == 50 + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 event_function = get_mock_event_function(mock_smlight_client, SmEvents.FW_UPD_done) @@ -124,7 +129,7 @@ async def test_update_firmware( sw_version="v2.5.2", ) - freezer.tick(SCAN_FIRMWARE_INTERVAL) + freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() @@ -211,6 +216,55 @@ async def _call_event_function(event: MessageEvent): await _call_event_function(MOCK_FIRMWARE_FAIL) state = hass.states.get(entity_id) assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None + + +@patch("homeassistant.components.smlight.const.LOGGER.warning") +async def test_update_reboot_timeout( + mock_warning: MagicMock, + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test firmware updates.""" + await setup_integration(hass, mock_config_entry) + entity_id = "update.mock_title_core_firmware" + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_INSTALLED_VERSION] == "v2.3.6" + assert state.attributes[ATTR_LATEST_VERSION] == "v2.5.2" + + with ( + patch( + "homeassistant.components.smlight.update.asyncio.timeout", + side_effect=TimeoutError, + ), + patch( + "homeassistant.components.smlight.update.asyncio.sleep", + return_value=None, + ), + ): + await hass.services.async_call( + PLATFORM, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=False, + ) + + assert 
len(mock_smlight_client.fw_update.mock_calls) == 1 + + event_function = get_mock_event_function( + mock_smlight_client, SmEvents.FW_UPD_done + ) + + event_function(MOCK_FIRMWARE_DONE) + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_warning.assert_called_once() async def test_update_release_notes( diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index 22b85a590ffb27..2d4b4e32522fb4 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -65,7 +65,7 @@ def mock_solarlog_connector(): mock_solarlog_api.update_device_list.return_value = DEVICE_LIST mock_solarlog_api.update_inverter_data.return_value = INVERTER_DATA mock_solarlog_api.device_name = {0: "Inverter 1", 1: "Inverter 2"}.get - mock_solarlog_api.device_enabled = {0: True, 1: False}.get + mock_solarlog_api.device_enabled = {0: True, 1: True}.get mock_solarlog_api.password.return_value = "pwd" with ( diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 9f95e04a38fa09..32be560fc629ca 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -1,61 +1,12 @@ # serializer version: 1 -# name: test_all_entities[sensor.inverter_1_consumption_total-entry] +# name: test_all_entities[sensor.inverter_1_consumption_year-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_consumption_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption total', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_1-consumption_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.inverter_1_consumption_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter 1 Consumption total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_1_consumption_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '354.687', - }) -# --- -# name: test_all_entities[sensor.inverter_1_consumption_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ + 'state_class': , }), - 'area_id': None, - 'capabilities': None, 'config_entry_id': , 'device_class': None, 'device_id': , @@ -85,7 +36,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_1-consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_1_consumption_year', 'unit_of_measurement': , }) # --- @@ -94,6 +45,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Inverter 1 Consumption year', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -135,7 +87,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_power', - 'unique_id': 
'ce5f5431554d101905d31797e1232da8-inverter_1-current_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_1_current_power', 'unit_of_measurement': , }) # --- @@ -160,7 +112,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -190,7 +144,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_2-consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_2_consumption_year', 'unit_of_measurement': , }) # --- @@ -199,6 +153,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'Inverter 2 Consumption year', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -240,7 +195,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_2-current_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_inverter_2_current_power', 'unit_of_measurement': , }) # --- @@ -291,7 +246,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'alternator_loss', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-alternator_loss', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_alternator_loss', 'unit_of_measurement': , }) # --- @@ -345,7 +300,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'capacity', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-capacity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_capacity', 'unit_of_measurement': '%', }) # --- @@ -396,7 +351,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_ac', 'unit_of_measurement': , }) # --- @@ -421,7 +376,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -451,7 +408,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_day', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_day', 'unit_of_measurement': , }) # --- @@ -460,6 +417,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Consumption day', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -475,7 +433,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -505,7 +465,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_month', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_month', 'unit_of_measurement': , }) # --- @@ -514,6 +474,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Consumption month', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -561,7 +522,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_total', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_total', 'unit_of_measurement': , }) # --- 
@@ -586,7 +547,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -616,7 +579,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_year', 'unit_of_measurement': , }) # --- @@ -625,6 +588,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Consumption year', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -670,7 +634,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_yesterday', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_yesterday', 'unit_of_measurement': , }) # --- @@ -723,7 +687,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'efficiency', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-efficiency', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_efficiency', 'unit_of_measurement': '%', }) # --- @@ -748,7 +712,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -772,7 +738,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-total_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_total_power', 'unit_of_measurement': , }) # --- @@ -781,6 +747,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'solarlog Installed peak power', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -820,7 +787,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'last_update', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-last_updated', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_last_updated', 'unit_of_measurement': None, }) # --- @@ -869,7 +836,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-power_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_ac', 'unit_of_measurement': , }) # --- @@ -920,7 +887,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_available', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-power_available', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_available', 'unit_of_measurement': , }) # --- @@ -971,7 +938,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-power_dc', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_dc', 'unit_of_measurement': , }) # --- @@ -1022,7 +989,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'self_consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-self_consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_self_consumption_year', 'unit_of_measurement': , }) # --- @@ -1076,7 +1043,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'usage', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-usage', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_usage', 'unit_of_measurement': '%', }) # --- @@ -1127,7 +1094,7 @@ 'previous_unique_id': None, 'supported_features': 0, 
'translation_key': 'voltage_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-voltage_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_ac', 'unit_of_measurement': , }) # --- @@ -1178,7 +1145,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-voltage_dc', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_dc', 'unit_of_measurement': , }) # --- @@ -1203,7 +1170,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1233,7 +1202,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_day', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_day', 'unit_of_measurement': , }) # --- @@ -1242,6 +1211,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Yield day', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1257,7 +1227,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1287,7 +1259,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_month', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_month', 'unit_of_measurement': , }) # --- @@ -1296,6 +1268,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Yield month', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1343,7 +1316,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_total', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_total', 'unit_of_measurement': , }) # --- @@ -1368,7 +1341,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1395,7 +1370,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_year', 'unit_of_measurement': , }) # --- @@ -1404,6 +1379,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'energy', 'friendly_name': 'solarlog Yield year', + 'state_class': , 'unit_of_measurement': , }), 'context': , @@ -1449,7 +1425,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_yesterday', + 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_yesterday', 'unit_of_measurement': , }) # --- diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index df5c4bb3c7f0c0..8a34407ff5484d 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -207,7 +207,7 @@ async def test_reconfigure_flow( result = await entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # test with all data provided result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/solarlog/test_sensor.py 
b/tests/components/solarlog/test_sensor.py index bc90e8b25c04e8..77aa0308cdacd3 100644 --- a/tests/components/solarlog/test_sensor.py +++ b/tests/components/solarlog/test_sensor.py @@ -9,11 +9,13 @@ SolarLogConnectionError, SolarLogUpdateError, ) +from solarlog_cli.solarlog_models import InverterData from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry from . import setup_platform @@ -25,7 +27,7 @@ async def test_all_entities( snapshot: SnapshotAssertion, mock_solarlog_connector: AsyncMock, mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, + entity_registry: EntityRegistry, ) -> None: """Test all entities.""" @@ -33,6 +35,49 @@ async def test_all_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +async def test_add_remove_entities( + hass: HomeAssistant, + mock_solarlog_connector: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: DeviceRegistry, + entity_registry: EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test if entities are added and old are removed.""" + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + + assert hass.states.get("sensor.inverter_1_consumption_year").state == "354.687" + + # test no changes (coordinator.py line 114) + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_solarlog_connector.update_device_list.return_value = { + 0: InverterData(name="Inv 1", enabled=True), + 2: InverterData(name="Inverter 3", enabled=True), + } + mock_solarlog_connector.update_inverter_data.return_value = { + 0: InverterData( + name="Inv 1", enabled=True, consumption_year=354687, current_power=5 + ), + 2: InverterData( + name="Inverter 3", enabled=True, consumption_year=454, current_power=7 + ), + } + mock_solarlog_connector.device_name = {0: "Inv 1", 2: "Inverter 3"}.get + mock_solarlog_connector.device_enabled = {0: True, 2: True}.get + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.inverter_1_consumption_year") is None + assert hass.states.get("sensor.inv_1_consumption_year").state == "354.687" + assert hass.states.get("sensor.inverter_2_consumption_year") is None + assert hass.states.get("sensor.inverter_3_consumption_year").state == "0.454" + + @pytest.mark.parametrize( "exception", [ diff --git a/tests/components/sonarr/__init__.py b/tests/components/sonarr/__init__.py index b6050808a34cc7..660102ed08267d 100644 --- a/tests/components/sonarr/__init__.py +++ b/tests/components/sonarr/__init__.py @@ -5,6 +5,6 @@ MOCK_REAUTH_INPUT = {CONF_API_KEY: "test-api-key-reauth"} MOCK_USER_INPUT = { - CONF_URL: "http://192.168.1.189:8989", + CONF_URL: "http://192.168.1.189:8989/", CONF_API_KEY: "MOCK_API_KEY", } diff --git a/tests/components/sonarr/test_config_flow.py b/tests/components/sonarr/test_config_flow.py index 118d5020cba3ee..efbfbd749b3e94 100644 --- a/tests/components/sonarr/test_config_flow.py +++ b/tests/components/sonarr/test_config_flow.py @@ -50,6 +50,34 @@ async def test_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} +async def test_url_rewrite( + hass: HomeAssistant, + mock_sonarr_config_flow: 
MagicMock, + mock_setup_entry: None, +) -> None: + """Test the full manual user flow from start to finish.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = MOCK_USER_INPUT.copy() + user_input[CONF_URL] = "https://192.168.1.189" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "192.168.1.189" + + assert result["data"] + assert result["data"][CONF_URL] == "https://192.168.1.189:443/" + + async def test_invalid_auth( hass: HomeAssistant, mock_sonarr_config_flow: MagicMock ) -> None: @@ -145,7 +173,7 @@ async def test_full_user_flow_implementation( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" async def test_full_user_flow_advanced_options( @@ -175,7 +203,7 @@ async def test_full_user_flow_advanced_options( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" assert result["data"][CONF_VERIFY_SSL] diff --git a/tests/components/spc/test_alarm_control_panel.py b/tests/components/spc/test_alarm_control_panel.py index 7b1ab4ff9471d1..12fb885b92b5e2 100644 --- a/tests/components/spc/test_alarm_control_panel.py +++ b/tests/components/spc/test_alarm_control_panel.py @@ -4,7 +4,7 @@ from pyspcwebgw.const import AreaMode -from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -19,7 +19,7 @@ async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) entity_id = "alarm_control_panel.house" - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" mock_area = mock_client.return_value.areas["1"] @@ -30,5 +30,5 @@ async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) await mock_client.call_args_list[0][1]["async_callback"](mock_area) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" diff --git a/tests/components/spotify/conftest.py b/tests/components/spotify/conftest.py index 0adeb63c8a53ea..d3fc418f1cd5d3 100644 --- a/tests/components/spotify/conftest.py +++ b/tests/components/spotify/conftest.py @@ -2,9 +2,34 @@ from collections.abc import Generator import time -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, patch import pytest +from spotifyaio.models import ( + Album, + Artist, + ArtistResponse, + AudioFeatures, + CategoriesResponse, + Category, + CategoryPlaylistResponse, + Devices, + FeaturedPlaylistResponse, + NewReleasesResponse, + NewReleasesResponseInner, + PlaybackState, + PlayedTrackResponse, + Playlist, + PlaylistResponse, + SavedAlbumResponse, + SavedShowResponse, + 
SavedTrackResponse, + Show, + ShowEpisodesResponse, + TopArtistsResponse, + TopTracksResponse, + UserProfile, +) from homeassistant.components.application_credentials import ( ClientCredential, @@ -14,7 +39,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_json_value_fixture +from tests.common import MockConfigEntry, load_fixture SCOPES = " ".join(SPOTIFY_SCOPES) @@ -59,24 +84,83 @@ async def setup_credentials(hass: HomeAssistant) -> None: ) +@pytest.fixture(autouse=True) +async def patch_sleep() -> Generator[AsyncMock]: + """Fixture to setup credentials.""" + with patch("homeassistant.components.spotify.media_player.AFTER_REQUEST_SLEEP", 0): + yield + + @pytest.fixture -def mock_spotify() -> Generator[MagicMock]: +def mock_spotify() -> Generator[AsyncMock]: """Mock the Spotify API.""" with ( patch( - "homeassistant.components.spotify.Spotify", - autospec=True, + "homeassistant.components.spotify.SpotifyClient", autospec=True ) as spotify_mock, patch( - "homeassistant.components.spotify.config_flow.Spotify", + "homeassistant.components.spotify.config_flow.SpotifyClient", new=spotify_mock, ), ): client = spotify_mock.return_value - client.current_user_playlists.return_value = load_json_value_fixture( - "current_user_playlist.json", DOMAIN - ) - client.current_user.return_value = load_json_value_fixture( - "current_user.json", DOMAIN - ) + # All these fixtures can be retrieved using the Web API client at + # https://developer.spotify.com/documentation/web-api + for fixture, method, obj in ( + ( + "current_user_playlist.json", + "get_playlists_for_current_user", + PlaylistResponse, + ), + ("saved_albums.json", "get_saved_albums", SavedAlbumResponse), + ("saved_tracks.json", "get_saved_tracks", SavedTrackResponse), + ("saved_shows.json", "get_saved_shows", SavedShowResponse), + ( + "recently_played_tracks.json", + "get_recently_played_tracks", + PlayedTrackResponse, + ), + ("top_artists.json", "get_top_artists", TopArtistsResponse), + ("top_tracks.json", "get_top_tracks", TopTracksResponse), + ("show_episodes.json", "get_show_episodes", ShowEpisodesResponse), + ("artist_albums.json", "get_artist_albums", NewReleasesResponseInner), + ): + getattr(client, method).return_value = obj.from_json( + load_fixture(fixture, DOMAIN) + ).items + for fixture, method, obj in ( + ( + "playback.json", + "get_playback", + PlaybackState, + ), + ("current_user.json", "get_current_user", UserProfile), + ("category.json", "get_category", Category), + ("playlist.json", "get_playlist", Playlist), + ("album.json", "get_album", Album), + ("artist.json", "get_artist", Artist), + ("show.json", "get_show", Show), + ("audio_features.json", "get_audio_features", AudioFeatures), + ): + getattr(client, method).return_value = obj.from_json( + load_fixture(fixture, DOMAIN) + ) + client.get_followed_artists.return_value = ArtistResponse.from_json( + load_fixture("followed_artists.json", DOMAIN) + ).artists.items + client.get_featured_playlists.return_value = FeaturedPlaylistResponse.from_json( + load_fixture("featured_playlists.json", DOMAIN) + ).playlists.items + client.get_categories.return_value = CategoriesResponse.from_json( + load_fixture("categories.json", DOMAIN) + ).categories.items + client.get_category_playlists.return_value = CategoryPlaylistResponse.from_json( + load_fixture("category_playlists.json", DOMAIN) + ).playlists.items + client.get_new_releases.return_value = NewReleasesResponse.from_json( + 
load_fixture("new_releases.json", DOMAIN) + ).albums.items + client.get_devices.return_value = Devices.from_json( + load_fixture("devices.json", DOMAIN) + ).devices yield spotify_mock diff --git a/tests/components/spotify/fixtures/album.json b/tests/components/spotify/fixtures/album.json new file mode 100644 index 00000000000000..d7240298e9f9bb --- /dev/null +++ b/tests/components/spotify/fixtures/album.json @@ -0,0 +1,128 @@ +{ + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" + }, + "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", + "id": "3jULn43a6xfzqleyeFjPIq", + "name": "Area 11", + "type": "artist", + "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" + } + ], + "available_markets": [], + "copyrights": [ + { + "text": "2020 Smihilism Records", + "type": "C" + }, + { + "text": "2020 Smihilism Records", + "type": "P" + } + ], + "external_ids": { + "upc": "195916707034" + }, + "external_urls": { + "spotify": "https://open.spotify.com/album/3IqzqH6ShrRtie9Yd2ODyG" + }, + "genres": [], + "href": "https://api.spotify.com/v1/albums/3IqzqH6ShrRtie9Yd2ODyG", + "id": "3IqzqH6ShrRtie9Yd2ODyG", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273a61a28c2f084761f8833bce6", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02a61a28c2f084761f8833bce6", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851a61a28c2f084761f8833bce6", + "width": 64 + } + ], + "label": "Smihilism Records", + "name": "SINGLARITY", + "popularity": 29, + "release_date": "2020-12-18", + "release_date_precision": "day", + "total_tracks": 11, + "tracks": { + "href": "https://api.spotify.com/v1/albums/3IqzqH6ShrRtie9Yd2ODyG/tracks?offset=0&limit=50&locale=en-US,en;q=0.5", + "items": [ + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" + }, + "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", + "id": "3jULn43a6xfzqleyeFjPIq", + "name": "Area 11", + "type": "artist", + "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" + } + ], + "available_markets": [], + "disc_number": 1, + "duration_ms": 260372, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6akJGriy4njdP8fZTPGjwz" + }, + "href": "https://api.spotify.com/v1/tracks/6akJGriy4njdP8fZTPGjwz", + "id": "6akJGriy4njdP8fZTPGjwz", + "is_local": false, + "name": "All Your Friends", + "preview_url": "https://p.scdn.co/mp3-preview/484344e579edfdb8e8f872d73299aff2c3d0369d?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:6akJGriy4njdP8fZTPGjwz" + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jULn43a6xfzqleyeFjPIq" + }, + "href": "https://api.spotify.com/v1/artists/3jULn43a6xfzqleyeFjPIq", + "id": "3jULn43a6xfzqleyeFjPIq", + "name": "Area 11", + "type": "artist", + "uri": "spotify:artist:3jULn43a6xfzqleyeFjPIq" + } + ], + "available_markets": [], + "disc_number": 1, + "duration_ms": 206613, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/7N02bJK1amhplZ8yAapRS5" + }, + "href": "https://api.spotify.com/v1/tracks/7N02bJK1amhplZ8yAapRS5", + "id": "7N02bJK1amhplZ8yAapRS5", + "is_local": false, + "name": "New Magiks", + "preview_url": 
"https://p.scdn.co/mp3-preview/b59a5a73ed2e9a61be471822993e91210d5f255a?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:7N02bJK1amhplZ8yAapRS5" + } + ], + "limit": 50, + "next": null, + "offset": 0, + "previous": null, + "total": 11 + }, + "type": "album", + "uri": "spotify:album:3IqzqH6ShrRtie9Yd2ODyG" +} diff --git a/tests/components/spotify/fixtures/artist.json b/tests/components/spotify/fixtures/artist.json new file mode 100644 index 00000000000000..e60429fa030479 --- /dev/null +++ b/tests/components/spotify/fixtures/artist.json @@ -0,0 +1,33 @@ +{ + "external_urls": { + "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" + }, + "followers": { + "href": null, + "total": 10817055 + }, + "genres": ["dance pop", "miami hip hop", "pop"], + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg?locale=en-US%2Cen%3Bq%3D0.5", + "id": "0TnOYISbd1XYRBk9myaseg", + "images": [ + { + "url": "https://i.scdn.co/image/ab6761610000e5ebee07b5820dd91d15d397e29c", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616100005174ee07b5820dd91d15d397e29c", + "height": 320, + "width": 320 + }, + { + "url": "https://i.scdn.co/image/ab6761610000f178ee07b5820dd91d15d397e29c", + "height": 160, + "width": 160 + } + ], + "name": "Pitbull", + "popularity": 85, + "type": "artist", + "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" +} diff --git a/tests/components/spotify/fixtures/artist_albums.json b/tests/components/spotify/fixtures/artist_albums.json new file mode 100644 index 00000000000000..2cc66d1ac0b1b7 --- /dev/null +++ b/tests/components/spotify/fixtures/artist_albums.json @@ -0,0 +1,472 @@ +{ + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg/albums?offset=0&limit=20&locale=en-US,en;q%3D0.5&include_groups=album,single,compilation,appears_on", + "limit": 20, + "next": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg/albums?offset=20&limit=20&locale=en-US,en;q%3D0.5&include_groups=album,single,compilation,appears_on", + "offset": 0, + "previous": null, + "total": 903, + "items": [ + { + "album_type": "album", + "total_tracks": 7, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + 
"ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/56jg3KJcYmfL7RzYmG2O1Q" + }, + "href": "https://api.spotify.com/v1/albums/56jg3KJcYmfL7RzYmG2O1Q", + "id": "56jg3KJcYmfL7RzYmG2O1Q", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273a0bac1996f26274685db1520", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02a0bac1996f26274685db1520", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851a0bac1996f26274685db1520", + "height": 64, + "width": 64 + } + ], + "name": "Trackhouse (Daytona 500 Edition)", + "release_date": "2024-02-16", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:56jg3KJcYmfL7RzYmG2O1Q", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" + }, + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg", + "id": "0TnOYISbd1XYRBk9myaseg", + "name": "Pitbull", + "type": "artist", + "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" + } + ], + "album_group": "album" + }, + { + "album_type": "album", + "total_tracks": 14, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/1l86t4bTNT2j1X0ZBCIv6R" + }, + "href": "https://api.spotify.com/v1/albums/1l86t4bTNT2j1X0ZBCIv6R", + "id": "1l86t4bTNT2j1X0ZBCIv6R", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b27333a4ba8f73271a749c5d953d", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e0233a4ba8f73271a749c5d953d", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d0000485133a4ba8f73271a749c5d953d", + "height": 64, + "width": 64 + } + ], + "name": "Trackhouse", + "release_date": "2023-10-06", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:1l86t4bTNT2j1X0ZBCIv6R", + "artists": [ + { + 
"external_urls": { + "spotify": "https://open.spotify.com/artist/0TnOYISbd1XYRBk9myaseg" + }, + "href": "https://api.spotify.com/v1/artists/0TnOYISbd1XYRBk9myaseg", + "id": "0TnOYISbd1XYRBk9myaseg", + "name": "Pitbull", + "type": "artist", + "uri": "spotify:artist:0TnOYISbd1XYRBk9myaseg" + } + ], + "album_group": "album" + } + ] +} diff --git a/tests/components/spotify/fixtures/audio_features.json b/tests/components/spotify/fixtures/audio_features.json new file mode 100644 index 00000000000000..52dfee060f72fd --- /dev/null +++ b/tests/components/spotify/fixtures/audio_features.json @@ -0,0 +1,20 @@ +{ + "danceability": 0.696, + "energy": 0.905, + "key": 3, + "loudness": -2.743, + "mode": 1, + "speechiness": 0.103, + "acousticness": 0.011, + "instrumentalness": 0.000905, + "liveness": 0.302, + "valence": 0.625, + "tempo": 114.944, + "type": "audio_features", + "id": "11dFghVXANMlKmJXsNCbNl", + "uri": "spotify:track:11dFghVXANMlKmJXsNCbNl", + "track_href": "https://api.spotify.com/v1/tracks/11dFghVXANMlKmJXsNCbNl", + "analysis_url": "https://api.spotify.com/v1/audio-analysis/11dFghVXANMlKmJXsNCbNl", + "duration_ms": 207960, + "time_signature": 4 +} diff --git a/tests/components/spotify/fixtures/categories.json b/tests/components/spotify/fixtures/categories.json new file mode 100644 index 00000000000000..ed873c95c304a2 --- /dev/null +++ b/tests/components/spotify/fixtures/categories.json @@ -0,0 +1,36 @@ +{ + "categories": { + "href": "https://api.spotify.com/v1/browse/categories?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAt0tbjZptfcdMSKl3", + "id": "0JQ5DAt0tbjZptfcdMSKl3", + "icons": [ + { + "height": 274, + "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", + "width": 274 + } + ], + "name": "Made For You" + }, + { + "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFz6FAsUtgAab", + "id": "0JQ5DAqbMKFz6FAsUtgAab", + "icons": [ + { + "height": 274, + "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", + "width": 274 + } + ], + "name": "New Releases" + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/browse/categories?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 56 + } +} diff --git a/tests/components/spotify/fixtures/category.json b/tests/components/spotify/fixtures/category.json new file mode 100644 index 00000000000000..d60605cf94f8c0 --- /dev/null +++ b/tests/components/spotify/fixtures/category.json @@ -0,0 +1,12 @@ +{ + "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0", + "id": "0JQ5DAqbMKFRY5ok2pxXJ0", + "icons": [ + { + "height": 274, + "url": "https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg", + "width": 274 + } + ], + "name": "Cooking & Dining" +} diff --git a/tests/components/spotify/fixtures/category_playlists.json b/tests/components/spotify/fixtures/category_playlists.json new file mode 100644 index 00000000000000..c2262708d5a932 --- /dev/null +++ b/tests/components/spotify/fixtures/category_playlists.json @@ -0,0 +1,84 @@ +{ + "playlists": { + "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=0&limit=20", + "items": [ + { + "collaborative": false, + "description": "Lekker eten en lang natafelen? 
Daar hoort muziek bij.", + "external_urls": { + "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX7yhuKT9G4qk" + }, + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk", + "id": "37i9dQZF1DX7yhuKT9G4qk", + "images": [ + { + "height": null, + "url": "https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588", + "width": null + } + ], + "name": "eten met vrienden", + "owner": { + "display_name": "Spotify", + "external_urls": { + "spotify": "https://open.spotify.com/user/spotify" + }, + "href": "https://api.spotify.com/v1/users/spotify", + "id": "spotify", + "type": "user", + "uri": "spotify:user:spotify" + }, + "primary_color": null, + "public": null, + "snapshot_id": "MTcwMTY5Njk3NywwMDAwMDAwMDkyY2JjZDA1MjA2YTBmNzMxMmFlNGI0YzRhMjg0ZWZl", + "tracks": { + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk/tracks", + "total": 313 + }, + "type": "playlist", + "uri": "spotify:playlist:37i9dQZF1DX7yhuKT9G4qk" + }, + { + "collaborative": false, + "description": "From new retro to classic country blues, honky tonk, rockabilly, and more.", + "external_urls": { + "spotify": "https://open.spotify.com/playlist/37i9dQZF1DXbvE0SE0Cczh" + }, + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh", + "id": "37i9dQZF1DXbvE0SE0Cczh", + "images": [ + { + "height": null, + "url": "https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8", + "width": null + } + ], + "name": "Jukebox Joint", + "owner": { + "display_name": "Spotify", + "external_urls": { + "spotify": "https://open.spotify.com/user/spotify" + }, + "href": "https://api.spotify.com/v1/users/spotify", + "id": "spotify", + "type": "user", + "uri": "spotify:user:spotify" + }, + "primary_color": null, + "public": null, + "snapshot_id": "MTY4NjkxODgwMiwwMDAwMDAwMGUwNWRkNjY5N2UzM2Q4NzI4NzRiZmNhMGVmMzAyZTA5", + "tracks": { + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh/tracks", + "total": 60 + }, + "type": "playlist", + "uri": "spotify:playlist:37i9dQZF1DXbvE0SE0Cczh" + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=20&limit=20", + "offset": 0, + "previous": null, + "total": 46 + } +} diff --git a/tests/components/spotify/fixtures/devices.json b/tests/components/spotify/fixtures/devices.json new file mode 100644 index 00000000000000..2dd8dfd7c3bf01 --- /dev/null +++ b/tests/components/spotify/fixtures/devices.json @@ -0,0 +1,14 @@ +{ + "devices": [ + { + "id": "21dac6b0e0a1f181870fdc9749b2656466557666", + "is_active": false, + "is_private_session": false, + "is_restricted": false, + "name": "DESKTOP-BKC5SIK", + "supports_volume": true, + "type": "Computer", + "volume_percent": 69 + } + ] +} diff --git a/tests/components/spotify/fixtures/featured_playlists.json b/tests/components/spotify/fixtures/featured_playlists.json new file mode 100644 index 00000000000000..5e6e53a7ee1ba3 --- /dev/null +++ b/tests/components/spotify/fixtures/featured_playlists.json @@ -0,0 +1,85 @@ +{ + "message": "Popular Playlists", + "playlists": { + "href": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=0&limit=20", + "items": [ + { + "collaborative": false, + "description": "De ideale playlist voor het fijne kerstgevoel bij de boom!", + "external_urls": { + "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX4dopZ9vOp1t" + }, + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t", + "id": "37i9dQZF1DX4dopZ9vOp1t", + "images": [ 
+ { + "height": null, + "url": "https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe", + "width": null + } + ], + "name": "Kerst Hits 2023", + "owner": { + "display_name": "Spotify", + "external_urls": { + "spotify": "https://open.spotify.com/user/spotify" + }, + "href": "https://api.spotify.com/v1/users/spotify", + "id": "spotify", + "type": "user", + "uri": "spotify:user:spotify" + }, + "primary_color": null, + "public": null, + "snapshot_id": "MTcwMjU2ODI4MSwwMDAwMDAwMDE1ZGRiNzI3OGY4OGU2MzA1MWNkZGMyNTdmNDUwMTc1", + "tracks": { + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t/tracks", + "total": 298 + }, + "type": "playlist", + "uri": "spotify:playlist:37i9dQZF1DX4dopZ9vOp1t" + }, + { + "collaborative": false, + "description": "De 50 populairste hits van Nederland. Cover: Jack Harlow", + "external_urls": { + "spotify": "https://open.spotify.com/playlist/37i9dQZF1DWSBi5svWQ9Nk" + }, + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk", + "id": "37i9dQZF1DWSBi5svWQ9Nk", + "images": [ + { + "height": null, + "url": "https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf", + "width": null + } + ], + "name": "Top Hits NL", + "owner": { + "display_name": "Spotify", + "external_urls": { + "spotify": "https://open.spotify.com/user/spotify" + }, + "href": "https://api.spotify.com/v1/users/spotify", + "id": "spotify", + "type": "user", + "uri": "spotify:user:spotify" + }, + "primary_color": null, + "public": null, + "snapshot_id": "MTcwMjU5NDgwMCwwMDAwMDAwMDU4NWY2MTE4NmU4NmIwMDdlMGE4ZGRkOTZkN2U2MzAx", + "tracks": { + "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk/tracks", + "total": 50 + }, + "type": "playlist", + "uri": "spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk" + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=20&limit=20", + "offset": 0, + "previous": null, + "total": 24 + } +} diff --git a/tests/components/spotify/fixtures/followed_artists.json b/tests/components/spotify/fixtures/followed_artists.json new file mode 100644 index 00000000000000..4e03ed8291bed5 --- /dev/null +++ b/tests/components/spotify/fixtures/followed_artists.json @@ -0,0 +1,87 @@ +{ + "artists": { + "items": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0lLY20XpZ9yDobkbHI7u1y" + }, + "followers": { + "href": null, + "total": 349437 + }, + "genres": [ + "brostep", + "complextro", + "danish electronic", + "edm", + "electro house", + "glitch", + "speedrun" + ], + "href": "https://api.spotify.com/v1/artists/0lLY20XpZ9yDobkbHI7u1y", + "id": "0lLY20XpZ9yDobkbHI7u1y", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6761610000e5eb0fb1220e7e3ace47ebad023e", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab676161000051740fb1220e7e3ace47ebad023e", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f1780fb1220e7e3ace47ebad023e", + "width": 160 + } + ], + "name": "Pegboard Nerds", + "popularity": 52, + "type": "artist", + "uri": "spotify:artist:0lLY20XpZ9yDobkbHI7u1y" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0p4nmQO2msCgU4IF37Wi3j" + }, + "followers": { + "href": null, + "total": 11296082 + }, + "genres": ["canadian pop", "candy pop", "dance pop", "pop"], + "href": "https://api.spotify.com/v1/artists/0p4nmQO2msCgU4IF37Wi3j", + "id": "0p4nmQO2msCgU4IF37Wi3j", + "images": [ + { + "height": 640, + "url": 
"https://i.scdn.co/image/ab6761610000e5eb5c3349ddba6b8e064c1bab16", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab676161000051745c3349ddba6b8e064c1bab16", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f1785c3349ddba6b8e064c1bab16", + "width": 160 + } + ], + "name": "Avril Lavigne", + "popularity": 78, + "type": "artist", + "uri": "spotify:artist:0p4nmQO2msCgU4IF37Wi3j" + } + ], + "next": "https://api.spotify.com/v1/me/following?type=artist&limit=20&locale=en-US,en;q=0.5&after=2NZMqINcyfepvLxQJdzcZk", + "total": 74, + "cursors": { + "after": "2NZMqINcyfepvLxQJdzcZk" + }, + "limit": 20, + "href": "https://api.spotify.com/v1/me/following?type=artist&limit=20&locale=en-US,en;q=0.5" + } +} diff --git a/tests/components/spotify/fixtures/new_releases.json b/tests/components/spotify/fixtures/new_releases.json new file mode 100644 index 00000000000000..b6948ef79a5c64 --- /dev/null +++ b/tests/components/spotify/fixtures/new_releases.json @@ -0,0 +1,469 @@ +{ + "albums": { + "href": "https://api.spotify.com/v1/browse/new-releases?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4gzpq5DPGxSnKTe4SA8HAU" + }, + "href": "https://api.spotify.com/v1/artists/4gzpq5DPGxSnKTe4SA8HAU", + "id": "4gzpq5DPGxSnKTe4SA8HAU", + "name": "Coldplay", + "type": "artist", + "uri": "spotify:artist:4gzpq5DPGxSnKTe4SA8HAU" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/5SGtrmYbIo0Dsg4kJ4qjM6" + }, + "href": "https://api.spotify.com/v1/albums/5SGtrmYbIo0Dsg4kJ4qjM6", + "id": "5SGtrmYbIo0Dsg4kJ4qjM6", + "images": [ + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e0209ba52a5116e0c3e8461f58b", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d0000485109ba52a5116e0c3e8461f58b", + "width": 64 + }, + { + "height": 640, + "url": 
"https://i.scdn.co/image/ab67616d0000b27309ba52a5116e0c3e8461f58b", + "width": 640 + } + ], + "name": "Moon Music", + "release_date": "2024-10-04", + "release_date_precision": "day", + "total_tracks": 10, + "type": "album", + "uri": "spotify:album:5SGtrmYbIo0Dsg4kJ4qjM6" + }, + { + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4U9nsRTH2mr9L4UXEWqG5e" + }, + "href": "https://api.spotify.com/v1/artists/4U9nsRTH2mr9L4UXEWqG5e", + "id": "4U9nsRTH2mr9L4UXEWqG5e", + "name": "Bente", + "type": "artist", + "uri": "spotify:artist:4U9nsRTH2mr9L4UXEWqG5e" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/713lZ7AF55fEFSQgcttj9y" + }, + "href": "https://api.spotify.com/v1/albums/713lZ7AF55fEFSQgcttj9y", + "id": "713lZ7AF55fEFSQgcttj9y", + "images": [ + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02ab9953b1d18f8233f6b26027", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851ab9953b1d18f8233f6b26027", + "width": 64 + }, + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273ab9953b1d18f8233f6b26027", + "width": 640 + } + ], + "name": "drift", + "release_date": "2024-10-03", + "release_date_precision": "day", + "total_tracks": 14, + "type": "album", + "uri": "spotify:album:713lZ7AF55fEFSQgcttj9y" + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/browse/new-releases?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 100 + } +} diff --git a/tests/components/spotify/fixtures/playback.json b/tests/components/spotify/fixtures/playback.json new file mode 100644 index 00000000000000..d0bf8e0478aeba --- /dev/null +++ b/tests/components/spotify/fixtures/playback.json @@ -0,0 +1,106 @@ +{ + "device": { + "id": "a19f7a03a25aff3e43f457a328a8ba67a8c44789", + "is_active": true, + "is_private_session": false, + "is_restricted": false, + "name": "Master Bathroom Speaker", + "type": "Speaker", + "volume_percent": 25 + }, + "shuffle_state": 
false, + "repeat_state": "off", + "timestamp": 1689639030791, + "context": { + "external_urls": { + "spotify": "https://open.spotify.com/playlist/2r35vbe6hHl6yDSMfjKgmm" + }, + "href": "https://api.spotify.com/v1/playlists/2r35vbe6hHl6yDSMfjKgmm", + "type": "playlist", + "uri": "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + }, + "progress_ms": 249367, + "item": { + "album": { + "album_type": "album", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2Hkut4rAAyrQxRdof7FVJq" + }, + "href": "https://api.spotify.com/v1/artists/2Hkut4rAAyrQxRdof7FVJq", + "id": "2Hkut4rAAyrQxRdof7FVJq", + "name": "Rush", + "type": "artist", + "uri": "spotify:artist:2Hkut4rAAyrQxRdof7FVJq" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/3nUNxSh2szhmN7iifAKv5i" + }, + "href": "https://api.spotify.com/v1/albums/3nUNxSh2szhmN7iifAKv5i", + "id": "3nUNxSh2szhmN7iifAKv5i", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b27306c0d7ebcabad0c39b566983", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e0206c0d7ebcabad0c39b566983", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d0000485106c0d7ebcabad0c39b566983", + "width": 64 + } + ], + "name": "Permanent Waves", + "release_date": "1980-01-01", + "release_date_precision": "day", + "total_tracks": 6, + "type": "album", + "uri": "spotify:album:3nUNxSh2szhmN7iifAKv5i" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2Hkut4rAAyrQxRdof7FVJq" + }, + "href": "https://api.spotify.com/v1/artists/2Hkut4rAAyrQxRdof7FVJq", + "id": "2Hkut4rAAyrQxRdof7FVJq", + "name": "Rush", + "type": "artist", + "uri": "spotify:artist:2Hkut4rAAyrQxRdof7FVJq" + } + ], + "disc_number": 1, + "duration_ms": 296466, + "explicit": false, + "external_ids": { + "isrc": "USMR18070028" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/4e9hUiLsN4mx61ARosFi7p" + }, + "href": "https://api.spotify.com/v1/tracks/4e9hUiLsN4mx61ARosFi7p", + "id": "4e9hUiLsN4mx61ARosFi7p", + "is_local": false, + "name": "The Spirit Of Radio", + "popularity": 68, + "preview_url": "https://p.scdn.co/mp3-preview/75cc52f458b2416f33f15c499783c51119ba9a93?cid=20bbc62823a3412ba5267ea5398e52d0", + "track_number": 1, + "type": "track", + "uri": "spotify:track:4e9hUiLsN4mx61ARosFi7p" + }, + "currently_playing_type": "track", + "actions": { + "disallows": { + "skipping_prev": true, + "toggling_repeat_track": true + } + }, + "is_playing": true +} diff --git a/tests/components/spotify/fixtures/playback_episode.json b/tests/components/spotify/fixtures/playback_episode.json new file mode 100644 index 00000000000000..6a9de50a5345c8 --- /dev/null +++ b/tests/components/spotify/fixtures/playback_episode.json @@ -0,0 +1,110 @@ +{ + "device": { + "id": null, + "is_active": true, + "is_private_session": false, + "is_restricted": true, + "name": "Sonos Roam SL", + "supports_volume": true, + "type": "Speaker", + "volume_percent": 46 + }, + "shuffle_state": false, + "smart_shuffle": false, + "repeat_state": "off", + "timestamp": 1728219605131, + "context": { + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" + }, + "progress_ms": 5410, + "item": { + "audio_preview_url": 
"https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 3690161, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" + }, + "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", + "html_description": "
Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "3o0RYoo5iOMKSmEbunsbvW", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "My Squirrel Has Brain Damage - Safety Third 119", + "release_date": "2024-07-26", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "show": { + "copyrights": [], + "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", + "html_description": "
Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", + "id": "1Y9ExMgMxoBVrgrfU7u0nD", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8b", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "languages": ["en-US"], + "media_type": "audio", + "name": "Safety Third", + "publisher": "Safety Third ", + "total_episodes": 120, + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" + }, + "type": "episode", + "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" + }, + "currently_playing_type": "episode", + "actions": { + "disallows": { + "resuming": true + } + }, + "is_playing": true +} diff --git a/tests/components/spotify/fixtures/playlist.json b/tests/components/spotify/fixtures/playlist.json new file mode 100644 index 00000000000000..36c28cc814b7d6 --- /dev/null +++ b/tests/components/spotify/fixtures/playlist.json @@ -0,0 +1,520 @@ +{ + "collaborative": false, + "external_urls": { + "spotify": "https://open.spotify.com/playlist/3cEYpjA9oz9GiPac4AsH4n" + }, + "followers": { + "href": null, + "total": 562 + }, + "href": "https://api.spotify.com/v1/playlists/3cEYpjA9oz9GiPac4AsH4n?locale=en-US%2Cen%3Bq%3D0.5", + "id": "3cEYpjA9oz9GiPac4AsH4n", + "images": [ + { + "url": "https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba", + "height": null, + "width": null + } + ], + "primary_color": null, + "name": "Spotify Web API Testing playlist", + "description": "A playlist for testing pourposes", + "type": "playlist", + "uri": "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n", + "owner": { + "href": "https://api.spotify.com/v1/users/jmperezperez", + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "display_name": "JMPerez²", + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + } + }, + "public": true, + "snapshot_id": "MTgsZWFmNmZiNTIzYTg4ODM0OGQzZWQzOGI4NTdkNTJlMjU0OWFkYTUxMA==", + "tracks": { + "limit": 100, + "next": null, + "offset": 0, + "previous": null, + "href": "https://api.spotify.com/v1/playlists/3cEYpjA9oz9GiPac4AsH4n/tracks?offset=0&limit=100&locale=en-US%2Cen%3Bq%3D0.5", + "total": 5, + "items": [ + { + "added_at": "2015-01-15T12:39:22Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/04599a1fe12ffac01d2bcb08340f84c0dd2cc335?cid=c7c59b798aab4892ac040a25f7dd1575", + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "type": "album", + "album_type": "compilation", + "href": "https://api.spotify.com/v1/albums/2pANdqPvxInB0YvcDiw4ko", + "id": "2pANdqPvxInB0YvcDiw4ko", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02ce6d0eef0c1ce77e5f95bbbc", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851ce6d0eef0c1ce77e5f95bbbc", + "width": 64, + "height": 64 + } + ], + "name": "Progressive Psy Trance Picks Vol.8", + "release_date": 
"2012-04-02", + "release_date_precision": "day", + "uri": "spotify:album:2pANdqPvxInB0YvcDiw4ko", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of" + }, + "href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of", + "id": "0LyfQWJT6nXafLPZqxe9Of", + "name": "Various Artists", + "type": "artist", + "uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/2pANdqPvxInB0YvcDiw4ko" + }, + "total_tracks": 20 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6eSdhw46riw2OUHgMwR8B5" + }, + "href": "https://api.spotify.com/v1/artists/6eSdhw46riw2OUHgMwR8B5", + "id": "6eSdhw46riw2OUHgMwR8B5", + "name": "Odiseo", + "type": "artist", + "uri": "spotify:artist:6eSdhw46riw2OUHgMwR8B5" + } + ], + "disc_number": 1, + "track_number": 10, + "duration_ms": 376000, + "external_ids": { + "isrc": "DEKC41200989" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/4rzfv0JLZfVhOhbSQ8o5jZ" + }, + "href": "https://api.spotify.com/v1/tracks/4rzfv0JLZfVhOhbSQ8o5jZ", + "id": "4rzfv0JLZfVhOhbSQ8o5jZ", + "name": "Api", + "popularity": 2, + "uri": "spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:40:03Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/d61fbb7016904624373008ea056d45e6df891071?cid=c7c59b798aab4892ac040a25f7dd1575", + "available_markets": [], + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "available_markets": [], + "type": "album", + "album_type": "compilation", + "href": "https://api.spotify.com/v1/albums/6nlfkk5GoXRL1nktlATNsy", + "id": "6nlfkk5GoXRL1nktlATNsy", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02aa2ff29970d9a63a49dfaeb2", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851aa2ff29970d9a63a49dfaeb2", + "width": 64, + "height": 64 + } + ], + "name": "Wellness & Dreaming Source", + "release_date": "2015-01-09", + "release_date_precision": "day", + "uri": "spotify:album:6nlfkk5GoXRL1nktlATNsy", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0LyfQWJT6nXafLPZqxe9Of" + }, + "href": "https://api.spotify.com/v1/artists/0LyfQWJT6nXafLPZqxe9Of", + "id": "0LyfQWJT6nXafLPZqxe9Of", + "name": "Various Artists", + "type": "artist", + "uri": "spotify:artist:0LyfQWJT6nXafLPZqxe9Of" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/6nlfkk5GoXRL1nktlATNsy" + }, + "total_tracks": 25 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5VQE4WOzPu9h3HnGLuBoA6" + }, + "href": "https://api.spotify.com/v1/artists/5VQE4WOzPu9h3HnGLuBoA6", + "id": "5VQE4WOzPu9h3HnGLuBoA6", + "name": "Vlasta Marek", + "type": "artist", + "uri": "spotify:artist:5VQE4WOzPu9h3HnGLuBoA6" + } + ], + "disc_number": 1, + "track_number": 21, + "duration_ms": 730066, + "external_ids": { + "isrc": "FR2X41475057" + }, + "external_urls": { + "spotify": 
"https://open.spotify.com/track/5o3jMYOSbaVz3tkgwhELSV" + }, + "href": "https://api.spotify.com/v1/tracks/5o3jMYOSbaVz3tkgwhELSV", + "id": "5o3jMYOSbaVz3tkgwhELSV", + "name": "Is", + "popularity": 0, + "uri": "spotify:track:5o3jMYOSbaVz3tkgwhELSV", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:22:30Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/cc680ec0f5fd5ff21f0cd11ac47e10d3cbb92190?cid=c7c59b798aab4892ac040a25f7dd1575", + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "type": "album", + "album_type": "album", + "href": "https://api.spotify.com/v1/albums/4hnqM0JK4CM1phwfq1Ldyz", + "id": "4hnqM0JK4CM1phwfq1Ldyz", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e02ee0d0dce888c6c8a70db6e8b", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d00004851ee0d0dce888c6c8a70db6e8b", + "width": 64, + "height": 64 + } + ], + "name": "This Is Happening", + "release_date": "2010-05-17", + "release_date_precision": "day", + "uri": "spotify:album:4hnqM0JK4CM1phwfq1Ldyz", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/066X20Nz7iquqkkCW6Jxy6" + }, + "href": "https://api.spotify.com/v1/artists/066X20Nz7iquqkkCW6Jxy6", + "id": "066X20Nz7iquqkkCW6Jxy6", + "name": "LCD Soundsystem", + "type": "artist", + "uri": "spotify:artist:066X20Nz7iquqkkCW6Jxy6" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/4hnqM0JK4CM1phwfq1Ldyz" + }, + "total_tracks": 9 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/066X20Nz7iquqkkCW6Jxy6" + }, + "href": "https://api.spotify.com/v1/artists/066X20Nz7iquqkkCW6Jxy6", + "id": "066X20Nz7iquqkkCW6Jxy6", + "name": "LCD Soundsystem", + "type": "artist", + "uri": "spotify:artist:066X20Nz7iquqkkCW6Jxy6" + } + ], + "disc_number": 1, + "track_number": 4, + "duration_ms": 401440, + "external_ids": { + "isrc": "US4GE1000022" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/4Cy0NHJ8Gh0xMdwyM9RkQm" + }, + "href": "https://api.spotify.com/v1/tracks/4Cy0NHJ8Gh0xMdwyM9RkQm", + "id": "4Cy0NHJ8Gh0xMdwyM9RkQm", + "name": "All I Want", + "popularity": 45, + "uri": "spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:40:35Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/d6ecf1f98d0b1fdc8c535de8e2010d0d8b8d040b?cid=c7c59b798aab4892ac040a25f7dd1575", + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "type": "album", + "album_type": "album", + "href": "https://api.spotify.com/v1/albums/2usKFntxa98WHMcyW6xJBz", + "id": "2usKFntxa98WHMcyW6xJBz", + "images": [ + { + "url": 
"https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e028b7447ac3daa1da18811cf7b", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d000048518b7447ac3daa1da18811cf7b", + "width": 64, + "height": 64 + } + ], + "name": "Glenn Horiuchi Trio / Gelenn Horiuchi Quartet: Mercy / Jump Start / Endpoints / Curl Out / Earthworks / Mind Probe / Null Set / Another Space (A)", + "release_date": "2011-04-01", + "release_date_precision": "day", + "uri": "spotify:album:2usKFntxa98WHMcyW6xJBz", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/272ArH9SUAlslQqsSgPJA2" + }, + "href": "https://api.spotify.com/v1/artists/272ArH9SUAlslQqsSgPJA2", + "id": "272ArH9SUAlslQqsSgPJA2", + "name": "Glenn Horiuchi Trio", + "type": "artist", + "uri": "spotify:artist:272ArH9SUAlslQqsSgPJA2" + } + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/2usKFntxa98WHMcyW6xJBz" + }, + "total_tracks": 8 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/272ArH9SUAlslQqsSgPJA2" + }, + "href": "https://api.spotify.com/v1/artists/272ArH9SUAlslQqsSgPJA2", + "id": "272ArH9SUAlslQqsSgPJA2", + "name": "Glenn Horiuchi Trio", + "type": "artist", + "uri": "spotify:artist:272ArH9SUAlslQqsSgPJA2" + } + ], + "disc_number": 1, + "track_number": 2, + "duration_ms": 358760, + "external_ids": { + "isrc": "USB8U1025969" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/6hvFrZNocdt2FcKGCSY5NI" + }, + "href": "https://api.spotify.com/v1/tracks/6hvFrZNocdt2FcKGCSY5NI", + "id": "6hvFrZNocdt2FcKGCSY5NI", + "name": "Endpoints", + "popularity": 0, + "uri": "spotify:track:6hvFrZNocdt2FcKGCSY5NI", + "is_local": false + } + }, + { + "added_at": "2015-01-15T12:41:10Z", + "primary_color": null, + "video_thumbnail": { + "url": null + }, + "is_local": false, + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/jmperezperez" + }, + "id": "jmperezperez", + "type": "user", + "uri": "spotify:user:jmperezperez", + "href": "https://api.spotify.com/v1/users/jmperezperez" + }, + "track": { + "preview_url": "https://p.scdn.co/mp3-preview/47b974e463b1e862c7b3c18fa2ceedc513f2106b?cid=c7c59b798aab4892ac040a25f7dd1575", + "available_markets": [], + "explicit": false, + "type": "track", + "episode": false, + "track": true, + "album": { + "available_markets": [], + "type": "album", + "album_type": "album", + "href": "https://api.spotify.com/v1/albums/0ivM6kSawaug0j3tZVusG2", + "id": "0ivM6kSawaug0j3tZVusG2", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71", + "width": 640, + "height": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e0204e57d181ff062f8339d6c71", + "width": 300, + "height": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d0000485104e57d181ff062f8339d6c71", + "width": 64, + "height": 64 + } + ], + "name": "All The Best (Spanish Version)", + "release_date": "2007-01-01", + "release_date_precision": "day", + "uri": "spotify:album:0ivM6kSawaug0j3tZVusG2", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2KftmGt9sk1yLjsAoloC3M" + }, + "href": "https://api.spotify.com/v1/artists/2KftmGt9sk1yLjsAoloC3M", + "id": "2KftmGt9sk1yLjsAoloC3M", + "name": "Zucchero", + "type": "artist", + "uri": "spotify:artist:2KftmGt9sk1yLjsAoloC3M" + } + ], + "external_urls": { + "spotify": 
"https://open.spotify.com/album/0ivM6kSawaug0j3tZVusG2" + }, + "total_tracks": 18 + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2KftmGt9sk1yLjsAoloC3M" + }, + "href": "https://api.spotify.com/v1/artists/2KftmGt9sk1yLjsAoloC3M", + "id": "2KftmGt9sk1yLjsAoloC3M", + "name": "Zucchero", + "type": "artist", + "uri": "spotify:artist:2KftmGt9sk1yLjsAoloC3M" + } + ], + "disc_number": 1, + "track_number": 18, + "duration_ms": 176093, + "external_ids": { + "isrc": "ITUM70701043" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/2E2znCPaS8anQe21GLxcvJ" + }, + "href": "https://api.spotify.com/v1/tracks/2E2znCPaS8anQe21GLxcvJ", + "id": "2E2znCPaS8anQe21GLxcvJ", + "name": "You Are So Beautiful", + "popularity": 0, + "uri": "spotify:track:2E2znCPaS8anQe21GLxcvJ", + "is_local": false + } + } + ] + } +} diff --git a/tests/components/spotify/fixtures/recently_played_tracks.json b/tests/components/spotify/fixtures/recently_played_tracks.json new file mode 100644 index 00000000000000..f000d76a52f1a2 --- /dev/null +++ b/tests/components/spotify/fixtures/recently_played_tracks.json @@ -0,0 +1,964 @@ +{ + "items": [ + { + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, + "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/6Ab1VSoMD5fvlagOW2QDOJ" + }, + "href": "https://api.spotify.com/v1/albums/6Ab1VSoMD5fvlagOW2QDOJ", + "id": 
"6Ab1VSoMD5fvlagOW2QDOJ", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02cdac047e7894fb56a0dfdcde", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851cdac047e7894fb56a0dfdcde", + "width": 64 + } + ], + "name": "Super Breath", + "release_date": "2024-07-24", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:6Ab1VSoMD5fvlagOW2QDOJ" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, + "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 211800, + "explicit": false, + "external_ids": { + "isrc": "QMB622409101" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/71dMjqJ8UJV700zYs5YZCh" + }, + "href": "https://api.spotify.com/v1/tracks/71dMjqJ8UJV700zYs5YZCh", + "id": "71dMjqJ8UJV700zYs5YZCh", + "is_local": false, + "name": "Super Breath", + "popularity": 58, + "preview_url": "https://p.scdn.co/mp3-preview/f1ee3ade75c6eb5cb227ed8c96de8674d8ce581f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:71dMjqJ8UJV700zYs5YZCh" + }, + "played_at": "2024-10-06T18:09:18.556Z", + "context": null + }, + { + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, 
+ "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/6Ab1VSoMD5fvlagOW2QDOJ" + }, + "href": "https://api.spotify.com/v1/albums/6Ab1VSoMD5fvlagOW2QDOJ", + "id": "6Ab1VSoMD5fvlagOW2QDOJ", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02cdac047e7894fb56a0dfdcde", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851cdac047e7894fb56a0dfdcde", + "width": 64 + } + ], + "name": "Super Breath", + "release_date": "2024-07-24", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:6Ab1VSoMD5fvlagOW2QDOJ" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6emHCSoB4tJxTVXakbrpPz" + }, + "href": "https://api.spotify.com/v1/artists/6emHCSoB4tJxTVXakbrpPz", + "id": "6emHCSoB4tJxTVXakbrpPz", + "name": "Karen O", + "type": "artist", + "uri": "spotify:artist:6emHCSoB4tJxTVXakbrpPz" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2dBj3prW7gP9bCCOIQeDUf" + }, + "href": "https://api.spotify.com/v1/artists/2dBj3prW7gP9bCCOIQeDUf", + "id": "2dBj3prW7gP9bCCOIQeDUf", + "name": "Danger Mouse", + "type": "artist", + "uri": "spotify:artist:2dBj3prW7gP9bCCOIQeDUf" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + 
"EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 211800, + "explicit": false, + "external_ids": { + "isrc": "QMB622409101" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/71dMjqJ8UJV700zYs5YZCh" + }, + "href": "https://api.spotify.com/v1/tracks/71dMjqJ8UJV700zYs5YZCh", + "id": "71dMjqJ8UJV700zYs5YZCh", + "is_local": false, + "name": "Super Breath", + "popularity": 58, + "preview_url": "https://p.scdn.co/mp3-preview/f1ee3ade75c6eb5cb227ed8c96de8674d8ce581f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:71dMjqJ8UJV700zYs5YZCh" + }, + "played_at": "2024-10-06T18:05:33.902Z", + "context": { + "type": "album", + "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP", + "external_urls": { + "spotify": "https://open.spotify.com/album/57MSBg5pBQZH5bfLVDmeuP" + }, + "uri": "spotify:album:57MSBg5pBQZH5bfLVDmeuP" + } + } + ], + "next": "https://api.spotify.com/v1/me/player/recently-played?before=1728234176022", + "cursors": { + "after": "1728238158556", + "before": "1728234176022" + }, + "limit": 20, + "href": "https://api.spotify.com/v1/me/player/recently-played" +} diff --git a/tests/components/spotify/fixtures/saved_albums.json b/tests/components/spotify/fixtures/saved_albums.json new file mode 100644 index 00000000000000..0d58ecb89ea29e --- /dev/null +++ b/tests/components/spotify/fixtures/saved_albums.json @@ -0,0 +1,7637 @@ +{ + "href": "https://api.spotify.com/v1/me/albums?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "added_at": "2024-09-19T22:00:00Z", + "album": { + "album_type": "album", + "total_tracks": 12, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + 
"QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/57MSBg5pBQZH5bfLVDmeuP" + }, + "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP?locale=en-US%2Cen%3Bq%3D0.5", + "id": "57MSBg5pBQZH5bfLVDmeuP", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b2733126a95bb7ed4146a80c7fc6", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e023126a95bb7ed4146a80c7fc6", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d000048513126a95bb7ed4146a80c7fc6", + "height": 64, + "width": 64 + } + ], + "name": "In Waves", + "release_date": "2024-09-20", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:57MSBg5pBQZH5bfLVDmeuP", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "tracks": { + "href": "https://api.spotify.com/v1/albums/57MSBg5pBQZH5bfLVDmeuP/tracks?offset=0&limit=50&locale=en-US,en;q%3D0.5", + "limit": 50, + "next": null, + "offset": 0, + "previous": null, + "total": 12, + "items": [ + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + 
"LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 135835, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/7uLBdV19ad7kAjU2oB1l6p" + }, + "href": "https://api.spotify.com/v1/tracks/7uLBdV19ad7kAjU2oB1l6p", + "id": "7uLBdV19ad7kAjU2oB1l6p", + "name": "Wanna", + "preview_url": "https://p.scdn.co/mp3-preview/fc112f83fe770b09e4c1bd586e5b9c144e384bd7?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:7uLBdV19ad7kAjU2oB1l6p", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 240580, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3pjX4hC8adabkXGu3X9GTC" + }, + "href": "https://api.spotify.com/v1/tracks/3pjX4hC8adabkXGu3X9GTC", + "id": "3pjX4hC8adabkXGu3X9GTC", + "name": "Treat Each Other Right", + "preview_url": "https://p.scdn.co/mp3-preview/a518fdb34284daa9a2298fd5491d6cede24a3e01?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:3pjX4hC8adabkXGu3X9GTC", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": 
"https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3X2DdnmoANw8Rg8luHyZQb" + }, + "href": "https://api.spotify.com/v1/artists/3X2DdnmoANw8Rg8luHyZQb", + "id": "3X2DdnmoANw8Rg8luHyZQb", + "name": "Romy", + "type": "artist", + "uri": "spotify:artist:3X2DdnmoANw8Rg8luHyZQb" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4KDu9uqzqseVCpQXMa8Pvm" + }, + "href": "https://api.spotify.com/v1/artists/4KDu9uqzqseVCpQXMa8Pvm", + "id": "4KDu9uqzqseVCpQXMa8Pvm", + "name": "Oliver Sim", + "type": "artist", + "uri": "spotify:artist:4KDu9uqzqseVCpQXMa8Pvm" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3iOvXCl6edW5Um0fXEBRXy" + }, + "href": "https://api.spotify.com/v1/artists/3iOvXCl6edW5Um0fXEBRXy", + "id": "3iOvXCl6edW5Um0fXEBRXy", + "name": "The xx", + "type": "artist", + "uri": "spotify:artist:3iOvXCl6edW5Um0fXEBRXy" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 208334, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4gBniy3TwR9o2JDBx48TlD" + }, + "href": "https://api.spotify.com/v1/tracks/4gBniy3TwR9o2JDBx48TlD", + "id": "4gBniy3TwR9o2JDBx48TlD", + "name": "Waited All Night", + "preview_url": "https://p.scdn.co/mp3-preview/b7820ac10349ca374242240f69887c073a4980f2?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 3, + "type": "track", + "uri": "spotify:track:4gBniy3TwR9o2JDBx48TlD", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0XfQBWgzisaS9ltDV9bXAS" 
+ }, + "href": "https://api.spotify.com/v1/artists/0XfQBWgzisaS9ltDV9bXAS", + "id": "0XfQBWgzisaS9ltDV9bXAS", + "name": "Honey Dijon", + "type": "artist", + "uri": "spotify:artist:0XfQBWgzisaS9ltDV9bXAS" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 222315, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/79gWc6dZ1dXH7rC67DTunz" + }, + "href": "https://api.spotify.com/v1/tracks/79gWc6dZ1dXH7rC67DTunz", + "id": "79gWc6dZ1dXH7rC67DTunz", + "name": "Baddy On The Floor", + "preview_url": "https://p.scdn.co/mp3-preview/c260664dd5adc2290fce52cb51aa8667e39c2118?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 4, + "type": "track", + "uri": "spotify:track:79gWc6dZ1dXH7rC67DTunz", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0fEfMW5bypHZ0A8eLnhwj5" + }, + "href": "https://api.spotify.com/v1/artists/0fEfMW5bypHZ0A8eLnhwj5", + "id": "0fEfMW5bypHZ0A8eLnhwj5", + "name": "Kelsey Lu", + "type": "artist", + "uri": "spotify:artist:0fEfMW5bypHZ0A8eLnhwj5" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0FNfiTQCR5o3ounOlWzm1d" + }, + "href": "https://api.spotify.com/v1/artists/0FNfiTQCR5o3ounOlWzm1d", + "id": "0FNfiTQCR5o3ounOlWzm1d", + "name": "John Glacier", + "type": "artist", + "uri": "spotify:artist:0FNfiTQCR5o3ounOlWzm1d" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/1R84VlXnFFULOsWWV8IrCQ" + }, + "href": "https://api.spotify.com/v1/artists/1R84VlXnFFULOsWWV8IrCQ", + "id": "1R84VlXnFFULOsWWV8IrCQ", + "name": "Panda Bear", + "type": "artist", + "uri": "spotify:artist:1R84VlXnFFULOsWWV8IrCQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + 
"BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 212339, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1gRMKwvMvp6LcQVMpMXQg2" + }, + "href": "https://api.spotify.com/v1/tracks/1gRMKwvMvp6LcQVMpMXQg2", + "id": "1gRMKwvMvp6LcQVMpMXQg2", + "name": "Dafodil", + "preview_url": "https://p.scdn.co/mp3-preview/173fad98e5e51a6cfb02b3cb394ab46c70d44303?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 5, + "type": "track", + "uri": "spotify:track:1gRMKwvMvp6LcQVMpMXQg2", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + 
"TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 205638, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/27D9YN3uHPD3PTXvzNtbto" + }, + "href": "https://api.spotify.com/v1/tracks/27D9YN3uHPD3PTXvzNtbto", + "id": "27D9YN3uHPD3PTXvzNtbto", + "name": "Still Summer", + "preview_url": "https://p.scdn.co/mp3-preview/e959ae6394e9d19e00cd474ed2b76bb43b6063d9?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 6, + "type": "track", + "uri": "spotify:track:27D9YN3uHPD3PTXvzNtbto", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6UE7nl9mha6s8z0wFQFIZ2" + }, + "href": "https://api.spotify.com/v1/artists/6UE7nl9mha6s8z0wFQFIZ2", + "id": "6UE7nl9mha6s8z0wFQFIZ2", + "name": "Robyn", + "type": "artist", + "uri": "spotify:artist:6UE7nl9mha6s8z0wFQFIZ2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 202648, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/0pMj03SiaZ9bkFlXQWNhtZ" + }, + "href": "https://api.spotify.com/v1/tracks/0pMj03SiaZ9bkFlXQWNhtZ", + "id": "0pMj03SiaZ9bkFlXQWNhtZ", + "name": "Life", + "preview_url": "https://p.scdn.co/mp3-preview/261bc3bd3192ef4158b1ca42e95262113241a326?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 7, + "type": "track", + "uri": "spotify:track:0pMj03SiaZ9bkFlXQWNhtZ", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": 
"https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 222365, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/7gb0pekqHQYTGo6NWLBvT5" + }, + "href": "https://api.spotify.com/v1/tracks/7gb0pekqHQYTGo6NWLBvT5", + "id": "7gb0pekqHQYTGo6NWLBvT5", + "name": "The Feeling I Get From You", + "preview_url": "https://p.scdn.co/mp3-preview/da24fadc4bca20394435e53f5d61e8f6c36f9614?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 8, + "type": "track", + "uri": "spotify:track:7gb0pekqHQYTGo6NWLBvT5", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + 
"FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 376918, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6pOzbdJKEr4hvXkX7VkfY6" + }, + "href": "https://api.spotify.com/v1/tracks/6pOzbdJKEr4hvXkX7VkfY6", + "id": "6pOzbdJKEr4hvXkX7VkfY6", + "name": "Breather", + "preview_url": "https://p.scdn.co/mp3-preview/dc7cd612c205968f5d6cb32696305656ae7ad888?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 9, + "type": "track", + "uri": "spotify:track:6pOzbdJKEr4hvXkX7VkfY6", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3C8RpaI3Go0yFF9whvKoED" + }, + "href": "https://api.spotify.com/v1/artists/3C8RpaI3Go0yFF9whvKoED", + "id": "3C8RpaI3Go0yFF9whvKoED", + "name": "The Avalanches", + "type": "artist", + "uri": "spotify:artist:3C8RpaI3Go0yFF9whvKoED" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 254142, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3cfgisz6DhZmooQk08P4Eu" + }, + "href": "https://api.spotify.com/v1/tracks/3cfgisz6DhZmooQk08P4Eu", + "id": "3cfgisz6DhZmooQk08P4Eu", + "name": "All You 
Children", + "preview_url": "https://p.scdn.co/mp3-preview/ff3fc064f340e47347d4677332daf6da8155ae38?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 10, + "type": "track", + "uri": "spotify:track:3cfgisz6DhZmooQk08P4Eu", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 71680, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1wpcJ6TCrKpH6KdBmrp9yN" + }, + "href": "https://api.spotify.com/v1/tracks/1wpcJ6TCrKpH6KdBmrp9yN", + "id": "1wpcJ6TCrKpH6KdBmrp9yN", + "name": "Every Single Weekend - Interlude", + "preview_url": "https://p.scdn.co/mp3-preview/2c46e4cea66da846807b70c7974d19b7837eba52?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 11, + "type": "track", + "uri": "spotify:track:1wpcJ6TCrKpH6KdBmrp9yN", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7A0awCXkE1FtSU8B0qwOJQ" + }, + "href": "https://api.spotify.com/v1/artists/7A0awCXkE1FtSU8B0qwOJQ", + "id": "7A0awCXkE1FtSU8B0qwOJQ", + "name": "Jamie xx", + "type": "artist", + "uri": "spotify:artist:7A0awCXkE1FtSU8B0qwOJQ" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2Q4FR4Ss0mh6EvbiQBHEOU" + }, + "href": "https://api.spotify.com/v1/artists/2Q4FR4Ss0mh6EvbiQBHEOU", + "id": "2Q4FR4Ss0mh6EvbiQBHEOU", + "name": "Oona Doherty", + "type": "artist", + "uri": "spotify:artist:2Q4FR4Ss0mh6EvbiQBHEOU" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + 
"MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 337414, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/08Jhu8OZ6gCIGWQn6vP3uI" + }, + "href": "https://api.spotify.com/v1/tracks/08Jhu8OZ6gCIGWQn6vP3uI", + "id": "08Jhu8OZ6gCIGWQn6vP3uI", + "name": "Falling Together", + "preview_url": "https://p.scdn.co/mp3-preview/2fa5fc5e733495719170f672a07b172bf678a89f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 12, + "type": "track", + "uri": "spotify:track:08Jhu8OZ6gCIGWQn6vP3uI", + "is_local": false + } + ] + }, + "copyrights": [ + { + "text": "2024 Young", + "type": "C" + }, + { + "text": "2024 Young", + "type": "P" + } + ], + "external_ids": { + "upc": "889030035653" + }, + "genres": [], + "label": "Young", + "popularity": 73 + } + }, + { + "added_at": "2024-09-05T22:00:00Z", + "album": { + "album_type": "album", + "total_tracks": 20, + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + 
"IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/3DQueEd1Ft9PHWgovDzPKh" + }, + "href": "https://api.spotify.com/v1/albums/3DQueEd1Ft9PHWgovDzPKh?locale=en-US%2Cen%3Bq%3D0.5", + "id": "3DQueEd1Ft9PHWgovDzPKh", + "images": [ + { + "url": "https://i.scdn.co/image/ab67616d0000b2736b8a4828e057b7dc1c4a4d39", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67616d00001e026b8a4828e057b7dc1c4a4d39", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab67616d000048516b8a4828e057b7dc1c4a4d39", + "height": 64, + "width": 64 + } + ], + "name": "ten days", + "release_date": "2024-09-06", + "release_date_precision": "day", + "type": "album", + "uri": "spotify:album:3DQueEd1Ft9PHWgovDzPKh", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "tracks": { + "href": "https://api.spotify.com/v1/albums/3DQueEd1Ft9PHWgovDzPKh/tracks?offset=0&limit=50&locale=en-US,en;q%3D0.5", + "limit": 50, + "next": null, + "offset": 0, + "previous": null, + "total": 20, + "items": [ + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 30857, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/00nDbqJkHBGUFdim9M0xGc" + }, + "href": "https://api.spotify.com/v1/tracks/00nDbqJkHBGUFdim9M0xGc", + "id": "00nDbqJkHBGUFdim9M0xGc", + "name": ".one", + "preview_url": 
"https://p.scdn.co/mp3-preview/52224422e178fa35baa9ffbf097372b7031fbecf?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:00nDbqJkHBGUFdim9M0xGc", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6l7R1jntPahGxwJt7Tky8h" + }, + "href": "https://api.spotify.com/v1/artists/6l7R1jntPahGxwJt7Tky8h", + "id": "6l7R1jntPahGxwJt7Tky8h", + "name": "Obongjayar", + "type": "artist", + "uri": "spotify:artist:6l7R1jntPahGxwJt7Tky8h" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 220653, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1rf4SX7dduNbrNnOmupLzi" + }, + "href": "https://api.spotify.com/v1/tracks/1rf4SX7dduNbrNnOmupLzi", + "id": "1rf4SX7dduNbrNnOmupLzi", + "name": "adore u", + "preview_url": "https://p.scdn.co/mp3-preview/49ddf22bfe3925899cbb9ecf5d5157525becdcb4?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:1rf4SX7dduNbrNnOmupLzi", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + 
"PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 10670, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/0lt9clHEwYyheuC9rik9UH" + }, + "href": "https://api.spotify.com/v1/tracks/0lt9clHEwYyheuC9rik9UH", + "id": "0lt9clHEwYyheuC9rik9UH", + "name": ".two", + "preview_url": "https://p.scdn.co/mp3-preview/59a26651d9742fa1856469cf1c0f8c7c55819525?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 3, + "type": "track", + "uri": "spotify:track:0lt9clHEwYyheuC9rik9UH", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6Ja6zFB5d7XRihhfMo6KzY" + }, + "href": "https://api.spotify.com/v1/artists/6Ja6zFB5d7XRihhfMo6KzY", + "id": "6Ja6zFB5d7XRihhfMo6KzY", + "name": "Jozzy", + "type": "artist", + "uri": "spotify:artist:6Ja6zFB5d7XRihhfMo6KzY" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7IrBqZo6diq3hV3GpUhrs2" + }, + "href": "https://api.spotify.com/v1/artists/7IrBqZo6diq3hV3GpUhrs2", + "id": "7IrBqZo6diq3hV3GpUhrs2", + "name": "Jim Legxacy", + "type": "artist", + "uri": "spotify:artist:7IrBqZo6diq3hV3GpUhrs2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + 
"GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 181545, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6twB0uYXJYW9t5GHfYaQ3i" + }, + "href": "https://api.spotify.com/v1/tracks/6twB0uYXJYW9t5GHfYaQ3i", + "id": "6twB0uYXJYW9t5GHfYaQ3i", + "name": "ten", + "preview_url": "https://p.scdn.co/mp3-preview/99fc4c0f25e64d30af9e619ea820bed60aa2b1c6?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 4, + "type": "track", + "uri": "spotify:track:6twB0uYXJYW9t5GHfYaQ3i", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 15034, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/6G7TRmzTt9tnrM0QqSVpJW" + }, + "href": "https://api.spotify.com/v1/tracks/6G7TRmzTt9tnrM0QqSVpJW", + "id": "6G7TRmzTt9tnrM0QqSVpJW", + "name": ".three", + "preview_url": "https://p.scdn.co/mp3-preview/7aeb75b213d74995df23a41d86494834bc801d78?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 5, + "type": "track", + "uri": "spotify:track:6G7TRmzTt9tnrM0QqSVpJW", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" 
+ }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/2WoVwexZuODvclzULjPQtm" + }, + "href": "https://api.spotify.com/v1/artists/2WoVwexZuODvclzULjPQtm", + "id": "2WoVwexZuODvclzULjPQtm", + "name": "Sampha", + "type": "artist", + "uri": "spotify:artist:2WoVwexZuODvclzULjPQtm" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 214469, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4IHblO52meh2jwqES1BA7X" + }, + "href": "https://api.spotify.com/v1/tracks/4IHblO52meh2jwqES1BA7X", + "id": "4IHblO52meh2jwqES1BA7X", + "name": "fear less", + "preview_url": "https://p.scdn.co/mp3-preview/c0952ae5c7423cc08ca7a53f0f182a6f20586cde?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 6, + "type": "track", + "uri": "spotify:track:4IHblO52meh2jwqES1BA7X", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + 
"AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 9856, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1wU9pfdw6ht8HKfxz6wMNq" + }, + "href": "https://api.spotify.com/v1/tracks/1wU9pfdw6ht8HKfxz6wMNq", + "id": "1wU9pfdw6ht8HKfxz6wMNq", + "name": ".four", + "preview_url": "https://p.scdn.co/mp3-preview/a4a6f591cb0cf93a7d57df33ad70ac1d8b7db349?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 7, + "type": "track", + "uri": "spotify:track:1wU9pfdw6ht8HKfxz6wMNq", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4PLsMEk2DCRVlVL2a9aZAv" + }, + "href": "https://api.spotify.com/v1/artists/4PLsMEk2DCRVlVL2a9aZAv", + "id": "4PLsMEk2DCRVlVL2a9aZAv", + "name": "SOAK", + "type": "artist", + "uri": "spotify:artist:4PLsMEk2DCRVlVL2a9aZAv" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 260997, + "explicit": false, + "external_urls": { 
+ "spotify": "https://open.spotify.com/track/2D9a9CXeo3HFtVeaNlzp4a" + }, + "href": "https://api.spotify.com/v1/tracks/2D9a9CXeo3HFtVeaNlzp4a", + "id": "2D9a9CXeo3HFtVeaNlzp4a", + "name": "just stand there", + "preview_url": "https://p.scdn.co/mp3-preview/06a95f2285831e3f4848718f5c8c2f7deeafaf80?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 8, + "type": "track", + "uri": "spotify:track:2D9a9CXeo3HFtVeaNlzp4a", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 15254, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3vTHKAYJy0hY1OkVv1qLNM" + }, + "href": "https://api.spotify.com/v1/tracks/3vTHKAYJy0hY1OkVv1qLNM", + "id": "3vTHKAYJy0hY1OkVv1qLNM", + "name": ".five", + "preview_url": "https://p.scdn.co/mp3-preview/29846c63d0cf33c05ee69ea92d412a2f473e1604?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 9, + "type": "track", + "uri": "spotify:track:3vTHKAYJy0hY1OkVv1qLNM", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3jK9MiCrA42lLAdMGUZpwa" + }, + "href": "https://api.spotify.com/v1/artists/3jK9MiCrA42lLAdMGUZpwa", + "id": "3jK9MiCrA42lLAdMGUZpwa", + "name": "Anderson .Paak", + "type": "artist", + "uri": "spotify:artist:3jK9MiCrA42lLAdMGUZpwa" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6UtYvUtXnmg5EtllDFlWp8" + }, + "href": 
"https://api.spotify.com/v1/artists/6UtYvUtXnmg5EtllDFlWp8", + "id": "6UtYvUtXnmg5EtllDFlWp8", + "name": "CHIKA", + "type": "artist", + "uri": "spotify:artist:6UtYvUtXnmg5EtllDFlWp8" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 224073, + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/track/1qfJ6OvxrspQTmcvdIEoX6" + }, + "href": "https://api.spotify.com/v1/tracks/1qfJ6OvxrspQTmcvdIEoX6", + "id": "1qfJ6OvxrspQTmcvdIEoX6", + "name": "places to be", + "preview_url": "https://p.scdn.co/mp3-preview/5c1c520365bbd3c9e2e84be42d9d70b0ec71ed01?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 10, + "type": "track", + "uri": "spotify:track:1qfJ6OvxrspQTmcvdIEoX6", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", 
+ "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 28836, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/13H2XgH3k8SEptaoD5qeLG" + }, + "href": "https://api.spotify.com/v1/tracks/13H2XgH3k8SEptaoD5qeLG", + "id": "13H2XgH3k8SEptaoD5qeLG", + "name": ".six", + "preview_url": "https://p.scdn.co/mp3-preview/e630a09889f8e86bca24bcb54a6448e8c969936f?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 11, + "type": "track", + "uri": "spotify:track:13H2XgH3k8SEptaoD5qeLG", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/59MDSNIYoOY0WRYuodzJPD" + }, + "href": "https://api.spotify.com/v1/artists/59MDSNIYoOY0WRYuodzJPD", + "id": "59MDSNIYoOY0WRYuodzJPD", + "name": "Duskus", + "type": "artist", + "uri": "spotify:artist:59MDSNIYoOY0WRYuodzJPD" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7Eu1txygG6nJttLHbZdQOh" + }, + "href": "https://api.spotify.com/v1/artists/7Eu1txygG6nJttLHbZdQOh", + "id": "7Eu1txygG6nJttLHbZdQOh", + "name": "Four Tet", + "type": "artist", + "uri": "spotify:artist:7Eu1txygG6nJttLHbZdQOh" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3pK4EcflBpG1Kpmjk5LK2R" + }, + "href": "https://api.spotify.com/v1/artists/3pK4EcflBpG1Kpmjk5LK2R", + "id": "3pK4EcflBpG1Kpmjk5LK2R", + "name": "Joy Anonymous", + "type": "artist", + "uri": "spotify:artist:3pK4EcflBpG1Kpmjk5LK2R" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5he5w2lnU9x7JFhnwcekXX" + }, + "href": "https://api.spotify.com/v1/artists/5he5w2lnU9x7JFhnwcekXX", + "id": "5he5w2lnU9x7JFhnwcekXX", + "name": "Skrillex", + "type": "artist", + "uri": "spotify:artist:5he5w2lnU9x7JFhnwcekXX" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + 
"MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 453068, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3i9QKRl5Ql3pgUfNdYBVTc" + }, + "href": "https://api.spotify.com/v1/tracks/3i9QKRl5Ql3pgUfNdYBVTc", + "id": "3i9QKRl5Ql3pgUfNdYBVTc", + "name": "glow", + "preview_url": "https://p.scdn.co/mp3-preview/4ddd31cf8fe9f76b8aa72e2a1b5d51ccc9e00e5a?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 12, + "type": "track", + "uri": "spotify:track:3i9QKRl5Ql3pgUfNdYBVTc", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 31749, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/2OLH9ukOFDVBMuVUuy2sFW" + }, + "href": "https://api.spotify.com/v1/tracks/2OLH9ukOFDVBMuVUuy2sFW", + "id": "2OLH9ukOFDVBMuVUuy2sFW", + "name": ".seven", + "preview_url": "https://p.scdn.co/mp3-preview/cc0e8af8b91eff643b65fefdbc6b32fe2a7ad7db?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 13, + "type": "track", + "uri": "spotify:track:2OLH9ukOFDVBMuVUuy2sFW", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": 
"https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 220656, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/3DzWFxyzsAVblVNndiU9CW" + }, + "href": "https://api.spotify.com/v1/tracks/3DzWFxyzsAVblVNndiU9CW", + "id": "3DzWFxyzsAVblVNndiU9CW", + "name": "i saw you", + "preview_url": "https://p.scdn.co/mp3-preview/e2b23e98a35b1ccbce037d34c2c38c49b2371142?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 14, + "type": "track", + "uri": "spotify:track:3DzWFxyzsAVblVNndiU9CW", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + 
"MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 15037, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/1aTcAf7K1ym8lBcuu8nmJA" + }, + "href": "https://api.spotify.com/v1/tracks/1aTcAf7K1ym8lBcuu8nmJA", + "id": "1aTcAf7K1ym8lBcuu8nmJA", + "name": ".eight", + "preview_url": "https://p.scdn.co/mp3-preview/d2910a98ace82ead87c06aad442b0f8104263feb?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 15, + "type": "track", + "uri": "spotify:track:1aTcAf7K1ym8lBcuu8nmJA", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5s6TJEuHTr9GR894wc6VfP" + }, + "href": "https://api.spotify.com/v1/artists/5s6TJEuHTr9GR894wc6VfP", + "id": "5s6TJEuHTr9GR894wc6VfP", + "name": "Emmylou Harris", + "type": "artist", + "uri": "spotify:artist:5s6TJEuHTr9GR894wc6VfP" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 200737, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4S05mkyTtAiWy5l4umch0X" + }, + "href": "https://api.spotify.com/v1/tracks/4S05mkyTtAiWy5l4umch0X", + "id": "4S05mkyTtAiWy5l4umch0X", + "name": "where will i be", + "preview_url": 
"https://p.scdn.co/mp3-preview/c8b398eaced8e21a97b1460480ab58a2c44364dd?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 16, + "type": "track", + "uri": "spotify:track:4S05mkyTtAiWy5l4umch0X", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 19060, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/5aNwAqN5Gk5oZIwW5KfhXN" + }, + "href": "https://api.spotify.com/v1/tracks/5aNwAqN5Gk5oZIwW5KfhXN", + "id": "5aNwAqN5Gk5oZIwW5KfhXN", + "name": ".nine", + "preview_url": "https://p.scdn.co/mp3-preview/d444f5f0921bee7a12beff1649a3cf295a822c76?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 17, + "type": "track", + "uri": "spotify:track:5aNwAqN5Gk5oZIwW5KfhXN", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3pK4EcflBpG1Kpmjk5LK2R" + }, + "href": "https://api.spotify.com/v1/artists/3pK4EcflBpG1Kpmjk5LK2R", + "id": "3pK4EcflBpG1Kpmjk5LK2R", + "name": "Joy Anonymous", + "type": "artist", + "uri": "spotify:artist:3pK4EcflBpG1Kpmjk5LK2R" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + 
"PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 344068, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/4A8tKYA7gwZzQ4jVwIv1sv" + }, + "href": "https://api.spotify.com/v1/tracks/4A8tKYA7gwZzQ4jVwIv1sv", + "id": "4A8tKYA7gwZzQ4jVwIv1sv", + "name": "peace u need", + "preview_url": "https://p.scdn.co/mp3-preview/d333ce79ff70629051c9db4c5850b2b22288df71?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 18, + "type": "track", + "uri": "spotify:track:4A8tKYA7gwZzQ4jVwIv1sv", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + 
"duration_ms": 29540, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/2feEZkLf7dZUueeVBNsdor" + }, + "href": "https://api.spotify.com/v1/tracks/2feEZkLf7dZUueeVBNsdor", + "id": "2feEZkLf7dZUueeVBNsdor", + "name": ".ten", + "preview_url": "https://p.scdn.co/mp3-preview/72d66fa681d50abf590a9cca9553b112fa03c1ee?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 19, + "type": "track", + "uri": "spotify:track:2feEZkLf7dZUueeVBNsdor", + "is_local": false + }, + { + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4oLeXFyACqeem2VImYeBFe" + }, + "href": "https://api.spotify.com/v1/artists/4oLeXFyACqeem2VImYeBFe", + "id": "4oLeXFyACqeem2VImYeBFe", + "name": "Fred again..", + "type": "artist", + "uri": "spotify:artist:4oLeXFyACqeem2VImYeBFe" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/3IunaFjvNKj98JW89JYv9u" + }, + "href": "https://api.spotify.com/v1/artists/3IunaFjvNKj98JW89JYv9u", + "id": "3IunaFjvNKj98JW89JYv9u", + "name": "The Japanese House", + "type": "artist", + "uri": "spotify:artist:3IunaFjvNKj98JW89JYv9u" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6M98IZJK2tx6x2YVyHua9K" + }, + "href": "https://api.spotify.com/v1/artists/6M98IZJK2tx6x2YVyHua9K", + "id": "6M98IZJK2tx6x2YVyHua9K", + "name": "Scott Hardkiss", + "type": "artist", + "uri": "spotify:artist:6M98IZJK2tx6x2YVyHua9K" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 314007, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/track/61pyjiweMDS1h930OgS0XO" + }, + "href": "https://api.spotify.com/v1/tracks/61pyjiweMDS1h930OgS0XO", + "id": "61pyjiweMDS1h930OgS0XO", + "name": "backseat", + "preview_url": "https://p.scdn.co/mp3-preview/f14667711679c1f2c09e356ed12f1a1fad7464ac?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 20, + "type": "track", + "uri": "spotify:track:61pyjiweMDS1h930OgS0XO", + "is_local": false + } + ] + }, + "copyrights": [ + { + "text": "Under exclusive licence to Warner 
Music UK Limited. An Atlantic Records UK., © 2024 Fred Gibson", + "type": "C" + }, + { + "text": "Under exclusive licence to Warner Music UK Limited. An Atlantic Records UK., ℗ 2024 Fred Gibson", + "type": "P" + } + ], + "external_ids": { + "upc": "5021732457110" + }, + "genres": [], + "label": "Atlantic Records UK", + "popularity": 80 + } + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/me/albums?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 34 +} diff --git a/tests/components/spotify/fixtures/saved_shows.json b/tests/components/spotify/fixtures/saved_shows.json new file mode 100644 index 00000000000000..acfd5a1b465e49 --- /dev/null +++ b/tests/components/spotify/fixtures/saved_shows.json @@ -0,0 +1,462 @@ +{ + "href": "https://api.spotify.com/v1/me/shows?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "added_at": "2023-08-10T08:17:09Z", + "show": { + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "We’ll all giggle along at naughty jokes, your dating horror stories and give questionable recommendations on movies, food and relationships. This podcast is hot, fun garbage and we (Toni Lodge and Ryan Jon here in Melbourne, Australia) would love you to climb aboard and be our friends. Hosted on Acast. See acast.com/privacy for more information.", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/5OzkclFjD6iAjtAuo7aIYt" + }, + "href": "https://api.spotify.com/v1/shows/5OzkclFjD6iAjtAuo7aIYt", + "html_description": "We’ll all giggle along at naughty jokes, your dating horror stories and give questionable recommendations on movies, food and relationships. This podcast is hot, fun garbage and we (Toni Lodge and Ryan Jon here in Melbourne, Australia) would love you to climb aboard and be our friends.
Hosted on Acast. See acast.com/privacy for more information.
", + "id": "5OzkclFjD6iAjtAuo7aIYt", + "images": [ + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68db5f65a943ef4f707bf79949b", + "width": 64 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fb5f65a943ef4f707bf79949b", + "width": 300 + }, + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ab5f65a943ef4f707bf79949b", + "width": 640 + } + ], + "is_externally_hosted": false, + "languages": ["en"], + "media_type": "audio", + "name": "Toni and Ryan", + "publisher": "Toni Lodge and Ryan Jon", + "total_episodes": 741, + "type": "show", + "uri": "spotify:show:5OzkclFjD6iAjtAuo7aIYt" + } + }, + { + "added_at": "2022-09-15T23:48:23Z", + "show": { + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "Welcome to BLAST Push To Talk, Counter-Strike like you’ve never heard it before.Join our host Moses and our field reporters Scrawny and Launders as they interview pro players, share their hot takes on the latest and greatest news in the CS world courtesy of EPOS.", + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/show/6XYRres0KZtnTqKcLavWR2" + }, + "href": "https://api.spotify.com/v1/shows/6XYRres0KZtnTqKcLavWR2", + "html_description": "Welcome to BLAST Push To Talk, Counter-Strike like you’ve never heard it before.
Join our host Moses and our field reporters Scrawny and Launders as they interview pro players, share their hot takes on the latest and greatest news in the CS world courtesy of EPOS.", + "id": "6XYRres0KZtnTqKcLavWR2", + "images": [ + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68d5fccb05c5685c081d5c2ad9c", + "width": 64 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1f5fccb05c5685c081d5c2ad9c", + "width": 300 + }, + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8a5fccb05c5685c081d5c2ad9c", + "width": 640 + } + ], + "is_externally_hosted": false, + "languages": ["en"], + "media_type": "audio", + "name": "BLAST Push To Talk", + "publisher": "BLAST Premier", + "total_episodes": 19, + "type": "show", + "uri": "spotify:show:6XYRres0KZtnTqKcLavWR2" + } + } + ], + "limit": 20, + "next": null, + "offset": 0, + "previous": null, + "total": 10 +} diff --git a/tests/components/spotify/fixtures/saved_tracks.json b/tests/components/spotify/fixtures/saved_tracks.json new file mode 100644 index 00000000000000..e80d5b39dcd535 --- /dev/null +++ b/tests/components/spotify/fixtures/saved_tracks.json @@ -0,0 +1,978 @@ +{ + "href": "https://api.spotify.com/v1/me/tracks?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "added_at": "2024-10-06T11:35:02Z", + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7zrkALJ9ayRjzysp4QYoEg" + }, + "href": "https://api.spotify.com/v1/artists/7zrkALJ9ayRjzysp4QYoEg", + "id": "7zrkALJ9ayRjzysp4QYoEg", + "name": "Maribou State", + "type": "artist", + "uri": "spotify:artist:7zrkALJ9ayRjzysp4QYoEg" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5vssQp6TyMHsx4mihKVAsC" + }, + "href": "https://api.spotify.com/v1/artists/5vssQp6TyMHsx4mihKVAsC", + "id": "5vssQp6TyMHsx4mihKVAsC", + "name": "Holly Walker", + "type": "artist", + "uri": "spotify:artist:5vssQp6TyMHsx4mihKVAsC" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + 
"spotify": "https://open.spotify.com/album/3BYf1IG8EqDbhzdpljcFWY" + }, + "href": "https://api.spotify.com/v1/albums/3BYf1IG8EqDbhzdpljcFWY", + "id": "3BYf1IG8EqDbhzdpljcFWY", + "images": [ + { + "height": 640, + "width": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273ac9dd449e38e5e8952fd22ad" + }, + { + "height": 300, + "width": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02ac9dd449e38e5e8952fd22ad" + }, + { + "height": 64, + "width": 64, + "url": "https://i.scdn.co/image/ab67616d00004851ac9dd449e38e5e8952fd22ad" + } + ], + "is_playable": true, + "name": "Otherside", + "release_date": "2024-10-02", + "release_date_precision": "day", + "total_tracks": 2, + "type": "album", + "uri": "spotify:album:3BYf1IG8EqDbhzdpljcFWY" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/7zrkALJ9ayRjzysp4QYoEg" + }, + "href": "https://api.spotify.com/v1/artists/7zrkALJ9ayRjzysp4QYoEg", + "id": "7zrkALJ9ayRjzysp4QYoEg", + "name": "Maribou State", + "type": "artist", + "uri": "spotify:artist:7zrkALJ9ayRjzysp4QYoEg" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/5vssQp6TyMHsx4mihKVAsC" + }, + "href": "https://api.spotify.com/v1/artists/5vssQp6TyMHsx4mihKVAsC", + "id": "5vssQp6TyMHsx4mihKVAsC", + "name": "Holly Walker", + "type": "artist", + "uri": "spotify:artist:5vssQp6TyMHsx4mihKVAsC" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 233211, + "explicit": false, + "external_ids": { + "isrc": "GBCFB2300767" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/2pj2A25YQK4uMxhZheNx7R" + }, + "href": "https://api.spotify.com/v1/tracks/2pj2A25YQK4uMxhZheNx7R", + "id": "2pj2A25YQK4uMxhZheNx7R", + "is_local": false, + "is_playable": true, + "name": "Otherside", + "popularity": 47, + "preview_url": "https://p.scdn.co/mp3-preview/f18011c5d9a973f85ed8dce6d698e6043efdcf60?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:2pj2A25YQK4uMxhZheNx7R" + } + }, + { + "added_at": 
"2024-10-06T07:37:53Z", + "track": { + "album": { + "album_type": "single", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0HHa7ZJZxUQlg5l2mB0N0f" + }, + "href": "https://api.spotify.com/v1/artists/0HHa7ZJZxUQlg5l2mB0N0f", + "id": "0HHa7ZJZxUQlg5l2mB0N0f", + "name": "Marlon Hoffstadt", + "type": "artist", + "uri": "spotify:artist:0HHa7ZJZxUQlg5l2mB0N0f" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/68sTQgQtPe9e4Bb7OtoqET" + }, + "href": "https://api.spotify.com/v1/artists/68sTQgQtPe9e4Bb7OtoqET", + "id": "68sTQgQtPe9e4Bb7OtoqET", + "name": "Crybaby", + "type": "artist", + "uri": "spotify:artist:68sTQgQtPe9e4Bb7OtoqET" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4lBSzo2LS8asEzoePv6VLM" + }, + "href": "https://api.spotify.com/v1/artists/4lBSzo2LS8asEzoePv6VLM", + "id": "4lBSzo2LS8asEzoePv6VLM", + "name": "DJ Daddy Trance", + "type": "artist", + "uri": "spotify:artist:4lBSzo2LS8asEzoePv6VLM" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/1ElP3WFqq5sgMcc3ScIR4l" + }, + "href": "https://api.spotify.com/v1/albums/1ElP3WFqq5sgMcc3ScIR4l", + "id": "1ElP3WFqq5sgMcc3ScIR4l", + "images": [ + { + "height": 640, + "width": 640, + "url": "https://i.scdn.co/image/ab67616d0000b2733d710ab088ff797e80cc5aed" + }, + { + "height": 300, + "width": 300, + "url": "https://i.scdn.co/image/ab67616d00001e023d710ab088ff797e80cc5aed" + }, + { + "height": 64, + "width": 64, + "url": "https://i.scdn.co/image/ab67616d000048513d710ab088ff797e80cc5aed" + } + ], + "is_playable": true, + "name": "I Think I Need A DJ", + "release_date": "2024-09-20", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:1ElP3WFqq5sgMcc3ScIR4l" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0HHa7ZJZxUQlg5l2mB0N0f" + }, + "href": "https://api.spotify.com/v1/artists/0HHa7ZJZxUQlg5l2mB0N0f", + "id": "0HHa7ZJZxUQlg5l2mB0N0f", + "name": "Marlon 
Hoffstadt", + "type": "artist", + "uri": "spotify:artist:0HHa7ZJZxUQlg5l2mB0N0f" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/68sTQgQtPe9e4Bb7OtoqET" + }, + "href": "https://api.spotify.com/v1/artists/68sTQgQtPe9e4Bb7OtoqET", + "id": "68sTQgQtPe9e4Bb7OtoqET", + "name": "Crybaby", + "type": "artist", + "uri": "spotify:artist:68sTQgQtPe9e4Bb7OtoqET" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4lBSzo2LS8asEzoePv6VLM" + }, + "href": "https://api.spotify.com/v1/artists/4lBSzo2LS8asEzoePv6VLM", + "id": "4lBSzo2LS8asEzoePv6VLM", + "name": "DJ Daddy Trance", + "type": "artist", + "uri": "spotify:artist:4lBSzo2LS8asEzoePv6VLM" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 155000, + "explicit": false, + "external_ids": { + "isrc": "DEKF22400978" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/2lKOI1nwP5qZtZC7TGQVY8" + }, + "href": "https://api.spotify.com/v1/tracks/2lKOI1nwP5qZtZC7TGQVY8", + "id": "2lKOI1nwP5qZtZC7TGQVY8", + "is_local": false, + "is_playable": true, + "name": "I Think I Need A DJ", + "popularity": 53, + "preview_url": "https://p.scdn.co/mp3-preview/ad1c9d47d0f5ed500118e9dfc2558bd77612cae3?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": "spotify:track:2lKOI1nwP5qZtZC7TGQVY8" + } + } + ], + "limit": 2, + "next": "https://api.spotify.com/v1/me/tracks?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 4816 +} diff --git a/tests/components/spotify/fixtures/show.json b/tests/components/spotify/fixtures/show.json new file mode 100644 index 00000000000000..d9a89b2cc8da8c --- /dev/null +++ b/tests/components/spotify/fixtures/show.json @@ -0,0 +1,317 @@ +{ + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + 
"MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", + "html_description": "
Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD?locale=en-US%2Cen%3Bq%3D0.5", + "id": "1Y9ExMgMxoBVrgrfU7u0nD", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "languages": ["en-US"], + "media_type": "audio", + "name": "Safety Third", + "publisher": "Safety Third ", + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD", + "total_episodes": 120, + "episodes": { + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD/episodes?offset=0&limit=50&locale=en-US,en;q%3D0.5", + "limit": 50, + "next": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD/episodes?offset=50&limit=50&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 120, + "items": [ + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "html_description": "
Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 3690161, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" + }, + "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", + "id": "3o0RYoo5iOMKSmEbunsbvW", + "images": [ + { + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "height": 64, + "width": 64 + } + ], + "is_externally_hosted": true, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "My Squirrel Has Brain Damage - Safety Third 119", + "release_date": "2024-07-26", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" + }, + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/6msRFio3561me28DofTad7/clip_570865_630865.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "html_description": "
Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 5690591, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/7CbsFHQq8ljztiUSGw46Fj" + }, + "href": "https://api.spotify.com/v1/episodes/7CbsFHQq8ljztiUSGw46Fj", + "id": "7CbsFHQq8ljztiUSGw46Fj", + "images": [ + { + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "height": 640, + "width": 640 + }, + { + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "height": 300, + "width": 300 + }, + { + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "height": 64, + "width": 64 + } + ], + "is_externally_hosted": true, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "Math Haters vs Math Nerd - Safety Third 118", + "release_date": "2024-07-18", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:7CbsFHQq8ljztiUSGw46Fj" + } + ] + } +} diff --git a/tests/components/spotify/fixtures/show_episodes.json b/tests/components/spotify/fixtures/show_episodes.json new file mode 100644 index 00000000000000..0189fb10c11e9b --- /dev/null +++ b/tests/components/spotify/fixtures/show_episodes.json @@ -0,0 +1,94 @@ +{ + "href": "https://api.spotify.com/v1/shows/0e30iIgSffe6xJhFKe35Db/episodes?offset=0&limit=20&locale=en-US,en;q%3D0.5", + "items": [ + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/2O4OLlf7wsvLzCeUbNB3UK/clip_1204000_1256300.mp3", + "description": "The Great War of 2077 and how the Fallout world diverged from our own.Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecastBuy cool stuff and support the show!Fallout 76: https://amzn.to/3h99B3UFallout Cookbook: https://amzn.to/3aGjeodFallout Boardgame: https://amzn.to/2EgmBq3The Art of Fallout 4: https://amzn.to/3gfQST3Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zGFallout Funco Pop Figures: https://amzn.to/3gcYsOcLinks: Live Shows every Monday Night and game streams: twitch.tv/robotsradioFallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hubTalk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhMStay plugged in on Twitter: twitter.com/falloutlorecastRobots Radio Youtube: youtube.com/c/r0b0tsSend me a note! Email: falloutlorecast@gmail.com www.robotsradio.netOur Sponsors:* Check out Bandai Namco: unknown9.com/FALLOUTLOREAdvertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 2117616, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3ssmxnilHYaKhwRWoBGMbU" + }, + "href": "https://api.spotify.com/v1/episodes/3ssmxnilHYaKhwRWoBGMbU", + "html_description": "
The Great War of 2077 and how the Fallout world diverged from our own. Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast Buy cool stuff and support the show! Fallout 76: https://amzn.to/3h99B3U Fallout Cookbook: https://amzn.to/3aGjeod Fallout Boardgame: https://amzn.to/2EgmBq3 The Art of Fallout 4: https://amzn.to/3gfQST3 Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG Fallout Funco Pop Figures: https://amzn.to/3gcYsOc Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM Stay plugged in on Twitter: twitter.com/falloutlorecast Robots Radio Youtube: youtube.com/c/r0b0ts Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.net Our Sponsors: * Check out Bandai Namco: unknown9.com/FALLOUTLORE Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "3ssmxnilHYaKhwRWoBGMbU", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8af44e9ef63c2d6fb44cb0c9bf", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1ff44e9ef63c2d6fb44cb0c9bf", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68df44e9ef63c2d6fb44cb0c9bf", + "width": 64 + } + ], + "is_externally_hosted": false, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "The Great War - Fallout Lorecast EP 1", + "release_date": "2019-01-09", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:3ssmxnilHYaKhwRWoBGMbU" + }, + { + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/0PGDORXTYiO2Til9131l6X/clip_310950_371500.mp3", + "description": "Support the show to keep it going, plus get great rewards at patreon.com/falloutlorecast Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast Audiobooks.com - Get 3 FREE Audiobooks! https://www.dpbolvw.net/click-100173810-11099382?sid=flore Gamefly - Want 2 months of rentals for the price of 1 at Gamefly? https://www.dpbolvw.net/click-100173810-10495782?sid=flore Loot Crate - 15% off Loot Crate. Click the link and use coupon code: ROBOTSRADIO https://www.dpbolvw.net/click-100173810-13902093?sid=flore GreenMan Gaming - Get awesome discounts on games. https://www.dpbolvw.net/click-100173810-13764551?sid=flore NordVPN - Stay Safe on the Internet and get 68% off. https://www.dpbolvw.net/click-100173810-12814552?sid=flore Buy cool stuff and support the show! Fallout 76: https://amzn.to/3h99B3U Fallout Cookbook: https://amzn.to/3aGjeod Fallout Boardgame: https://amzn.to/2EgmBq3 The Art of Fallout 4: https://amzn.to/3gfQST3 Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG Fallout Funco Pop Figures: https://amzn.to/3gcYsOc Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM Stay plugged in on Twitter: twitter.com/falloutlorecast Robots Radio Youtube: youtube.com/c/r0b0ts Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.netOur Sponsors:* Check out Bandai Namco: unknown9.com/FALLOUTLOREAdvertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 2376881, + "explicit": false, + "external_urls": { + "spotify": "https://open.spotify.com/episode/1bbj9aqeeZ3UMUlcWN0S03" + }, + "href": "https://api.spotify.com/v1/episodes/1bbj9aqeeZ3UMUlcWN0S03", + "html_description": "
Support the show to keep it going, plus get great rewards at patreon.com/falloutlorecast Sponsors: Patreon: Become a patron! https://patreon.com/falloutlorecast Audiobooks.com - Get 3 FREE Audiobooks! https://www.dpbolvw.net/click-100173810-11099382?sid=flore Gamefly - Want 2 months of rentals for the price of 1 at Gamefly? https://www.dpbolvw.net/click-100173810-10495782?sid=flore Loot Crate - 15% off Loot Crate. Click the link and use coupon code: ROBOTSRADIO https://www.dpbolvw.net/click-100173810-13902093?sid=flore GreenMan Gaming - Get awesome discounts on games. https://www.dpbolvw.net/click-100173810-13764551?sid=flore NordVPN - Stay Safe on the Internet and get 68% off. https://www.dpbolvw.net/click-100173810-12814552?sid=flore Buy cool stuff and support the show! Fallout 76: https://amzn.to/3h99B3U Fallout Cookbook: https://amzn.to/3aGjeod Fallout Boardgame: https://amzn.to/2EgmBq3 The Art of Fallout 4: https://amzn.to/3gfQST3 Get a REAL Nuca-Cola Quantum! https://amzn.to/322O3zG Fallout Funco Pop Figures: https://amzn.to/3gcYsOc Links: Live Shows every Monday Night and game streams: twitch.tv/robotsradio Fallout Hub Podcast w/ Tom & others: https://anchor.fm/the-fallout-hub Talk Fallout and join the Robots Radio fam: Discord: discord.gg/JXKfVhM Stay plugged in on Twitter: twitter.com/falloutlorecast Robots Radio Youtube: youtube.com/c/r0b0ts Send me a note! Email: falloutlorecast@gmail.com www.robotsradio.net
Our Sponsors: * Check out Bandai Namco: unknown9.com/FALLOUTLORE Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "1bbj9aqeeZ3UMUlcWN0S03", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8a655b54a66471089d27dbb03f", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1f655b54a66471089d27dbb03f", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68d655b54a66471089d27dbb03f", + "width": 64 + } + ], + "is_externally_hosted": false, + "is_playable": true, + "language": "en-US", + "languages": ["en-US"], + "name": "Who Dropped the First Bomb?", + "release_date": "2019-01-15", + "release_date_precision": "day", + "resume_point": { + "fully_played": false, + "resume_position_ms": 0 + }, + "type": "episode", + "uri": "spotify:episode:1bbj9aqeeZ3UMUlcWN0S03" + } + ], + "limit": 20, + "next": "https://api.spotify.com/v1/shows/0e30iIgSffe6xJhFKe35Db/episodes?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "offset": 0, + "previous": null, + "total": 323 +} diff --git a/tests/components/spotify/fixtures/top_artists.json b/tests/components/spotify/fixtures/top_artists.json new file mode 100644 index 00000000000000..cd39d57e4ee8cc --- /dev/null +++ b/tests/components/spotify/fixtures/top_artists.json @@ -0,0 +1,76 @@ +{ + "items": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/74Yus6IHfa3tWZzXXAYtS2" + }, + "followers": { + "href": null, + "total": 488 + }, + "genres": [], + "href": "https://api.spotify.com/v1/artists/74Yus6IHfa3tWZzXXAYtS2", + "id": "74Yus6IHfa3tWZzXXAYtS2", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6761610000e5ebf749f53f8bb5ffccf6105ce3", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab67616100005174f749f53f8bb5ffccf6105ce3", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f178f749f53f8bb5ffccf6105ce3", + "width": 160 + } + ], + "name": "Onkruid", + "popularity": 7, + "type": "artist", + "uri": "spotify:artist:74Yus6IHfa3tWZzXXAYtS2" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6s5ubAp65wXoTZefE01RNR" + }, + "followers": { + "href": null, + "total": 805497 + }, + "genres": [], + "href": "https://api.spotify.com/v1/artists/6s5ubAp65wXoTZefE01RNR", + "id": "6s5ubAp65wXoTZefE01RNR", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6761610000e5eb8e750249623067fe3c557cf0", + "width": 640 + }, + { + "height": 320, + "url": "https://i.scdn.co/image/ab676161000051748e750249623067fe3c557cf0", + "width": 320 + }, + { + "height": 160, + "url": "https://i.scdn.co/image/ab6761610000f1788e750249623067fe3c557cf0", + "width": 160 + } + ], + "name": "Joost", + "popularity": 69, + "type": "artist", + "uri": "spotify:artist:6s5ubAp65wXoTZefE01RNR" + } + ], + "total": 192, + "limit": 20, + "offset": 0, + "href": "https://api.spotify.com/v1/me/top/artists?locale=en-US,en;q%3D0.5", + "next": "https://api.spotify.com/v1/me/top/artists?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "previous": null +} diff --git a/tests/components/spotify/fixtures/top_tracks.json b/tests/components/spotify/fixtures/top_tracks.json new file mode 100644 index 00000000000000..9b99b5974f3c43 --- /dev/null +++ b/tests/components/spotify/fixtures/top_tracks.json @@ -0,0 +1,922 @@ +{ + "items": [ + { + "album": { + "album_type": "SINGLE", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0PCCGZ0wGLizHt2KZ7hhA2" + }, + "href": 
"https://api.spotify.com/v1/artists/0PCCGZ0wGLizHt2KZ7hhA2", + "id": "0PCCGZ0wGLizHt2KZ7hhA2", + "name": "Artemas", + "type": "artist", + "uri": "spotify:artist:0PCCGZ0wGLizHt2KZ7hhA2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": "https://open.spotify.com/album/45Qix7gFNajr6IofEIhhE4" + }, + "href": "https://api.spotify.com/v1/albums/45Qix7gFNajr6IofEIhhE4", + "id": "45Qix7gFNajr6IofEIhhE4", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b273c88e6a4447087f41eb388b14", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e02c88e6a4447087f41eb388b14", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d00004851c88e6a4447087f41eb388b14", + "width": 64 + } + ], + "name": "i like the way you kiss me (burnt)", + "release_date": "2024-03-26", + "release_date_precision": "day", + "total_tracks": 2, + "type": "album", + "uri": "spotify:album:45Qix7gFNajr6IofEIhhE4" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/0PCCGZ0wGLizHt2KZ7hhA2" + }, + "href": "https://api.spotify.com/v1/artists/0PCCGZ0wGLizHt2KZ7hhA2", + "id": "0PCCGZ0wGLizHt2KZ7hhA2", + "name": "Artemas", + "type": "artist", + "uri": "spotify:artist:0PCCGZ0wGLizHt2KZ7hhA2" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + 
"LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 142514, + "explicit": false, + "external_ids": { + "isrc": "QZJ842400387" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/3oRoMXsP2NRzm51lldj1RO" + }, + "href": "https://api.spotify.com/v1/tracks/3oRoMXsP2NRzm51lldj1RO", + "id": "3oRoMXsP2NRzm51lldj1RO", + "is_local": false, + "name": "i like the way you kiss me", + "popularity": 51, + "preview_url": "https://p.scdn.co/mp3-preview/6ce9233edb212fe7cf02273f4369d2c60c28e887?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 2, + "type": "track", + "uri": "spotify:track:3oRoMXsP2NRzm51lldj1RO" + }, + { + "album": { + "album_type": "SINGLE", + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4YLtscXsxbVgi031ovDDdh" + }, + "href": "https://api.spotify.com/v1/artists/4YLtscXsxbVgi031ovDDdh", + "id": "4YLtscXsxbVgi031ovDDdh", + "name": "Chris Stapleton", + "type": "artist", + "uri": "spotify:artist:4YLtscXsxbVgi031ovDDdh" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6M2wZ9GZgrQXHCFfjv46we" + }, + "href": "https://api.spotify.com/v1/artists/6M2wZ9GZgrQXHCFfjv46we", + "id": "6M2wZ9GZgrQXHCFfjv46we", + "name": "Dua Lipa", + "type": "artist", + "uri": "spotify:artist:6M2wZ9GZgrQXHCFfjv46we" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "external_urls": { + "spotify": 
"https://open.spotify.com/album/3pjMBXbDLg2oGL7HtVxWgY" + }, + "href": "https://api.spotify.com/v1/albums/3pjMBXbDLg2oGL7HtVxWgY", + "id": "3pjMBXbDLg2oGL7HtVxWgY", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab67616d0000b27386f028311a5a746aa46b412f", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67616d00001e0286f028311a5a746aa46b412f", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab67616d0000485186f028311a5a746aa46b412f", + "width": 64 + } + ], + "name": "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", + "release_date": "2024-05-01", + "release_date_precision": "day", + "total_tracks": 1, + "type": "album", + "uri": "spotify:album:3pjMBXbDLg2oGL7HtVxWgY" + }, + "artists": [ + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/4YLtscXsxbVgi031ovDDdh" + }, + "href": "https://api.spotify.com/v1/artists/4YLtscXsxbVgi031ovDDdh", + "id": "4YLtscXsxbVgi031ovDDdh", + "name": "Chris Stapleton", + "type": "artist", + "uri": "spotify:artist:4YLtscXsxbVgi031ovDDdh" + }, + { + "external_urls": { + "spotify": "https://open.spotify.com/artist/6M2wZ9GZgrQXHCFfjv46we" + }, + "href": "https://api.spotify.com/v1/artists/6M2wZ9GZgrQXHCFfjv46we", + "id": "6M2wZ9GZgrQXHCFfjv46we", + "name": "Dua Lipa", + "type": "artist", + "uri": "spotify:artist:6M2wZ9GZgrQXHCFfjv46we" + } + ], + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "disc_number": 1, + "duration_ms": 277066, + "explicit": false, + "external_ids": { + "isrc": "USUG12403278" + }, + "external_urls": { + "spotify": "https://open.spotify.com/track/69zgu5rlAie3IPZOEXLxyS" + }, + "href": "https://api.spotify.com/v1/tracks/69zgu5rlAie3IPZOEXLxyS", + "id": "69zgu5rlAie3IPZOEXLxyS", + "is_local": false, + "name": "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", + "popularity": 60, + "preview_url": "https://p.scdn.co/mp3-preview/c4fa0377538248e0a3c7e92bcf5a58be2f32b342?cid=cfe923b2d660439caf2b557b21f31221", + "track_number": 1, + "type": "track", + "uri": 
"spotify:track:69zgu5rlAie3IPZOEXLxyS" + } + ], + "total": 2951, + "limit": 20, + "offset": 0, + "href": "https://api.spotify.com/v1/me/top/tracks?locale=en-US,en;q%3D0.5", + "next": "https://api.spotify.com/v1/me/top/tracks?offset=20&limit=20&locale=en-US,en;q%3D0.5", + "previous": null +} diff --git a/tests/components/spotify/snapshots/test_diagnostics.ambr b/tests/components/spotify/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000000..161b6025ff3778 --- /dev/null +++ b/tests/components/spotify/snapshots/test_diagnostics.ambr @@ -0,0 +1,432 @@ +# serializer version: 1 +# name: test_diagnostics_polling_instance + dict({ + 'devices': list([ + dict({ + 'device_id': '21dac6b0e0a1f181870fdc9749b2656466557666', + 'device_type': 'Computer', + 'is_active': False, + 'is_private_session': False, + 'is_restricted': False, + 'name': 'DESKTOP-BKC5SIK', + 'supports_volume': True, + 'volume_percent': 69, + }), + ]), + 'playback': dict({ + 'audio_features': dict({ + 'acousticness': 0.011, + 'danceability': 0.696, + 'energy': 0.905, + 'instrumentalness': 0.000905, + 'key': 3, + 'liveness': 0.302, + 'loudness': -2.743, + 'mode': 1, + 'speechiness': 0.103, + 'tempo': 114.944, + 'time_signature': 4, + 'valence': 0.625, + }), + 'current_playback': dict({ + 'context': dict({ + 'context_type': 'playlist', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/playlist/2r35vbe6hHl6yDSMfjKgmm', + }), + 'href': 'https://api.spotify.com/v1/playlists/2r35vbe6hHl6yDSMfjKgmm', + 'uri': 'spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm', + }), + 'currently_playing_type': 'track', + 'device': dict({ + 'device_id': 'a19f7a03a25aff3e43f457a328a8ba67a8c44789', + 'device_type': 'Speaker', + 'is_active': True, + 'is_private_session': False, + 'is_restricted': False, + 'name': 'Master Bathroom Speaker', + 'supports_volume': True, + 'volume_percent': 25, + }), + 'is_playing': True, + 'item': dict({ + 'album': dict({ + 'album_id': '3nUNxSh2szhmN7iifAKv5i', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '2Hkut4rAAyrQxRdof7FVJq', + 'name': 'Rush', + 'uri': 'spotify:artist:2Hkut4rAAyrQxRdof7FVJq', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b27306c0d7ebcabad0c39b566983', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e0206c0d7ebcabad0c39b566983', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d0000485106c0d7ebcabad0c39b566983', + 'width': 64, + }), + ]), + 'name': 'Permanent Waves', + 'release_date': '1980-01-01', + 'release_date_precision': 'day', + 'total_tracks': 6, + 'uri': 'spotify:album:3nUNxSh2szhmN7iifAKv5i', + }), + 'artists': list([ + dict({ + 'artist_id': '2Hkut4rAAyrQxRdof7FVJq', + 'name': 'Rush', + 'uri': 'spotify:artist:2Hkut4rAAyrQxRdof7FVJq', + }), + ]), + 'disc_number': 1, + 'duration_ms': 296466, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/4e9hUiLsN4mx61ARosFi7p', + }), + 'href': 'https://api.spotify.com/v1/tracks/4e9hUiLsN4mx61ARosFi7p', + 'is_local': False, + 'name': 'The Spirit Of Radio', + 'track_id': '4e9hUiLsN4mx61ARosFi7p', + 'track_number': 1, + 'type': 'track', + 'uri': 'spotify:track:4e9hUiLsN4mx61ARosFi7p', + }), + 'progress_ms': 249367, + 'repeat_mode': 'off', + 'shuffle': False, + }), + 'dj_playlist': False, + 'playlist': dict({ + 'collaborative': False, + 'description': 'A playlist for testing pourposes', + 'external_urls': dict({ + 'spotify': 
'https://open.spotify.com/playlist/3cEYpjA9oz9GiPac4AsH4n', + }), + 'images': list([ + dict({ + 'height': None, + 'url': 'https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba', + 'width': None, + }), + ]), + 'name': 'Spotify Web API Testing playlist', + 'object_type': 'playlist', + 'owner': dict({ + 'display_name': 'JMPerez²', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/user/jmperezperez', + }), + 'href': 'https://api.spotify.com/v1/users/jmperezperez', + 'object_type': 'user', + 'owner_id': 'jmperezperez', + 'uri': 'spotify:user:jmperezperez', + }), + 'playlist_id': '3cEYpjA9oz9GiPac4AsH4n', + 'public': True, + 'tracks': dict({ + 'items': list([ + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '2pANdqPvxInB0YvcDiw4ko', + 'album_type': 'compilation', + 'artists': list([ + dict({ + 'artist_id': '0LyfQWJT6nXafLPZqxe9Of', + 'name': 'Various Artists', + 'uri': 'spotify:artist:0LyfQWJT6nXafLPZqxe9Of', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e02ce6d0eef0c1ce77e5f95bbbc', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d00004851ce6d0eef0c1ce77e5f95bbbc', + 'width': 64, + }), + ]), + 'name': 'Progressive Psy Trance Picks Vol.8', + 'release_date': '2012-04-02', + 'release_date_precision': 'day', + 'total_tracks': 20, + 'uri': 'spotify:album:2pANdqPvxInB0YvcDiw4ko', + }), + 'artists': list([ + dict({ + 'artist_id': '6eSdhw46riw2OUHgMwR8B5', + 'name': 'Odiseo', + 'uri': 'spotify:artist:6eSdhw46riw2OUHgMwR8B5', + }), + ]), + 'disc_number': 1, + 'duration_ms': 376000, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/4rzfv0JLZfVhOhbSQ8o5jZ', + }), + 'href': 'https://api.spotify.com/v1/tracks/4rzfv0JLZfVhOhbSQ8o5jZ', + 'is_local': False, + 'name': 'Api', + 'track_id': '4rzfv0JLZfVhOhbSQ8o5jZ', + 'track_number': 10, + 'type': 'track', + 'uri': 'spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ', + }), + }), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '6nlfkk5GoXRL1nktlATNsy', + 'album_type': 'compilation', + 'artists': list([ + dict({ + 'artist_id': '0LyfQWJT6nXafLPZqxe9Of', + 'name': 'Various Artists', + 'uri': 'spotify:artist:0LyfQWJT6nXafLPZqxe9Of', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e02aa2ff29970d9a63a49dfaeb2', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d00004851aa2ff29970d9a63a49dfaeb2', + 'width': 64, + }), + ]), + 'name': 'Wellness & Dreaming Source', + 'release_date': '2015-01-09', + 'release_date_precision': 'day', + 'total_tracks': 25, + 'uri': 'spotify:album:6nlfkk5GoXRL1nktlATNsy', + }), + 'artists': list([ + dict({ + 'artist_id': '5VQE4WOzPu9h3HnGLuBoA6', + 'name': 'Vlasta Marek', + 'uri': 'spotify:artist:5VQE4WOzPu9h3HnGLuBoA6', + }), + ]), + 'disc_number': 1, + 'duration_ms': 730066, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/5o3jMYOSbaVz3tkgwhELSV', + }), + 'href': 'https://api.spotify.com/v1/tracks/5o3jMYOSbaVz3tkgwhELSV', + 'is_local': False, + 'name': 'Is', + 'track_id': '5o3jMYOSbaVz3tkgwhELSV', + 'track_number': 21, + 'type': 'track', + 'uri': 'spotify:track:5o3jMYOSbaVz3tkgwhELSV', + }), + 
}), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '4hnqM0JK4CM1phwfq1Ldyz', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '066X20Nz7iquqkkCW6Jxy6', + 'name': 'LCD Soundsystem', + 'uri': 'spotify:artist:066X20Nz7iquqkkCW6Jxy6', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e02ee0d0dce888c6c8a70db6e8b', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d00004851ee0d0dce888c6c8a70db6e8b', + 'width': 64, + }), + ]), + 'name': 'This Is Happening', + 'release_date': '2010-05-17', + 'release_date_precision': 'day', + 'total_tracks': 9, + 'uri': 'spotify:album:4hnqM0JK4CM1phwfq1Ldyz', + }), + 'artists': list([ + dict({ + 'artist_id': '066X20Nz7iquqkkCW6Jxy6', + 'name': 'LCD Soundsystem', + 'uri': 'spotify:artist:066X20Nz7iquqkkCW6Jxy6', + }), + ]), + 'disc_number': 1, + 'duration_ms': 401440, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/4Cy0NHJ8Gh0xMdwyM9RkQm', + }), + 'href': 'https://api.spotify.com/v1/tracks/4Cy0NHJ8Gh0xMdwyM9RkQm', + 'is_local': False, + 'name': 'All I Want', + 'track_id': '4Cy0NHJ8Gh0xMdwyM9RkQm', + 'track_number': 4, + 'type': 'track', + 'uri': 'spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm', + }), + }), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '2usKFntxa98WHMcyW6xJBz', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '272ArH9SUAlslQqsSgPJA2', + 'name': 'Glenn Horiuchi Trio', + 'uri': 'spotify:artist:272ArH9SUAlslQqsSgPJA2', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e028b7447ac3daa1da18811cf7b', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab67616d000048518b7447ac3daa1da18811cf7b', + 'width': 64, + }), + ]), + 'name': 'Glenn Horiuchi Trio / Gelenn Horiuchi Quartet: Mercy / Jump Start / Endpoints / Curl Out / Earthworks / Mind Probe / Null Set / Another Space (A)', + 'release_date': '2011-04-01', + 'release_date_precision': 'day', + 'total_tracks': 8, + 'uri': 'spotify:album:2usKFntxa98WHMcyW6xJBz', + }), + 'artists': list([ + dict({ + 'artist_id': '272ArH9SUAlslQqsSgPJA2', + 'name': 'Glenn Horiuchi Trio', + 'uri': 'spotify:artist:272ArH9SUAlslQqsSgPJA2', + }), + ]), + 'disc_number': 1, + 'duration_ms': 358760, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/6hvFrZNocdt2FcKGCSY5NI', + }), + 'href': 'https://api.spotify.com/v1/tracks/6hvFrZNocdt2FcKGCSY5NI', + 'is_local': False, + 'name': 'Endpoints', + 'track_id': '6hvFrZNocdt2FcKGCSY5NI', + 'track_number': 2, + 'type': 'track', + 'uri': 'spotify:track:6hvFrZNocdt2FcKGCSY5NI', + }), + }), + dict({ + 'track': dict({ + 'album': dict({ + 'album_id': '0ivM6kSawaug0j3tZVusG2', + 'album_type': 'album', + 'artists': list([ + dict({ + 'artist_id': '2KftmGt9sk1yLjsAoloC3M', + 'name': 'Zucchero', + 'uri': 'spotify:artist:2KftmGt9sk1yLjsAoloC3M', + }), + ]), + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67616d00001e0204e57d181ff062f8339d6c71', + 'width': 300, + }), + dict({ + 'height': 64, + 
'url': 'https://i.scdn.co/image/ab67616d0000485104e57d181ff062f8339d6c71', + 'width': 64, + }), + ]), + 'name': 'All The Best (Spanish Version)', + 'release_date': '2007-01-01', + 'release_date_precision': 'day', + 'total_tracks': 18, + 'uri': 'spotify:album:0ivM6kSawaug0j3tZVusG2', + }), + 'artists': list([ + dict({ + 'artist_id': '2KftmGt9sk1yLjsAoloC3M', + 'name': 'Zucchero', + 'uri': 'spotify:artist:2KftmGt9sk1yLjsAoloC3M', + }), + ]), + 'disc_number': 1, + 'duration_ms': 176093, + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/track/2E2znCPaS8anQe21GLxcvJ', + }), + 'href': 'https://api.spotify.com/v1/tracks/2E2znCPaS8anQe21GLxcvJ', + 'is_local': False, + 'name': 'You Are So Beautiful', + 'track_id': '2E2znCPaS8anQe21GLxcvJ', + 'track_number': 18, + 'type': 'track', + 'uri': 'spotify:track:2E2znCPaS8anQe21GLxcvJ', + }), + }), + ]), + }), + 'uri': 'spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', + }), + }), + }) +# --- diff --git a/tests/components/spotify/snapshots/test_media_browser.ambr b/tests/components/spotify/snapshots/test_media_browser.ambr index 4c39708780523b..e1ff42cb7c851f 100644 --- a/tests/components/spotify/snapshots/test_media_browser.ambr +++ b/tests/components/spotify/snapshots/test_media_browser.ambr @@ -229,3 +229,593 @@ 'title': 'Spotify', }) # --- +# name: test_browsing[album-spotify:album:3IqzqH6ShrRtie9Yd2ODyG] + dict({ + 'can_expand': True, + 'can_play': True, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:6akJGriy4njdP8fZTPGjwz', + 'media_content_type': 'spotify://track', + 'thumbnail': None, + 'title': 'All Your Friends', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:7N02bJK1amhplZ8yAapRS5', + 'media_content_type': 'spotify://track', + 'thumbnail': None, + 'title': 'New Magiks', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:3IqzqH6ShrRtie9Yd2ODyG', + 'media_content_type': 'spotify://album', + 'not_shown': 0, + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273a61a28c2f084761f8833bce6', + 'title': 'SINGLARITY', + }) +# --- +# name: test_browsing[artist-spotify:artist:0TnOYISbd1XYRBk9myaseg] + dict({ + 'can_expand': True, + 'can_play': True, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:56jg3KJcYmfL7RzYmG2O1Q', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273a0bac1996f26274685db1520', + 'title': 'Trackhouse (Daytona 500 Edition)', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:1l86t4bTNT2j1X0ZBCIv6R', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27333a4ba8f73271a749c5d953d', + 'title': 'Trackhouse', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0TnOYISbd1XYRBk9myaseg', + 'media_content_type': 'spotify://artist', + 'not_shown': 0, + 'thumbnail': 
'https://i.scdn.co/image/ab6761610000e5ebee07b5820dd91d15d397e29c', + 'title': 'Pitbull', + }) +# --- +# name: test_browsing[categories-categories] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAt0tbjZptfcdMSKl3', + 'media_content_type': 'spotify://category_playlists', + 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', + 'title': 'Made For You', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAqbMKFz6FAsUtgAab', + 'media_content_type': 'spotify://category_playlists', + 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', + 'title': 'New Releases', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', + 'media_content_type': 'spotify://categories', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Categories', + }) +# --- +# name: test_browsing[category_playlists-dinner] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX7yhuKT9G4qk', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588', + 'title': 'eten met vrienden', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DXbvE0SE0Cczh', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8', + 'title': 'Jukebox Joint', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/dinner', + 'media_content_type': 'spotify://category_playlists', + 'not_shown': 0, + 'thumbnail': 'https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg', + 'title': 'Cooking & Dining', + }) +# --- +# name: test_browsing[current_user_followed_artists-current_user_followed_artists] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0lLY20XpZ9yDobkbHI7u1y', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb0fb1220e7e3ace47ebad023e', + 'title': 'Pegboard Nerds', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:0p4nmQO2msCgU4IF37Wi3j', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb5c3349ddba6b8e064c1bab16', + 'title': 'Avril Lavigne', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_followed_artists', + 'media_content_type': 'spotify://current_user_followed_artists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Artists', + }) +# --- +# name: 
test_browsing[current_user_playlists-current_user_playlists] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:4WkWJ0EjHEFASDevhM8oPw', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273d061f5bfae8d38558f3698c1', + 'title': 'Hyper', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:1RHirWgH1weMsBLi4KOK9d', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://mosaic.scdn.co/640/ab67616d0000b2732f3e58dd611d177973cb3a8cab67616d0000b27345cab965cb4639a4e669564aab67616d0000b2739e83c93811be6abfad8649d6ab67616d0000b273e4c03429788f0aff263a5fc6', + 'title': 'Ain’t got shit on me', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', + 'media_content_type': 'spotify://current_user_playlists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Playlists', + }) +# --- +# name: test_browsing[current_user_recently_played-current_user_recently_played] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:71dMjqJ8UJV700zYs5YZCh', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde', + 'title': 'Super Breath', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:71dMjqJ8UJV700zYs5YZCh', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273cdac047e7894fb56a0dfdcde', + 'title': 'Super Breath', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_recently_played', + 'media_content_type': 'spotify://current_user_recently_played', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Recently played', + }) +# --- +# name: test_browsing[current_user_saved_albums-current_user_saved_albums] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:57MSBg5pBQZH5bfLVDmeuP', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2733126a95bb7ed4146a80c7fc6', + 'title': 'In Waves', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:3DQueEd1Ft9PHWgovDzPKh', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2736b8a4828e057b7dc1c4a4d39', + 'title': 'ten days', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_albums', + 'media_content_type': 'spotify://current_user_saved_albums', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Albums', + }) +# --- +# 
name: test_browsing[current_user_saved_shows-current_user_saved_shows] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:5OzkclFjD6iAjtAuo7aIYt', + 'media_content_type': 'spotify://show', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000f68db5f65a943ef4f707bf79949b', + 'title': 'Toni and Ryan', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:6XYRres0KZtnTqKcLavWR2', + 'media_content_type': 'spotify://show', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000f68d5fccb05c5685c081d5c2ad9c', + 'title': 'BLAST Push To Talk', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_shows', + 'media_content_type': 'spotify://current_user_saved_shows', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Podcasts', + }) +# --- +# name: test_browsing[current_user_saved_tracks-current_user_saved_tracks] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2pj2A25YQK4uMxhZheNx7R', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ac9dd449e38e5e8952fd22ad', + 'title': 'Otherside', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2lKOI1nwP5qZtZC7TGQVY8', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2733d710ab088ff797e80cc5aed', + 'title': 'I Think I Need A DJ', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_tracks', + 'media_content_type': 'spotify://current_user_saved_tracks', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Tracks', + }) +# --- +# name: test_browsing[current_user_top_artists-current_user_top_artists] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:74Yus6IHfa3tWZzXXAYtS2', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5ebf749f53f8bb5ffccf6105ce3', + 'title': 'Onkruid', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:artist:6s5ubAp65wXoTZefE01RNR', + 'media_content_type': 'spotify://artist', + 'thumbnail': 'https://i.scdn.co/image/ab6761610000e5eb8e750249623067fe3c557cf0', + 'title': 'Joost', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_artists', + 'media_content_type': 'spotify://current_user_top_artists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Top Artists', + }) +# --- +# name: test_browsing[current_user_top_tracks-current_user_top_tracks] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': 
list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:3oRoMXsP2NRzm51lldj1RO', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273c88e6a4447087f41eb388b14', + 'title': 'i like the way you kiss me', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:69zgu5rlAie3IPZOEXLxyS', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27386f028311a5a746aa46b412f', + 'title': "Think I'm In Love With You (With Dua Lipa) (Live From The 59th ACM Awards)", + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_tracks', + 'media_content_type': 'spotify://current_user_top_tracks', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Top Tracks', + }) +# --- +# name: test_browsing[featured_playlists-featured_playlists] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX4dopZ9vOp1t', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe', + 'title': 'Kerst Hits 2023', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk', + 'media_content_type': 'spotify://playlist', + 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf', + 'title': 'Top Hits NL', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', + 'media_content_type': 'spotify://featured_playlists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Featured Playlists', + }) +# --- +# name: test_browsing[new_releases-new_releases] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:5SGtrmYbIo0Dsg4kJ4qjM6', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d00001e0209ba52a5116e0c3e8461f58b', + 'title': 'Moon Music', + }), + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:album:713lZ7AF55fEFSQgcttj9y', + 'media_content_type': 'spotify://album', + 'thumbnail': 'https://i.scdn.co/image/ab67616d00001e02ab9953b1d18f8233f6b26027', + 'title': 'drift', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/new_releases', + 'media_content_type': 'spotify://new_releases', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'New Releases', + }) +# --- +# name: test_browsing[playlist-spotify:playlist:3cEYpjA9oz9GiPac4AsH4n] + dict({ + 'can_expand': True, + 'can_play': True, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 
'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:4rzfv0JLZfVhOhbSQ8o5jZ', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ce6d0eef0c1ce77e5f95bbbc', + 'title': 'Api', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:5o3jMYOSbaVz3tkgwhELSV', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273aa2ff29970d9a63a49dfaeb2', + 'title': 'Is', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:4Cy0NHJ8Gh0xMdwyM9RkQm', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b273ee0d0dce888c6c8a70db6e8b', + 'title': 'All I Want', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:6hvFrZNocdt2FcKGCSY5NI', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b2738b7447ac3daa1da18811cf7b', + 'title': 'Endpoints', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:track:2E2znCPaS8anQe21GLxcvJ', + 'media_content_type': 'spotify://track', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', + 'title': 'You Are So Beautiful', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', + 'media_content_type': 'spotify://playlist', + 'not_shown': 0, + 'thumbnail': 'https://i.scdn.co/image/ab67706c0000da848d0ce13d55f634e290f744ba', + 'title': 'Spotify Web API Testing playlist', + }) +# --- +# name: test_browsing[show-spotify:show:1Y9ExMgMxoBVrgrfU7u0nD] + dict({ + 'can_expand': True, + 'can_play': True, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:3ssmxnilHYaKhwRWoBGMbU', + 'media_content_type': 'spotify://episode', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8af44e9ef63c2d6fb44cb0c9bf', + 'title': 'The Great War - Fallout Lorecast EP 1', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:1bbj9aqeeZ3UMUlcWN0S03', + 'media_content_type': 'spotify://episode', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8a655b54a66471089d27dbb03f', + 'title': 'Who Dropped the First Bomb?', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:show:1Y9ExMgMxoBVrgrfU7u0nD', + 'media_content_type': 'spotify://show', + 'not_shown': 0, + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'title': 'Safety Third', + }) +# --- diff --git a/tests/components/spotify/snapshots/test_media_player.ambr b/tests/components/spotify/snapshots/test_media_player.ambr new file mode 100644 index 00000000000000..9692d59cfd12a2 --- /dev/null +++ b/tests/components/spotify/snapshots/test_media_player.ambr @@ 
-0,0 +1,137 @@ +# serializer version: 1 +# name: test_entities[media_player.spotify_spotify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.spotify_spotify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'spotify', + 'unique_id': '1112264111', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[media_player.spotify_spotify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': '/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=7bb89748322acb6c', + 'friendly_name': 'Spotify spotify_1', + 'media_album_name': 'Permanent Waves', + 'media_artist': 'Rush', + 'media_content_id': 'spotify:track:4e9hUiLsN4mx61ARosFi7p', + 'media_content_type': , + 'media_duration': 296, + 'media_playlist': 'Spotify Web API Testing playlist', + 'media_position': 249, + 'media_position_updated_at': HAFakeDatetime(2023, 10, 21, 0, 0, tzinfo=datetime.timezone.utc), + 'media_title': 'The Spirit Of Radio', + 'media_track': 1, + 'repeat': , + 'shuffle': False, + 'source': 'Master Bathroom Speaker', + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + 'supported_features': , + 'volume_level': 0.25, + }), + 'context': , + 'entity_id': 'media_player.spotify_spotify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_podcast[media_player.spotify_spotify_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.spotify_spotify_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'spotify', + 'unique_id': '1112264111', + 'unit_of_measurement': None, + }) +# --- +# name: test_podcast[media_player.spotify_spotify_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': '/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=cf1e6e1e830f08d3', + 'friendly_name': 'Spotify spotify_1', + 'media_album_name': 'Safety Third', + 'media_artist': 'Safety Third ', + 'media_content_id': 'spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + 'media_content_type': , + 'media_duration': 3690, + 'media_position': 5, + 'media_position_updated_at': HAFakeDatetime(2023, 10, 21, 0, 0, tzinfo=datetime.timezone.utc), + 'media_title': 'My Squirrel Has Brain Damage - Safety Third 119', + 'repeat': , + 'shuffle': False, + 'source': 'Sonos Roam SL', + 'source_list': list([ + 'DESKTOP-BKC5SIK', + ]), + 'supported_features': , + 'volume_level': 0.46, + }), + 'context': , + 'entity_id': 
'media_player.spotify_spotify_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- diff --git a/tests/components/spotify/snapshots/test_sensor.ambr b/tests/components/spotify/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..ce77dda479f2c7 --- /dev/null +++ b/tests/components/spotify/snapshots/test_sensor.ambr @@ -0,0 +1,595 @@ +# serializer version: 1 +# name: test_entities[sensor.spotify_spotify_1_song_acousticness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Song acousticness', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'acousticness', + 'unique_id': '1112264111_acousticness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_acousticness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song acousticness', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.1', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_danceability-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_danceability', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Song danceability', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'danceability', + 'unique_id': '1112264111_danceability', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_danceability-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song danceability', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_danceability', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '69.6', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Song energy', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy', + 'unique_id': '1112264111_energy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song energy', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90.5', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Song instrumentalness', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'instrumentalness', + 'unique_id': '1112264111_instrumentalness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song instrumentalness', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0905', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_key-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'C', + 'C♯/D♭', + 'D', + 'D♯/E♭', + 'E', + 'F', + 'F♯/G♭', + 'G', + 'G♯/A♭', + 'A', + 'A♯/B♭', + 'B', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_key', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Song key', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'key', + 'unique_id': '1112264111_key', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_key-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Spotify spotify_1 Song key', + 'options': list([ + 'C', + 'C♯/D♭', + 'D', + 'D♯/E♭', + 'E', + 'F', + 'F♯/G♭', + 'G', + 'G♯/A♭', + 'A', + 'A♯/B♭', + 'B', + ]), + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_key', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'D♯/E♭', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_liveness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_liveness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Song liveness', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'liveness', + 'unique_id': '1112264111_liveness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_liveness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song liveness', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_liveness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.2', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'major', + 'minor', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Song mode', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mode', + 'unique_id': '1112264111_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Spotify spotify_1 Song mode', + 'options': list([ + 'major', + 'minor', + ]), + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'major', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_speechiness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Song speechiness', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'speechiness', + 'unique_id': '1112264111_speechiness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_speechiness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song speechiness', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.3', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_tempo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_tempo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Song tempo', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'song_tempo', + 'unique_id': '1112264111_bpm', + 'unit_of_measurement': 'bpm', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_tempo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song tempo', + 'unit_of_measurement': 'bpm', + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_tempo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '114.944', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_time_signature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '3/4', + '4/4', + '5/4', + '6/4', + '7/4', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Song time signature', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'time_signature', + 'unique_id': '1112264111_time_signature', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_time_signature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Spotify spotify_1 Song time signature', + 'options': list([ + '3/4', + '4/4', + '5/4', + '6/4', + '7/4', + ]), + }), + 'context': , + 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4/4', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_valence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.spotify_spotify_1_song_valence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Song valence', + 'platform': 'spotify', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'valence', + 'unique_id': '1112264111_valence', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.spotify_spotify_1_song_valence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Spotify spotify_1 Song valence', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 
'sensor.spotify_spotify_1_song_valence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '62.5', + }) +# --- diff --git a/tests/components/spotify/test_config_flow.py b/tests/components/spotify/test_config_flow.py index f4719c0147cf87..cb942a635684e4 100644 --- a/tests/components/spotify/test_config_flow.py +++ b/tests/components/spotify/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch import pytest -from spotipy import SpotifyException +from spotifyaio import SpotifyConnectionError from homeassistant.components import zeroconf from homeassistant.components.spotify.const import DOMAIN @@ -111,6 +111,7 @@ async def test_full_flow( ): result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY assert len(hass.config_entries.async_entries(DOMAIN)) == 1, result assert result["type"] is FlowResultType.CREATE_ENTRY @@ -122,6 +123,7 @@ async def test_full_flow( "type": "Bearer", "expires_in": 60, } + assert result["result"].unique_id == "1112264111" @pytest.mark.usefixtures("current_request_with_host") @@ -157,9 +159,7 @@ async def test_abort_if_spotify_error( }, ) - mock_spotify.return_value.current_user.side_effect = SpotifyException( - 400, -1, "message" - ) + mock_spotify.return_value.get_current_user.side_effect = SpotifyConnectionError result = await hass.config_entries.flow.async_configure(result["flow_id"]) @@ -200,7 +200,7 @@ async def test_reauthentication( "https://accounts.spotify.com/api/token", json={ "refresh_token": "new-refresh-token", - "access_token": "mew-access-token", + "access_token": "new-access-token", "type": "Bearer", "expires_in": 60, }, @@ -213,11 +213,10 @@ async def test_reauthentication( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" - mock_config_entry.data["token"].pop("expires_at") assert mock_config_entry.data["token"] == { "refresh_token": "new-refresh-token", - "access_token": "mew-access-token", + "access_token": "new-access-token", "type": "Bearer", "expires_in": 60, } @@ -237,9 +236,6 @@ async def test_reauth_account_mismatch( result = await mock_config_entry.start_reauth_flow(hass) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) state = config_entry_oauth2_flow._encode_jwt( @@ -262,7 +258,9 @@ async def test_reauth_account_mismatch( }, ) - mock_spotify.return_value.current_user.return_value["id"] = "new_user_id" + mock_spotify.return_value.get_current_user.return_value.user_id = ( + "different_user_id" + ) result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/spotify/test_diagnostics.py b/tests/components/spotify/test_diagnostics.py new file mode 100644 index 00000000000000..6744ca11a0089d --- /dev/null +++ b/tests/components/spotify/test_diagnostics.py @@ -0,0 +1,31 @@ +"""Tests for the diagnostics data provided by the Spotify integration.""" + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("setup_credentials") +async def test_diagnostics_polling_instance( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_spotify: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_integration(hass, mock_config_entry) + + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("position_updated_at")) diff --git a/tests/components/spotify/test_init.py b/tests/components/spotify/test_init.py new file mode 100644 index 00000000000000..21129d20c07722 --- /dev/null +++ b/tests/components/spotify/test_init.py @@ -0,0 +1,50 @@ +"""Tests for the Spotify initialization.""" + +from unittest.mock import MagicMock + +import pytest +from spotifyaio import SpotifyConnectionError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("setup_credentials") +async def test_setup( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify setup.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + "method", + [ + "get_current_user", + "get_devices", + ], +) +async def test_setup_with_required_calls_failing( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + method: str, +) -> None: + """Test the Spotify setup with required calls failing.""" + getattr(mock_spotify.return_value, method).side_effect = SpotifyConnectionError + mock_config_entry.add_to_hass(hass) + + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/spotify/test_media_browser.py b/tests/components/spotify/test_media_browser.py index 1b17da74d4ac66..dcacc23bbeeae5 100644 --- a/tests/components/spotify/test_media_browser.py +++ b/tests/components/spotify/test_media_browser.py @@ -5,6 +5,7 @@ import pytest from syrupy import SnapshotAssertion +from homeassistant.components.media_player import BrowseError from homeassistant.components.spotify import DOMAIN from homeassistant.components.spotify.browse_media import async_browse_media from homeassistant.const import CONF_ID @@ -98,3 +99,82 @@ async def test_browse_media_playlists( f"spotify://{config_entry_id}/current_user_playlists", ) assert response.as_dict() == snapshot + + +@pytest.mark.parametrize( + ("media_content_type", "media_content_id"), + [ + ("current_user_playlists", "current_user_playlists"), + ("current_user_followed_artists", "current_user_followed_artists"), + ("current_user_saved_albums", "current_user_saved_albums"), + ("current_user_saved_tracks", "current_user_saved_tracks"), + ("current_user_saved_shows", "current_user_saved_shows"), + ("current_user_recently_played", "current_user_recently_played"), + ("current_user_top_artists", "current_user_top_artists"), + 
("current_user_top_tracks", "current_user_top_tracks"), + ("featured_playlists", "featured_playlists"), + ("categories", "categories"), + ("category_playlists", "dinner"), + ("new_releases", "new_releases"), + ("playlist", "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n"), + ("album", "spotify:album:3IqzqH6ShrRtie9Yd2ODyG"), + ("artist", "spotify:artist:0TnOYISbd1XYRBk9myaseg"), + ("show", "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD"), + ], +) +@pytest.mark.usefixtures("setup_credentials") +async def test_browsing( + hass: HomeAssistant, + mock_spotify: MagicMock, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + media_content_type: str, + media_content_id: str, +) -> None: + """Test browsing playlists for the two config entries.""" + await setup_integration(hass, mock_config_entry) + response = await async_browse_media( + hass, + f"spotify://{media_content_type}", + f"spotify://{mock_config_entry.entry_id}/{media_content_id}", + ) + assert response.as_dict() == snapshot + + +@pytest.mark.parametrize( + ("media_content_id"), + [ + "artist", + None, + ], +) +@pytest.mark.usefixtures("setup_credentials") +async def test_invalid_spotify_url( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + media_content_id: str | None, +) -> None: + """Test browsing with an invalid Spotify URL.""" + await setup_integration(hass, mock_config_entry) + with pytest.raises(BrowseError, match="Invalid Spotify URL specified"): + await async_browse_media( + hass, + "spotify://artist", + media_content_id, + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_browsing_not_loaded_entry( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test browsing with an unloaded config entry.""" + with pytest.raises(BrowseError, match="Invalid Spotify account specified"): + await async_browse_media( + hass, + "spotify://artist", + f"spotify://{mock_config_entry.entry_id}/spotify:artist:0TnOYISbd1XYRBk9myaseg", + ) diff --git a/tests/components/spotify/test_media_player.py b/tests/components/spotify/test_media_player.py new file mode 100644 index 00000000000000..b03424f8459cee --- /dev/null +++ b/tests/components/spotify/test_media_player.py @@ -0,0 +1,550 @@ +"""Tests for the Spotify media player platform.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from spotifyaio import ( + PlaybackState, + ProductType, + RepeatMode as SpotifyRepeatMode, + SpotifyConnectionError, +) +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_INPUT_SOURCE_LIST, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + SERVICE_SELECT_SOURCE, + MediaPlayerEnqueue, + MediaPlayerEntityFeature, + MediaPlayerState, + MediaType, + RepeatMode, +) +from homeassistant.components.spotify import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_SEEK, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_VOLUME_SET, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_entities( + hass: HomeAssistant, + mock_spotify: MagicMock, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Spotify entities.""" + freezer.move_to("2023-10-21") + with ( + patch("secrets.token_hex", return_value="mock-token"), + patch("homeassistant.components.spotify.PLATFORMS", [Platform.MEDIA_PLAYER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_podcast( + hass: HomeAssistant, + mock_spotify: MagicMock, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Spotify entities while listening a podcast.""" + freezer.move_to("2023-10-21") + mock_spotify.return_value.get_playback.return_value = PlaybackState.from_json( + load_fixture("playback_episode.json", DOMAIN) + ) + with ( + patch("secrets.token_hex", return_value="mock-token"), + patch("homeassistant.components.spotify.PLATFORMS", [Platform.MEDIA_PLAYER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_free_account( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify entities with a free account.""" + mock_spotify.return_value.get_current_user.return_value.product = ProductType.FREE + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["supported_features"] == 0 + + +@pytest.mark.usefixtures("setup_credentials") +async def test_restricted_device( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify entities with a restricted device.""" + mock_spotify.return_value.get_playback.return_value.device.is_restricted = True + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert ( + state.attributes["supported_features"] == MediaPlayerEntityFeature.SELECT_SOURCE + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_spotify_dj_list( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify entities with a Spotify DJ playlist.""" + mock_spotify.return_value.get_playback.return_value.context.uri = ( + "spotify:playlist:37i9dQZF1EYkqdzj48dyYq" + ) + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "DJ" + + +@pytest.mark.usefixtures("setup_credentials") +async def test_fetching_playlist_does_not_fail( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test failing fetching playlist does not fail update.""" + mock_spotify.return_value.get_playlist.side_effect = SpotifyConnectionError + await setup_integration(hass, 
mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + +@pytest.mark.usefixtures("setup_credentials") +async def test_idle( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify entities in idle state.""" + mock_spotify.return_value.get_playback.return_value = {} + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.state == MediaPlayerState.IDLE + assert ( + state.attributes["supported_features"] == MediaPlayerEntityFeature.SELECT_SOURCE + ) + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + ("service", "method"), + [ + (SERVICE_MEDIA_PLAY, "start_playback"), + (SERVICE_MEDIA_PAUSE, "pause_playback"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "previous_track"), + (SERVICE_MEDIA_NEXT_TRACK, "next_track"), + ], +) +async def test_simple_actions( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + service: str, + method: str, +) -> None: + """Test the Spotify media player.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + service, + {ATTR_ENTITY_ID: "media_player.spotify_spotify_1"}, + blocking=True, + ) + getattr(mock_spotify.return_value, method).assert_called_once_with() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_repeat_mode( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player repeat mode.""" + await setup_integration(hass, mock_config_entry) + for mode, spotify_mode in ( + (RepeatMode.ALL, SpotifyRepeatMode.CONTEXT), + (RepeatMode.ONE, SpotifyRepeatMode.TRACK), + (RepeatMode.OFF, SpotifyRepeatMode.OFF), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: "media_player.spotify_spotify_1", ATTR_MEDIA_REPEAT: mode}, + blocking=True, + ) + mock_spotify.return_value.set_repeat.assert_called_once_with(spotify_mode) + mock_spotify.return_value.set_repeat.reset_mock() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_shuffle( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player shuffle.""" + await setup_integration(hass, mock_config_entry) + for shuffle in (True, False): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_SHUFFLE: shuffle, + }, + blocking=True, + ) + mock_spotify.return_value.set_shuffle.assert_called_once_with(state=shuffle) + mock_spotify.return_value.set_shuffle.reset_mock() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_volume_level( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player volume level.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_VOLUME_LEVEL: 0.5, + }, + blocking=True, + ) + mock_spotify.return_value.set_volume.assert_called_with(50) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_seek( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> 
None: + """Test the Spotify media player seeking.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_SEEK, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_SEEK_POSITION: 100, + }, + blocking=True, + ) + mock_spotify.return_value.seek_track.assert_called_with(100000) + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + ("media_type", "media_id"), + [ + ("spotify://track", "spotify:track:3oRoMXsP2NRzm51lldj1RO"), + ("spotify://episode", "spotify:episode:3oRoMXsP2NRzm51lldj1RO"), + (MediaType.MUSIC, "spotify:track:3oRoMXsP2NRzm51lldj1RO"), + ], +) +async def test_play_media_in_queue( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + media_type: str, + media_id: str, +) -> None: + """Test the Spotify media player play media.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: media_type, + ATTR_MEDIA_CONTENT_ID: media_id, + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, + }, + blocking=True, + ) + mock_spotify.return_value.add_to_queue.assert_called_with(media_id, None) + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.parametrize( + ("media_type", "media_id", "called_with"), + [ + ( + "spotify://artist", + "spotify:artist:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:artist:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + "spotify://playlist", + "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + "spotify://album", + "spotify:album:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:album:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + "spotify://show", + "spotify:show:74Yus6IHfa3tWZzXXAYtS2", + {"context_uri": "spotify:show:74Yus6IHfa3tWZzXXAYtS2"}, + ), + ( + MediaType.MUSIC, + "spotify:track:3oRoMXsP2NRzm51lldj1RO", + {"uris": ["spotify:track:3oRoMXsP2NRzm51lldj1RO"]}, + ), + ( + "spotify://track", + "spotify:track:3oRoMXsP2NRzm51lldj1RO", + {"uris": ["spotify:track:3oRoMXsP2NRzm51lldj1RO"]}, + ), + ( + "spotify://episode", + "spotify:episode:3oRoMXsP2NRzm51lldj1RO", + {"uris": ["spotify:episode:3oRoMXsP2NRzm51lldj1RO"]}, + ), + ], +) +async def test_play_media( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + media_type: str, + media_id: str, + called_with: dict, +) -> None: + """Test the Spotify media player play media.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: media_type, + ATTR_MEDIA_CONTENT_ID: media_id, + }, + blocking=True, + ) + mock_spotify.return_value.start_playback.assert_called_with(**called_with) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_add_unsupported_media_to_queue( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player add unsupported media to queue.""" + await setup_integration(hass, mock_config_entry) + with pytest.raises( + ValueError, match="Media type playlist is not supported when enqueue is ADD" + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: 
"spotify://playlist", + ATTR_MEDIA_CONTENT_ID: "spotify:playlist:74Yus6IHfa3tWZzXXAYtS2", + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, + }, + blocking=True, + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_play_unsupported_media( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player play media.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_MEDIA_CONTENT_TYPE: MediaType.COMPOSER, + ATTR_MEDIA_CONTENT_ID: "spotify:track:3oRoMXsP2NRzm51lldj1RO", + }, + blocking=True, + ) + assert mock_spotify.return_value.start_playback.call_count == 0 + assert mock_spotify.return_value.add_to_queue.call_count == 0 + + +@pytest.mark.usefixtures("setup_credentials") +async def test_select_source( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player source select.""" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + { + ATTR_ENTITY_ID: "media_player.spotify_spotify_1", + ATTR_INPUT_SOURCE: "DESKTOP-BKC5SIK", + }, + blocking=True, + ) + mock_spotify.return_value.transfer_playback.assert_called_with( + "21dac6b0e0a1f181870fdc9749b2656466557666" + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_source_devices( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Spotify media player available source devices.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + + assert state.attributes[ATTR_INPUT_SOURCE_LIST] == ["DESKTOP-BKC5SIK"] + + mock_spotify.return_value.get_devices.side_effect = SpotifyConnectionError + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.state != STATE_UNAVAILABLE + assert state.attributes[ATTR_INPUT_SOURCE_LIST] == ["DESKTOP-BKC5SIK"] + + +@pytest.mark.usefixtures("setup_credentials") +async def test_paused_playback( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with paused playback.""" + mock_spotify.return_value.get_playback.return_value.is_playing = False + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.state == MediaPlayerState.PAUSED + + +@pytest.mark.usefixtures("setup_credentials") +async def test_fallback_show_image( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with a fallback image.""" + playback = PlaybackState.from_json(load_fixture("playback_episode.json", DOMAIN)) + playback.item.images = [] + mock_spotify.return_value.get_playback.return_value = playback + with patch("secrets.token_hex", return_value="mock-token"): + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert ( + state.attributes[ATTR_ENTITY_PICTURE] + == 
"/api/media_player_proxy/media_player.spotify_spotify_1?token=mock-token&cache=16ff384dbae94fea" + ) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_no_episode_images( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with no episode images.""" + playback = PlaybackState.from_json(load_fixture("playback_episode.json", DOMAIN)) + playback.item.images = [] + playback.item.show.images = [] + mock_spotify.return_value.get_playback.return_value = playback + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert ATTR_ENTITY_PICTURE not in state.attributes + + +@pytest.mark.usefixtures("setup_credentials") +async def test_no_album_images( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Spotify media player with no album images.""" + mock_spotify.return_value.get_playback.return_value.item.album.images = [] + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert ATTR_ENTITY_PICTURE not in state.attributes diff --git a/tests/components/spotify/test_sensor.py b/tests/components/spotify/test_sensor.py new file mode 100644 index 00000000000000..11ce361034adad --- /dev/null +++ b/tests/components/spotify/test_sensor.py @@ -0,0 +1,66 @@ +"""Tests for the Spotify sensor platform.""" + +from unittest.mock import MagicMock, patch + +import pytest +from spotifyaio import PlaybackState +from syrupy import SnapshotAssertion + +from homeassistant.components.spotify import DOMAIN +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, load_fixture, snapshot_platform + + +@pytest.mark.usefixtures("setup_credentials") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_entities( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Spotify entities.""" + with patch("homeassistant.components.spotify.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("setup_credentials") +async def test_audio_features_unavailable( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Spotify entities.""" + mock_spotify.return_value.get_audio_features.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("setup_credentials") +async def test_audio_features_unknown_during_podcast( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Spotify audio features sensor during a podcast.""" + mock_spotify.return_value.get_playback.return_value = PlaybackState.from_json( + load_fixture("playback_episode.json", DOMAIN) + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN diff --git a/tests/components/squeezebox/conftest.py b/tests/components/squeezebox/conftest.py index 2a8c4aacbd3987..2dc0cabeaa61d4 100644 --- a/tests/components/squeezebox/conftest.py +++ b/tests/components/squeezebox/conftest.py @@ -120,6 +120,7 @@ async def mock_async_browse( """Mock the async_browse method of pysqueezebox.Player.""" child_types = { "favorites": "favorites", + "new music": "album", "albums": "album", "album": "track", "genres": "genre", @@ -206,7 +207,7 @@ def player_factory() -> MagicMock: def mock_pysqueezebox_player(uuid: str) -> MagicMock: """Mock a Lyrion Media Server player.""" with patch( - "homeassistant.components.squeezebox.media_player.Player", autospec=True + "homeassistant.components.squeezebox.Player", autospec=True ) as mock_player: mock_player.async_browse = AsyncMock(side_effect=mock_async_browse) mock_player.generate_image_url_from_track_id = MagicMock( diff --git a/tests/components/squeezebox/test_init.py b/tests/components/squeezebox/test_init.py new file mode 100644 index 00000000000000..9074f57cdcb76d --- /dev/null +++ b/tests/components/squeezebox/test_init.py @@ -0,0 +1,23 @@ +"""Test squeezebox initialization.""" + +from unittest.mock import patch + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_init_api_fail( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test init fail due to API fail.""" + + # Setup component to fail... 
+ with ( + patch( + "homeassistant.components.squeezebox.Server.async_query", + return_value=False, + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/squeezebox/test_media_browser.py b/tests/components/squeezebox/test_media_browser.py index c3398d24aa3d98..c03c1b6344ded4 100644 --- a/tests/components/squeezebox/test_media_browser.py +++ b/tests/components/squeezebox/test_media_browser.py @@ -72,7 +72,14 @@ async def test_async_browse_media_with_subitems( hass_ws_client: WebSocketGenerator, ) -> None: """Test each category with subitems.""" - for category in ("Favorites", "Artists", "Albums", "Playlists", "Genres"): + for category in ( + "Favorites", + "Artists", + "Albums", + "Playlists", + "Genres", + "New Music", + ): with patch( "homeassistant.components.squeezebox.browse_media.is_internal_request", return_value=False, diff --git a/tests/components/squeezebox/test_media_player.py b/tests/components/squeezebox/test_media_player.py index 7721a2b86b4baf..080a2161b4d578 100644 --- a/tests/components/squeezebox/test_media_player.py +++ b/tests/components/squeezebox/test_media_player.py @@ -30,10 +30,14 @@ MediaType, RepeatMode, ) -from homeassistant.components.squeezebox.const import DOMAIN, SENSOR_UPDATE_INTERVAL +from homeassistant.components.squeezebox.const import ( + DISCOVERY_INTERVAL, + DOMAIN, + PLAYER_UPDATE_INTERVAL, + SENSOR_UPDATE_INTERVAL, +) from homeassistant.components.squeezebox.media_player import ( ATTR_PARAMETERS, - DISCOVERY_INTERVAL, SERVICE_CALL_METHOD, SERVICE_CALL_QUERY, ) @@ -101,12 +105,9 @@ async def test_squeezebox_player_rediscovery( # Make the player appear unavailable configured_player.connected = False - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "media_player.test_player"}, - blocking=True, - ) + freezer.tick(timedelta(seconds=PLAYER_UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() assert hass.states.get("media_player.test_player").state == STATE_UNAVAILABLE # Make the player available again @@ -115,7 +116,7 @@ async def test_squeezebox_player_rediscovery( async_fire_time_changed(hass) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=SENSOR_UPDATE_INTERVAL)) + freezer.tick(timedelta(seconds=PLAYER_UPDATE_INTERVAL)) async_fire_time_changed(hass) await hass.async_block_till_done() assert hass.states.get("media_player.test_player").state == MediaPlayerState.IDLE diff --git a/tests/components/srp_energy/test_config_flow.py b/tests/components/srp_energy/test_config_flow.py index e3abb3c98df2df..149e08014ac3b9 100644 --- a/tests/components/srp_energy/test_config_flow.py +++ b/tests/components/srp_energy/test_config_flow.py @@ -100,6 +100,10 @@ async def test_form_invalid_auth( assert result["errors"] == {"base": "invalid_auth"} +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.srp_energy.config.abort.unknown"], +) async def test_form_unknown_error( hass: HomeAssistant, mock_srp_energy_config_flow: MagicMock, diff --git a/tests/components/statistics/snapshots/test_config_flow.ambr b/tests/components/statistics/snapshots/test_config_flow.ambr index 8d274cd86c6b54..5f79c56dec7edd 100644 --- a/tests/components/statistics/snapshots/test_config_flow.ambr +++ b/tests/components/statistics/snapshots/test_config_flow.ambr @@ -4,7 +4,6 @@ 'attributes': dict({ 'friendly_name': 'Statistical characteristic', 'icon': 'mdi:calculator', - 'state_class': 
'measurement', }), 'state': 'unavailable', }) diff --git a/tests/components/statistics/test_sensor.py b/tests/components/statistics/test_sensor.py index c90d685714cf8f..7e2bc1cb16b1c8 100644 --- a/tests/components/statistics/test_sensor.py +++ b/tests/components/statistics/test_sensor.py @@ -2,9 +2,11 @@ from __future__ import annotations +from asyncio import Event as AsyncioEvent from collections.abc import Sequence from datetime import datetime, timedelta import statistics +from threading import Event from typing import Any from unittest.mock import patch @@ -12,7 +14,7 @@ import pytest from homeassistant import config as hass_config -from homeassistant.components.recorder import Recorder +from homeassistant.components.recorder import Recorder, history from homeassistant.components.sensor import ( ATTR_STATE_CLASS, SensorDeviceClass, @@ -50,6 +52,7 @@ VALUES_BINARY = ["on", "off", "on", "off", "on", "off", "on", "off", "on"] VALUES_NUMERIC = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] +VALUES_NUMERIC_LINEAR = [1, 2, 3, 4, 5, 6, 7, 8, 9] async def test_unique_id( @@ -247,8 +250,15 @@ async def test_sensor_defaults_binary(hass: HomeAssistant) -> None: assert "age_coverage_ratio" not in state.attributes -async def test_sensor_source_with_force_update(hass: HomeAssistant) -> None: - """Test the behavior of the sensor when the source sensor force-updates with same value.""" +async def test_sensor_state_reported(hass: HomeAssistant) -> None: + """Test the behavior of the sensor with a sequence of identical values. + + Forced updates no longer make a difference, since the statistics sensor now reacts not + only to state change events but also to state report events (EVENT_STATE_REPORTED). + This means repeating values will be added to the buffer repeatedly in both cases. + This fixes problems with time-based averages and some other functions that behave + differently when repeating values are reported. 
+ """ repeating_values = [18, 0, 0, 0, 0, 0, 0, 0, 9] assert await async_setup_component( hass, @@ -291,9 +301,9 @@ async def test_sensor_source_with_force_update(hass: HomeAssistant) -> None: state_normal = hass.states.get("sensor.test_normal") state_force = hass.states.get("sensor.test_force") assert state_normal and state_force - assert state_normal.state == str(round(sum(repeating_values) / 3, 2)) + assert state_normal.state == str(round(sum(repeating_values) / 9, 2)) assert state_force.state == str(round(sum(repeating_values) / 9, 2)) - assert state_normal.attributes.get("buffer_usage_ratio") == round(3 / 20, 2) + assert state_normal.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) assert state_force.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) @@ -1013,7 +1023,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "average_linear", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 6.0, "value_9": 10.68, "unit": "°C", }, @@ -1021,7 +1031,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "average_step", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 6.0, "value_9": 11.36, "unit": "°C", }, @@ -1113,7 +1123,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "distance_95_percent_of_values", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(2 * 1.96 * statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1121,7 +1131,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "distance_99_percent_of_values", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(2 * 2.58 * statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1161,7 +1171,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "noisiness", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(sum([3, 4.8, 10.2, 1.2, 5.4, 2.5, 7.3, 8]) / 8, 2)), "unit": "°C", }, @@ -1169,7 +1179,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "percentile", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 6.0, "value_9": 9.2, "unit": "°C", }, @@ -1177,7 +1187,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "standard_deviation", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float(round(statistics.stdev(VALUES_NUMERIC), 2)), "unit": "°C", }, @@ -1193,7 +1203,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "sum_differences", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float( sum( [ @@ -1214,7 +1224,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "sum_differences_nonnegative", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": float( sum( [ @@ -1259,7 +1269,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "sensor", "name": "variance", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 0.0, "value_9": 
float(round(statistics.variance(VALUES_NUMERIC), 2)), "unit": "°C²", }, @@ -1267,7 +1277,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None: "source_sensor_domain": "binary_sensor", "name": "average_step", "value_0": STATE_UNKNOWN, - "value_1": STATE_UNKNOWN, + "value_1": 100.0, "value_9": 50.0, "unit": "%", }, @@ -1701,3 +1711,324 @@ async def test_device_id( statistics_entity = entity_registry.async_get("sensor.statistics") assert statistics_entity is not None assert statistics_entity.device_id == source_entity.device_id + + +async def test_update_before_load(recorder_mock: Recorder, hass: HomeAssistant) -> None: + """Verify that updates happening before reloading from the database are handled correctly.""" + + current_time = dt_util.utcnow() + + # enable and pre-fill the recorder + await hass.async_block_till_done() + await async_wait_recording_done(hass) + + with ( + freeze_time(current_time) as freezer, + ): + for value in VALUES_NUMERIC_LINEAR: + hass.states.async_set( + "sensor.test_monitored", + str(value), + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + await hass.async_block_till_done() + current_time += timedelta(seconds=1) + freezer.move_to(current_time) + + await async_wait_recording_done(hass) + + # some synchronisation is needed so that loading from the database does not finish too soon + # we want this to take long enough that we can try to add a value BEFORE loading is done + state_changes_during_period_called_evt = AsyncioEvent() + state_changes_during_period_stall_evt = Event() + real_state_changes_during_period = history.state_changes_during_period + + def mock_state_changes_during_period(*args, **kwargs): + states = real_state_changes_during_period(*args, **kwargs) + hass.loop.call_soon_threadsafe(state_changes_during_period_called_evt.set) + state_changes_during_period_stall_evt.wait() + return states + + # create the statistics component, which gets filled from the database + with patch( + "homeassistant.components.statistics.sensor.history.state_changes_during_period", + mock_state_changes_during_period, + ): + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "average_step", + "max_age": {"seconds": 10}, + }, + ] + }, + ) + # adding this value is going to be ignored, since loading from the database hasn't finished yet + # if this value were added before loading from the database is done, + # it would mess up the order of the internal queue, which is supposed to be sorted by time + await state_changes_during_period_called_evt.wait() + hass.states.async_set( + "sensor.test_monitored", + "10", + {ATTR_UNIT_OF_MEASUREMENT: DEGREE}, + ) + state_changes_during_period_stall_evt.set() + await hass.async_block_till_done() + + # we will end up with a buffer of [1 .. 9] (10 wasn't added) + # so the computed average_step is (1+2+3+4+5+6+7+8)/8 = 4.5 + assert float(hass.states.get("sensor.test").state) == pytest.approx(4.5) + + +async def test_average_linear_unevenly_timed(hass: HomeAssistant) -> None: + """Test the average_linear state characteristic with unevenly distributed values. + + This also implicitly tests the correct timing of repeating values.
+ """ + values_and_times = [[5.0, 2], [10.0, 1], [10.0, 1], [10.0, 2], [5.0, 1]] + + current_time = dt_util.utcnow() + + with ( + freeze_time(current_time) as freezer, + ): + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test_sensor_average_linear", + "entity_id": "sensor.test_monitored", + "state_characteristic": "average_linear", + "max_age": {"seconds": 10}, + }, + ] + }, + ) + await hass.async_block_till_done() + + for value_and_time in values_and_times: + hass.states.async_set( + "sensor.test_monitored", + str(value_and_time[0]), + {ATTR_UNIT_OF_MEASUREMENT: DEGREE}, + ) + current_time += timedelta(seconds=value_and_time[1]) + freezer.move_to(current_time) + + await hass.async_block_till_done() + + state = hass.states.get("sensor.test_sensor_average_linear") + assert state is not None + assert state.state == "8.33", ( + "value mismatch for characteristic 'sensor/average_linear' - " + f"assert {state.state} == 8.33" + ) + + +async def test_sensor_unit_gets_removed(hass: HomeAssistant) -> None: + """Test when input lose its unit of measurement.""" + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "mean", + "sampling_size": 10, + }, + ] + }, + ) + await hass.async_block_till_done() + + input_attributes = { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + } + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + hass.states.async_set( + "sensor.test_monitored", + str(VALUES_NUMERIC[0]), + { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + # Temperature device class is not valid with no unit of measurement + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + +async def test_sensor_device_class_gets_removed(hass: HomeAssistant) -> None: + """Test when device class gets removed.""" + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": 
"sensor.test_monitored", + "state_characteristic": "mean", + "sampling_size": 10, + }, + ] + }, + ) + await hass.async_block_till_done() + + input_attributes = { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + } + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + hass.states.async_set( + "sensor.test_monitored", + str(VALUES_NUMERIC[0]), + { + ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, + ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + input_attributes, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "11.39" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + +async def test_not_valid_device_class(hass: HomeAssistant) -> None: + """Test when not valid device class.""" + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "mean", + "sampling_size": 10, + }, + ] + }, + ) + await hass.async_block_till_done() + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + { + ATTR_DEVICE_CLASS: SensorDeviceClass.DATE, + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + hass.states.async_set( + "sensor.test_monitored", + str(10), + { + ATTR_DEVICE_CLASS: "not_exist", + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "10.69" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None + assert state.attributes.get(ATTR_DEVICE_CLASS) is None + assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 6abc544c92a4e2..d930aafbdfb1e4 100644 --- a/tests/components/subaru/test_config_flow.py +++ 
b/tests/components/subaru/test_config_flow.py @@ -192,6 +192,10 @@ async def test_two_factor_request_success( assert len(mock_two_factor_request.mock_calls) == 1 +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.subaru.config.abort.two_factor_request_failed"], +) async def test_two_factor_request_fail( hass: HomeAssistant, two_factor_start_form ) -> None: diff --git a/tests/components/suez_water/__init__.py b/tests/components/suez_water/__init__.py index 4605e06344add2..a90df7384545e3 100644 --- a/tests/components/suez_water/__init__.py +++ b/tests/components/suez_water/__init__.py @@ -1 +1,15 @@ """Tests for the Suez Water integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Init suez water integration.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/suez_water/conftest.py b/tests/components/suez_water/conftest.py index f218fb7d833a1a..f634a053c65a67 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -3,8 +3,31 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from pysuez import AggregatedData, PriceResult +from pysuez.const import ATTRIBUTION import pytest +from homeassistant.components.suez_water.const import DOMAIN + +from tests.common import MockConfigEntry + +MOCK_DATA = { + "username": "test-username", + "password": "test-password", + "counter_id": "test-counter", +} + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create mock config_entry needed by suez_water integration.""" + return MockConfigEntry( + unique_id=MOCK_DATA["username"], + domain=DOMAIN, + title="Suez mock device", + data=MOCK_DATA, + ) + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -13,3 +36,45 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.suez_water.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture(name="suez_client") +def mock_suez_client() -> Generator[AsyncMock]: + """Create mock for suez_water external api.""" + with ( + patch( + "homeassistant.components.suez_water.coordinator.SuezClient", autospec=True + ) as mock_client, + patch( + "homeassistant.components.suez_water.config_flow.SuezClient", + new=mock_client, + ), + ): + suez_client = mock_client.return_value + suez_client.check_credentials.return_value = True + + result = AggregatedData( + value=160, + current_month={ + "2024-01-01": 130, + "2024-01-02": 145, + }, + previous_month={ + "2024-12-01": 154, + "2024-12-02": 166, + }, + current_year=1500, + previous_year=1000, + attribution=ATTRIBUTION, + highest_monthly_consumption=2558, + history={ + "2024-01-01": 130, + "2024-01-02": 145, + "2024-12-01": 154, + "2024-12-02": 166, + }, + ) + + suez_client.fetch_aggregated_data.return_value = result + suez_client.get_price.return_value = PriceResult("4.74") + yield suez_client diff --git a/tests/components/suez_water/snapshots/test_sensor.ambr b/tests/components/suez_water/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..da0ed3df7dd4e2 --- /dev/null +++ b/tests/components/suez_water/snapshots/test_sensor.ambr @@ -0,0 +1,116 @@ +# serializer version: 1 +# name: 
test_sensors_valid_state[sensor.suez_mock_device_water_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.suez_mock_device_water_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water price', + 'platform': 'suez_water', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_price', + 'unique_id': 'test-counter_water_price', + 'unit_of_measurement': '€', + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by toutsurmoneau.fr', + 'device_class': 'monetary', + 'friendly_name': 'Suez mock device Water price', + 'unit_of_measurement': '€', + }), + 'context': , + 'entity_id': 'sensor.suez_mock_device_water_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.74', + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water usage yesterday', + 'platform': 'suez_water', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_usage_yesterday', + 'unique_id': 'test-counter_water_usage_yesterday', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by toutsurmoneau.fr', + 'device_class': 'water', + 'friendly_name': 'Suez mock device Water usage yesterday', + 'highest_monthly_consumption': 2558, + 'history': dict({ + '2024-01-01': 130, + '2024-01-02': 145, + '2024-12-01': 154, + '2024-12-02': 166, + }), + 'last_year_overall': 1000, + 'previous_month_consumption': dict({ + '2024-12-01': 154, + '2024-12-02': 166, + }), + 'this_month_consumption': dict({ + '2024-01-01': 130, + '2024-01-02': 145, + }), + 'this_year_overall': 1500, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '160', + }) +# --- diff --git a/tests/components/suez_water/test_config_flow.py b/tests/components/suez_water/test_config_flow.py index 3170a6779f0bec..6779b4c7d02200 100644 --- a/tests/components/suez_water/test_config_flow.py +++ b/tests/components/suez_water/test_config_flow.py @@ -1,25 +1,23 @@ """Test the Suez Water config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock -from pysuez.client import PySuezError +from pysuez.exception import PySuezError import pytest from homeassistant import config_entries -from homeassistant.components.suez_water.const import DOMAIN 
+from homeassistant.components.suez_water.const import CONF_COUNTER_ID, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from tests.common import MockConfigEntry +from .conftest import MOCK_DATA -MOCK_DATA = { - "username": "test-username", - "password": "test-password", - "counter_id": "test-counter", -} +from tests.common import MockConfigEntry -async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: +async def test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -27,12 +25,11 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch("homeassistant.components.suez_water.config_flow.SuezClient"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -42,37 +39,28 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: async def test_form_invalid_auth( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock ) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.suez_water.config_flow.SuezClient.__init__", - return_value=None, - ), - patch( - "homeassistant.components.suez_water.config_flow.SuezClient.check_credentials", - return_value=False, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - with patch("homeassistant.components.suez_water.config_flow.SuezClient"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + suez_client.check_credentials.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -108,34 +96,71 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: ("exception", "error"), [(PySuezError, "cannot_connect"), (Exception, "unknown")] ) async def test_form_error( - hass: HomeAssistant, mock_setup_entry: AsyncMock, exception: Exception, error: str + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + exception: Exception, + suez_client: AsyncMock, + error: str, ) -> None: """Test we handle errors.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "homeassistant.components.suez_water.config_flow.SuezClient", - side_effect=exception, - ): - result = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - with patch( - "homeassistant.components.suez_water.config_flow.SuezClient", - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.return_value = True + suez_client.check_credentials.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == MOCK_DATA + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_auto_counter( + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock +) -> None: + """Test form set counter if not set by user.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + partial_form = {**MOCK_DATA} + partial_form.pop(CONF_COUNTER_ID) + suez_client.find_counter.side_effect = PySuezError("test counter not found") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + partial_form, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "counter_not_found"} + + suez_client.find_counter.side_effect = None + suez_client.find_counter.return_value = MOCK_DATA[CONF_COUNTER_ID] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + partial_form, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" + assert result["result"].unique_id == "test-username" assert result["data"] == MOCK_DATA assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/suez_water/test_init.py b/tests/components/suez_water/test_init.py new file mode 100644 index 00000000000000..78d086af38f9d2 --- /dev/null +++ b/tests/components/suez_water/test_init.py @@ -0,0 +1,37 @@ +"""Test Suez_water integration initialization.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.suez_water.coordinator import PySuezError +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_initialization_invalid_credentials( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that suez_water can't be loaded with invalid credentials.""" + + suez_client.check_credentials.return_value = False + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_initialization_setup_api_error( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that suez_water needs to retry loading if the API fails to connect.""" + + suez_client.check_credentials.side_effect = PySuezError("Test failure") + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/suez_water/test_sensor.py b/tests/components/suez_water/test_sensor.py new file mode 100644 index 00000000000000..cb578432f6232f --- /dev/null +++ b/tests/components/suez_water/test_sensor.py @@ -0,0 +1,67 @@ +"""Test Suez_water sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.suez_water.const import DATA_REFRESH_INTERVAL +from homeassistant.components.suez_water.coordinator import PySuezError +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_sensors_valid_state( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test that suez_water sensors are loaded and in a valid state.""" + with patch("homeassistant.components.suez_water.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize("method", [("fetch_aggregated_data"), ("get_price")]) +async def test_sensors_failed_update( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + method: str, +) -> None: + """Test that suez_water sensors reflect a failure when the API fails.""" + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + entity_ids = await hass.async_add_executor_job(hass.states.entity_ids) + assert len(entity_ids) == 2 + + for entity in entity_ids: + state = hass.states.get(entity) + assert entity + assert state.state != STATE_UNAVAILABLE + + getattr(suez_client, method).side_effect = PySuezError("Should fail to update") + + freezer.tick(DATA_REFRESH_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(True) + + for entity in entity_ids: + state = hass.states.get(entity) + assert entity + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/swiss_public_transport/fixtures/connections.json b/tests/components/swiss_public_transport/fixtures/connections.json index 
f2cd1014e63263..7e61206c36634a 100644 --- a/tests/components/swiss_public_transport/fixtures/connections.json +++ b/tests/components/swiss_public_transport/fixtures/connections.json @@ -99,7 +99,7 @@ "line": "T10" }, { - "departure": "2024-01-06T18:14:00+0100", + "departure": "invalid", "number": 11, "platform": 11, "transfers": 0, diff --git a/tests/components/switch_as_x/test_cover.py b/tests/components/switch_as_x/test_cover.py index 78a76c20bebf06..acb382a635a78e 100644 --- a/tests/components/switch_as_x/test_cover.py +++ b/tests/components/switch_as_x/test_cover.py @@ -1,6 +1,6 @@ """Tests for the Switch as X Cover platform.""" -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverState from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.switch_as_x.config_flow import SwitchAsXConfigFlowHandler from homeassistant.components.switch_as_x.const import ( @@ -15,10 +15,8 @@ SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_CLOSED, STATE_OFF, STATE_ON, - STATE_OPEN, Platform, ) from homeassistant.core import HomeAssistant @@ -71,7 +69,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -81,7 +79,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -91,7 +89,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -101,7 +99,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -111,7 +109,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -121,7 +119,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -131,7 +129,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert 
hass.states.get("cover.decorative_lights").state == CoverState.OPEN async def test_service_calls_inverted(hass: HomeAssistant) -> None: @@ -154,7 +152,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( COVER_DOMAIN, @@ -164,7 +162,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -174,7 +172,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, @@ -184,7 +182,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -194,7 +192,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED await hass.services.async_call( SWITCH_DOMAIN, @@ -204,7 +202,7 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_OFF - assert hass.states.get("cover.decorative_lights").state == STATE_OPEN + assert hass.states.get("cover.decorative_lights").state == CoverState.OPEN await hass.services.async_call( SWITCH_DOMAIN, @@ -214,4 +212,4 @@ async def test_service_calls_inverted(hass: HomeAssistant) -> None: ) assert hass.states.get("switch.decorative_lights").state == STATE_ON - assert hass.states.get("cover.decorative_lights").state == STATE_CLOSED + assert hass.states.get("cover.decorative_lights").state == CoverState.CLOSED diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index b2a8445546edbd..bd3985ff062a5c 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -205,3 +205,28 @@ async def init_integration(hass: HomeAssistant) -> MockConfigEntry: connectable=True, tx_power=-127, ) + + +WOMETERTHPC_SERVICE_INFO = BluetoothServiceInfoBleak( + name="WoTHPc", + manufacturer_data={ + 2409: b"\xb0\xe9\xfeT2\x15\xb7\xe4\x07\x9b\xa4\x007\x02\xd5\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"5\x00d"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:AA", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="WoTHPc", + manufacturer_data={ + 2409: b"\xb0\xe9\xfeT2\x15\xb7\xe4\x07\x9b\xa4\x007\x02\xd5\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": 
b"5\x00d"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:AA", "WoTHPc"), + time=0, + connectable=True, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 030a477596c3ec..3adeaef936c222 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import WOHAND_SERVICE_INFO +from . import WOHAND_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO from tests.common import MockConfigEntry from tests.components.bluetooth import inject_bluetooth_service_info @@ -59,3 +59,49 @@ async def test_sensors(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_co2_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the co2 sensor for a WoTHPc.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, WOMETERTHPC_SERVICE_INFO) + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "AA:BB:CC:DD:EE:AA", + CONF_NAME: "test-name", + CONF_PASSWORD: "test-password", + CONF_SENSOR_TYPE: "hygrometer_co2", + }, + unique_id="aabbccddeeaa", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 5 + + battery_sensor = hass.states.get("sensor.test_name_battery") + battery_sensor_attrs = battery_sensor.attributes + assert battery_sensor.state == "100" + assert battery_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Battery" + assert battery_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert battery_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + + co2_sensor = hass.states.get("sensor.test_name_carbon_dioxide") + co2_sensor_attrs = co2_sensor.attributes + assert co2_sensor.state == "725" + assert co2_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Carbon dioxide" + assert co2_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "ppm" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/switchbot_cloud/conftest.py b/tests/components/switchbot_cloud/conftest.py index b559930dedbe8d..09c953da06b5b7 100644 --- a/tests/components/switchbot_cloud/conftest.py +++ b/tests/components/switchbot_cloud/conftest.py @@ -5,6 +5,8 @@ import pytest +from homeassistant.components.switchbot_cloud import SwitchBotAPI + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -14,3 +16,17 @@ def mock_setup_entry() -> Generator[AsyncMock]: return_value=True, ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_list_devices(): + """Mock list_devices.""" + with patch.object(SwitchBotAPI, "list_devices") as mock_list_devices: + yield mock_list_devices + + +@pytest.fixture +def mock_get_status(): + """Mock get_status.""" + with patch.object(SwitchBotAPI, "get_status") as mock_get_status: + yield mock_get_status diff --git 
a/tests/components/switchbot_cloud/test_init.py b/tests/components/switchbot_cloud/test_init.py index 25ea370efe54e2..43431ae04c0ef8 100644 --- a/tests/components/switchbot_cloud/test_init.py +++ b/tests/components/switchbot_cloud/test_init.py @@ -50,6 +50,18 @@ async def test_setup_entry_success( remoteType="DIY Plug", hubDeviceId="test-hub-id", ), + Remote( + deviceId="meter-pro-1", + deviceName="meter-pro-name-1", + deviceType="MeterPro(CO2)", + hubDeviceId="test-hub-id", + ), + Remote( + deviceId="hub2-1", + deviceName="hub2-name-1", + deviceType="Hub 2", + hubDeviceId="test-hub-id", + ), ] mock_get_status.return_value = {"power": PowerState.ON.value} entry = configure_integration(hass) diff --git a/tests/components/switchbot_cloud/test_lock.py b/tests/components/switchbot_cloud/test_lock.py new file mode 100644 index 00000000000000..a09d7241794dea --- /dev/null +++ b/tests/components/switchbot_cloud/test_lock.py @@ -0,0 +1,48 @@ +"""Test for the switchbot_cloud lock.""" + +from unittest.mock import patch + +from switchbot_api import Device + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockState +from homeassistant.components.switchbot_cloud import SwitchBotAPI +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_UNLOCK +from homeassistant.core import HomeAssistant + +from . import configure_integration + + +async def test_lock(hass: HomeAssistant, mock_list_devices, mock_get_status) -> None: + """Test locking and unlocking.""" + mock_list_devices.return_value = [ + Device( + deviceId="lock-id-1", + deviceName="lock-1", + deviceType="Smart Lock", + hubDeviceId="test-hub-id", + ), + ] + + mock_get_status.return_value = {"lockState": "locked"} + + entry = configure_integration(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + lock_id = "lock.lock_1" + assert hass.states.get(lock_id).state == LockState.LOCKED + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: lock_id}, blocking=True + ) + assert hass.states.get(lock_id).state == LockState.UNLOCKED + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: lock_id}, blocking=True + ) + assert hass.states.get(lock_id).state == LockState.LOCKED diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index 7b0b5c28f3f04b..fe77ee0236b957 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -4,6 +4,8 @@ DeviceState, DeviceType, ShutterDirection, + SwitcherDualShutterSingleLight, + SwitcherLight, SwitcherPowerPlug, SwitcherShutter, SwitcherSingleShutterDualLight, @@ -21,16 +23,28 @@ DUMMY_DEVICE_ID3 = "bada77" DUMMY_DEVICE_ID4 = "bbd164" DUMMY_DEVICE_ID5 = "bcdb64" +DUMMY_DEVICE_ID6 = "bcdc64" +DUMMY_DEVICE_ID7 = "bcdd64" +DUMMY_DEVICE_ID8 = "bcde64" +DUMMY_DEVICE_ID9 = "bcdf64" DUMMY_DEVICE_KEY1 = "18" DUMMY_DEVICE_KEY2 = "01" DUMMY_DEVICE_KEY3 = "12" DUMMY_DEVICE_KEY4 = "07" DUMMY_DEVICE_KEY5 = "15" +DUMMY_DEVICE_KEY6 = "16" +DUMMY_DEVICE_KEY7 = "17" +DUMMY_DEVICE_KEY8 = "18" +DUMMY_DEVICE_KEY9 = "19" DUMMY_DEVICE_NAME1 = "Plug 23BC" DUMMY_DEVICE_NAME2 = "Heater FE12" DUMMY_DEVICE_NAME3 = "Breeze AB39" DUMMY_DEVICE_NAME4 = "Runner DD77" DUMMY_DEVICE_NAME5 = "RunnerS11 6CF5" +DUMMY_DEVICE_NAME6 = 
"RunnerS12 A9BE" +DUMMY_DEVICE_NAME7 = "Light 36BB" +DUMMY_DEVICE_NAME8 = "Light 36CB" +DUMMY_DEVICE_NAME9 = "Light 36DB" DUMMY_DEVICE_PASSWORD = "12345678" DUMMY_ELECTRIC_CURRENT1 = 0.5 DUMMY_ELECTRIC_CURRENT2 = 12.8 @@ -39,16 +53,28 @@ DUMMY_IP_ADDRESS3 = "192.168.100.159" DUMMY_IP_ADDRESS4 = "192.168.100.160" DUMMY_IP_ADDRESS5 = "192.168.100.161" +DUMMY_IP_ADDRESS6 = "192.168.100.162" +DUMMY_IP_ADDRESS7 = "192.168.100.163" +DUMMY_IP_ADDRESS8 = "192.168.100.164" +DUMMY_IP_ADDRESS9 = "192.168.100.165" DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 = "A1:B2:C3:45:67:DB" DUMMY_MAC_ADDRESS5 = "A1:B2:C3:45:67:DC" +DUMMY_MAC_ADDRESS6 = "A1:B2:C3:45:67:DD" +DUMMY_MAC_ADDRESS7 = "A1:B2:C3:45:67:DE" +DUMMY_MAC_ADDRESS8 = "A1:B2:C3:45:67:DF" +DUMMY_MAC_ADDRESS9 = "A1:B2:C3:45:67:DG" DUMMY_TOKEN_NEEDED1 = False DUMMY_TOKEN_NEEDED2 = False DUMMY_TOKEN_NEEDED3 = False DUMMY_TOKEN_NEEDED4 = False DUMMY_TOKEN_NEEDED5 = True +DUMMY_TOKEN_NEEDED6 = True +DUMMY_TOKEN_NEEDED7 = True +DUMMY_TOKEN_NEEDED8 = True +DUMMY_TOKEN_NEEDED9 = True DUMMY_PHONE_ID = "1234" DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -60,11 +86,15 @@ DUMMY_FAN_LEVEL = ThermostatFanLevel.LOW DUMMY_SWING = ThermostatSwing.OFF DUMMY_REMOTE_ID = "ELEC7001" -DUMMY_POSITION = 54 -DUMMY_DIRECTION = ShutterDirection.SHUTTER_STOP +DUMMY_POSITION = [54] +DUMMY_POSITION_2 = [54, 54] +DUMMY_DIRECTION = [ShutterDirection.SHUTTER_STOP] +DUMMY_DIRECTION_2 = [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP] DUMMY_USERNAME = "email" DUMMY_TOKEN = "zvVvd7JxtN7CgvkD1Psujw==" -DUMMY_LIGHTS = [DeviceState.ON, DeviceState.ON] +DUMMY_LIGHT = [DeviceState.ON] +DUMMY_LIGHT_2 = [DeviceState.ON, DeviceState.ON] +DUMMY_LIGHT_3 = [DeviceState.ON, DeviceState.ON, DeviceState.ON] DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DeviceType.POWER_PLUG, @@ -118,7 +148,21 @@ DUMMY_TOKEN_NEEDED5, DUMMY_POSITION, DUMMY_DIRECTION, - DUMMY_LIGHTS, + DUMMY_LIGHT_2, +) + +DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE = SwitcherDualShutterSingleLight( + DeviceType.RUNNER_S12, + DeviceState.ON, + DUMMY_DEVICE_ID6, + DUMMY_DEVICE_KEY6, + DUMMY_IP_ADDRESS6, + DUMMY_MAC_ADDRESS6, + DUMMY_DEVICE_NAME6, + DUMMY_TOKEN_NEEDED6, + DUMMY_POSITION_2, + DUMMY_DIRECTION_2, + DUMMY_LIGHT, ) DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( @@ -138,4 +182,40 @@ DUMMY_REMOTE_ID, ) +DUMMY_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL01, + DeviceState.ON, + DUMMY_DEVICE_ID7, + DUMMY_DEVICE_KEY7, + DUMMY_IP_ADDRESS7, + DUMMY_MAC_ADDRESS7, + DUMMY_DEVICE_NAME7, + DUMMY_TOKEN_NEEDED7, + DUMMY_LIGHT, +) + +DUMMY_DUAL_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL02, + DeviceState.ON, + DUMMY_DEVICE_ID8, + DUMMY_DEVICE_KEY8, + DUMMY_IP_ADDRESS8, + DUMMY_MAC_ADDRESS8, + DUMMY_DEVICE_NAME8, + DUMMY_TOKEN_NEEDED8, + DUMMY_LIGHT_2, +) + +DUMMY_TRIPLE_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL03, + DeviceState.ON, + DUMMY_DEVICE_ID9, + DUMMY_DEVICE_KEY9, + DUMMY_IP_ADDRESS9, + DUMMY_MAC_ADDRESS9, + DUMMY_DEVICE_NAME9, + DUMMY_TOKEN_NEEDED9, + DUMMY_LIGHT_3, +) + DUMMY_SWITCHER_DEVICES = [DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE] diff --git a/tests/components/switcher_kis/test_config_flow.py b/tests/components/switcher_kis/test_config_flow.py index e1c017b2b96c83..48cc0beacb8caf 100644 --- a/tests/components/switcher_kis/test_config_flow.py +++ b/tests/components/switcher_kis/test_config_flow.py @@ -11,6 +11,7 @@ from homeassistant.data_entry_flow import FlowResultType from 
.consts import ( + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, DUMMY_PLUG_DEVICE, DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, DUMMY_TOKEN, @@ -62,6 +63,7 @@ async def test_user_setup( [ [ DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, ] ], indirect=True, @@ -106,6 +108,7 @@ async def test_user_setup_found_token_device_valid_token( [ [ DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE, + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE, ] ], indirect=True, diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index 88e92b927e2689..d26fff8754c033 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -14,10 +14,7 @@ SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -26,6 +23,7 @@ from . import init_integration from .consts import ( + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE3, DUMMY_SHUTTER_DEVICE as DEVICE, DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE2, DUMMY_TOKEN as TOKEN, @@ -34,23 +32,78 @@ ENTITY_ID = f"{COVER_DOMAIN}.{slugify(DEVICE.name)}" ENTITY_ID2 = f"{COVER_DOMAIN}.{slugify(DEVICE2.name)}" +ENTITY_ID3 = f"{COVER_DOMAIN}.{slugify(DEVICE3.name)}_cover_1" +ENTITY_ID3_2 = f"{COVER_DOMAIN}.{slugify(DEVICE3.name)}_cover_2" @pytest.mark.parametrize( - ("device", "entity_id"), + ( + "device", + "entity_id", + "cover_id", + "position_open", + "position_close", + "direction_open", + "direction_close", + "direction_stop", + ), [ - (DEVICE, ENTITY_ID), - (DEVICE2, ENTITY_ID2), + ( + DEVICE, + ENTITY_ID, + 0, + [77], + [0], + [ShutterDirection.SHUTTER_UP], + [ShutterDirection.SHUTTER_DOWN], + [ShutterDirection.SHUTTER_STOP], + ), + ( + DEVICE2, + ENTITY_ID2, + 0, + [77], + [0], + [ShutterDirection.SHUTTER_UP], + [ShutterDirection.SHUTTER_DOWN], + [ShutterDirection.SHUTTER_STOP], + ), + ( + DEVICE3, + ENTITY_ID3, + 0, + [77, 0], + [0, 0], + [ShutterDirection.SHUTTER_UP, ShutterDirection.SHUTTER_STOP], + [ShutterDirection.SHUTTER_DOWN, ShutterDirection.SHUTTER_STOP], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP], + ), + ( + DEVICE3, + ENTITY_ID3_2, + 1, + [0, 77], + [0, 0], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_UP], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_DOWN], + [ShutterDirection.SHUTTER_STOP, ShutterDirection.SHUTTER_STOP], + ), ], ) -@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2]], indirect=True) +@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2, DEVICE3]], indirect=True) async def test_cover( hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch, device, - entity_id, + entity_id: str, + cover_id: int, + position_open: list[int], + position_close: list[int], + direction_open: list[ShutterDirection], + direction_close: list[ShutterDirection], + direction_stop: list[ShutterDirection], ) -> None: """Test cover services.""" await init_integration(hass, USERNAME, TOKEN) @@ -58,7 +111,7 @@ async def test_cover( # Test initial state - open state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test set position with patch( @@ -71,14 +124,14 @@ async def test_cover( blocking=True, ) - monkeypatch.setattr(device, "position", 77) + monkeypatch.setattr(device, "position", position_open) 
mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(77, 0) + mock_control_device.assert_called_once_with(77, cover_id) state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 77 # Test open @@ -92,14 +145,14 @@ async def test_cover( blocking=True, ) - monkeypatch.setattr(device, "direction", ShutterDirection.SHUTTER_UP) + monkeypatch.setattr(device, "direction", direction_open) mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(100, 0) + mock_control_device.assert_called_once_with(100, cover_id) state = hass.states.get(entity_id) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # Test close with patch( @@ -112,14 +165,14 @@ async def test_cover( blocking=True, ) - monkeypatch.setattr(device, "direction", ShutterDirection.SHUTTER_DOWN) + monkeypatch.setattr(device, "direction", direction_close) mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 6 - mock_control_device.assert_called_once_with(0, 0) + mock_control_device.assert_called_once_with(0, cover_id) state = hass.states.get(entity_id) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # Test stop with patch( @@ -132,39 +185,42 @@ async def test_cover( blocking=True, ) - monkeypatch.setattr(device, "direction", ShutterDirection.SHUTTER_STOP) + monkeypatch.setattr(device, "direction", direction_stop) mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() assert mock_api.call_count == 8 - mock_control_device.assert_called_once_with(0) + mock_control_device.assert_called_once_with(cover_id) state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test closed on position == 0 - monkeypatch.setattr(device, "position", 0) + monkeypatch.setattr(device, "position", position_close) mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 @pytest.mark.parametrize( - ("device", "entity_id"), + ("device", "entity_id", "cover_id"), [ - (DEVICE, ENTITY_ID), - (DEVICE2, ENTITY_ID2), + (DEVICE, ENTITY_ID, 0), + (DEVICE2, ENTITY_ID2, 0), + (DEVICE3, ENTITY_ID3, 0), + (DEVICE3, ENTITY_ID3_2, 1), ], ) -@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2]], indirect=True) +@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2, DEVICE3]], indirect=True) async def test_cover_control_fail( hass: HomeAssistant, mock_bridge, mock_api, device, - entity_id, + entity_id: str, + cover_id: int, ) -> None: """Test cover control fail.""" await init_integration(hass, USERNAME, TOKEN) @@ -172,7 +228,7 @@ async def test_cover_control_fail( # Test initial state - open state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test exception during set position with patch( @@ -188,7 +244,7 @@ async def test_cover_control_fail( ) assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(44, 0) + mock_control_device.assert_called_once_with(44, cover_id) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE @@ -197,7 
+253,7 @@ async def test_cover_control_fail( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test error response during set position with patch( @@ -213,16 +269,16 @@ async def test_cover_control_fail( ) assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(27, 0) + mock_control_device.assert_called_once_with(27, cover_id) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE -@pytest.mark.parametrize("mock_bridge", [[DEVICE2]], indirect=True) +@pytest.mark.parametrize("mock_bridge", [[DEVICE2, DEVICE3]], indirect=True) async def test_cover2_no_token( hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: - """Test single cover dual light without token services.""" + """Test cover with token needed without token specified.""" await init_integration(hass) assert mock_bridge diff --git a/tests/components/switcher_kis/test_light.py b/tests/components/switcher_kis/test_light.py index 0fb036967e7aae..60c851bf6a946f 100644 --- a/tests/components/switcher_kis/test_light.py +++ b/tests/components/switcher_kis/test_light.py @@ -21,28 +21,49 @@ from . import init_integration from .consts import ( + DUMMY_DUAL_LIGHT_DEVICE as DEVICE4, + DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE2, + DUMMY_LIGHT_DEVICE as DEVICE3, DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE, DUMMY_TOKEN as TOKEN, + DUMMY_TRIPLE_LIGHT_DEVICE as DEVICE5, DUMMY_USERNAME as USERNAME, ) ENTITY_ID = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_1" -ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" +ENTITY_ID_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" +ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE2.name)}" +ENTITY_ID3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE3.name)}" +ENTITY_ID4 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_1" +ENTITY_ID4_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_2" +ENTITY_ID5 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_1" +ENTITY_ID5_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_2" +ENTITY_ID5_3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_3" -@pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) @pytest.mark.parametrize( - ("entity_id", "light_id", "device_state"), + ("device", "entity_id", "light_id", "device_state"), [ - (ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), - (ENTITY_ID2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), + (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), ], ) +@pytest.mark.parametrize( + "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True +) async def test_light( hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch, + device, entity_id: str, light_id: int, device_state: list[DeviceState], @@ -56,8 +77,8 @@ async def test_light( assert state.state == STATE_ON # Test state change on --> off for light - monkeypatch.setattr(DEVICE, "lights", device_state) - mock_bridge.mock_callbacks([DEVICE]) 
+ monkeypatch.setattr(device, "light", device_state) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() state = hass.states.get(entity_id) @@ -90,24 +111,44 @@ async def test_light( assert state.state == STATE_OFF -@pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) +@pytest.mark.parametrize( + ("device", "entity_id", "light_id", "device_state"), + [ + (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), + (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), + ], +) +@pytest.mark.parametrize( + "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True +) async def test_light_control_fail( hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture, + device, + entity_id: str, + light_id: int, + device_state: list[DeviceState], ) -> None: """Test light control fail.""" await init_integration(hass, USERNAME, TOKEN) assert mock_bridge # Test initial state - light off - monkeypatch.setattr(DEVICE, "lights", [DeviceState.OFF, DeviceState.ON]) - mock_bridge.mock_callbacks([DEVICE]) + monkeypatch.setattr(device, "light", device_state) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.state == STATE_OFF # Test exception during turn on @@ -119,20 +160,20 @@ async def test_light_control_fail( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_ID}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert mock_api.call_count == 2 - mock_control_device.assert_called_once_with(DeviceState.ON, 0) - state = hass.states.get(ENTITY_ID) + mock_control_device.assert_called_once_with(DeviceState.ON, light_id) + state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE # Make device available again - mock_bridge.mock_callbacks([DEVICE]) + mock_bridge.mock_callbacks([device]) await hass.async_block_till_done() - state = hass.states.get(ENTITY_ID) + state = hass.states.get(entity_id) assert state.state == STATE_OFF # Test error response during turn on @@ -144,11 +185,11 @@ async def test_light_control_fail( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_ID}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) assert mock_api.call_count == 4 - mock_control_device.assert_called_once_with(DeviceState.ON, 0) - state = hass.states.get(ENTITY_ID) + mock_control_device.assert_called_once_with(DeviceState.ON, light_id) + state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/system_bridge/test_config_flow.py b/tests/components/system_bridge/test_config_flow.py index 727d93de89380a..ada44de2d12bec 100644 --- a/tests/components/system_bridge/test_config_flow.py +++ b/tests/components/system_bridge/test_config_flow.py @@ -259,9 +259,12 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_reauth_authorization_error(hass: HomeAssistant) -> None: """Test we show user form on 
authorization error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT ) + mock_config.add_to_hass(hass) + + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -291,9 +294,12 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: async def test_reauth_connection_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT ) + mock_config.add_to_hass(hass) + + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -336,9 +342,12 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: async def test_reauth_connection_closed_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=FIXTURE_UUID, data=FIXTURE_USER_INPUT ) + mock_config.add_to_hass(hass) + + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" @@ -373,9 +382,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "reauth"}, data=FIXTURE_USER_INPUT - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "authenticate" diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index 303074e3c2cb93..75d942fc601184 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -62,3 +62,58 @@ }), }) # --- +# name: test_diagnostics_missing_items[test_diagnostics_missing_items] + dict({ + 'coordinators': dict({ + 'data': dict({ + 'addresses': None, + 'boot_time': '2024-02-24 15:00:00+00:00', + 'cpu_percent': '10.0', + 'disk_usage': dict({ + '/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', + '/home/notexist/': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', + '/media/share': 'sdiskusage(total=536870912000, used=322122547200, free=214748364800, percent=60.0)', + }), + 'io_counters': None, + 'load': '(1, 2, 3)', + 'memory': 'VirtualMemory(total=104857600, available=41943040, percent=40.0, used=62914560, free=31457280)', + 'processes': "[tests.components.systemmonitor.conftest.MockProcess(pid=1, name='python3', status='sleeping', started='2024-02-23 15:00:00'), tests.components.systemmonitor.conftest.MockProcess(pid=1, name='pip', status='sleeping', started='2024-02-23 15:00:00')]", + 'swap': 'sswap(total=104857600, used=62914560, free=41943040, percent=60.0, sin=1, sout=1)', + 'temperatures': dict({ + 'cpu0-thermal': "[shwtemp(label='cpu0-thermal', current=50.0, high=60.0, critical=70.0)]", + }), + }), + 
'last_update_success': True, + }), + 'entry': dict({ + 'data': dict({ + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'systemmonitor', + 'minor_version': 3, + 'options': dict({ + 'binary_sensor': dict({ + 'process': list([ + 'python3', + 'pip', + ]), + }), + 'resources': list([ + 'disk_use_percent_/', + 'disk_use_percent_/home/notexist/', + 'memory_free_', + 'network_out_eth0', + 'process_python3', + ]), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'System Monitor', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/systemmonitor/snapshots/test_repairs.ambr b/tests/components/systemmonitor/snapshots/test_repairs.ambr deleted file mode 100644 index dc659918b5fcaf..00000000000000 --- a/tests/components/systemmonitor/snapshots/test_repairs.ambr +++ /dev/null @@ -1,73 +0,0 @@ -# serializer version: 1 -# name: test_migrate_process_sensor[after_migration] - list([ - ConfigEntrySnapshot({ - 'data': dict({ - }), - 'disabled_by': None, - 'domain': 'systemmonitor', - 'entry_id': , - 'minor_version': 2, - 'options': dict({ - 'binary_sensor': dict({ - 'process': list([ - 'python3', - 'pip', - ]), - }), - 'resources': list([ - 'disk_use_percent_/', - 'disk_use_percent_/home/notexist/', - 'memory_free_', - 'network_out_eth0', - 'process_python3', - ]), - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'System Monitor', - 'unique_id': None, - 'version': 1, - }), - ]) -# --- -# name: test_migrate_process_sensor[before_migration] - list([ - ConfigEntrySnapshot({ - 'data': dict({ - }), - 'disabled_by': None, - 'domain': 'systemmonitor', - 'entry_id': , - 'minor_version': 2, - 'options': dict({ - 'binary_sensor': dict({ - 'process': list([ - 'python3', - 'pip', - ]), - }), - 'resources': list([ - 'disk_use_percent_/', - 'disk_use_percent_/home/notexist/', - 'memory_free_', - 'network_out_eth0', - 'process_python3', - ]), - 'sensor': dict({ - 'process': list([ - 'python3', - 'pip', - ]), - }), - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'System Monitor', - 'unique_id': None, - 'version': 1, - }), - ]) -# --- diff --git a/tests/components/systemmonitor/test_diagnostics.py b/tests/components/systemmonitor/test_diagnostics.py index b0f4fca3d0c850..26e421e65745ad 100644 --- a/tests/components/systemmonitor/test_diagnostics.py +++ b/tests/components/systemmonitor/test_diagnostics.py @@ -2,6 +2,7 @@ from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion from syrupy.filters import props @@ -24,3 +25,26 @@ async def test_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, mock_added_config_entry ) == snapshot(exclude=props("last_update", "entry_id", "created_at", "modified_at")) + + +async def test_diagnostics_missing_items( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_psutil: Mock, + mock_os: Mock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test diagnostics.""" + mock_psutil.net_if_addrs.return_value = None + mock_psutil.net_io_counters.return_value = None + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot( + 
exclude=props("last_update", "entry_id", "created_at", "modified_at"), + name="test_diagnostics_missing_items", + ) diff --git a/tests/components/tado/fixtures/home.json b/tests/components/tado/fixtures/home.json new file mode 100644 index 00000000000000..3431c1c24713cf --- /dev/null +++ b/tests/components/tado/fixtures/home.json @@ -0,0 +1,47 @@ +{ + "id": 1, + "name": "My Home", + "dateTimeZone": "Europe/Berlin", + "dateCreated": "2019-03-24T16:16:19.541Z", + "temperatureUnit": "CELSIUS", + "partner": null, + "simpleSmartScheduleEnabled": true, + "awayRadiusInMeters": 100.0, + "installationCompleted": true, + "incidentDetection": { "supported": true, "enabled": true }, + "generation": "PRE_LINE_X", + "zonesCount": 7, + "language": "de-DE", + "skills": ["AUTO_ASSIST"], + "christmasModeEnabled": true, + "showAutoAssistReminders": true, + "contactDetails": { + "name": "Max Mustermann", + "email": "max@example.com", + "phone": "+493023125431" + }, + "address": { + "addressLine1": "Musterstrasse 123", + "addressLine2": null, + "zipCode": "12345", + "city": "Berlin", + "state": null, + "country": "DEU" + }, + "geolocation": { "latitude": 52.0, "longitude": 13.0 }, + "consentGrantSkippable": true, + "enabledFeatures": [ + "EIQ_SETTINGS_AS_WEBVIEW", + "HIDE_BOILER_REPAIR_SERVICE", + "INTERCOM_ENABLED", + "MORE_AS_WEBVIEW", + "OWD_SETTINGS_AS_WEBVIEW", + "SETTINGS_OVERVIEW_AS_WEBVIEW" + ], + "isAirComfortEligible": true, + "isBalanceAcEligible": false, + "isEnergyIqEligible": true, + "isHeatSourceInstalled": false, + "isHeatPumpInstalled": false, + "supportsFlowTemperatureOptimization": false +} diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index de4fd515e5a94a..a76858ab98ea62 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -20,6 +20,7 @@ async def async_init_integration( mobile_devices_fixture = "tado/mobile_devices.json" me_fixture = "tado/me.json" weather_fixture = "tado/weather.json" + home_fixture = "tado/home.json" home_state_fixture = "tado/home_state.json" zones_fixture = "tado/zones.json" zone_states_fixture = "tado/zone_states.json" @@ -65,6 +66,10 @@ async def async_init_integration( "https://my.tado.com/api/v2/me", text=load_fixture(me_fixture), ) + m.get( + "https://my.tado.com/api/v2/homes/1/", + text=load_fixture(home_fixture), + ) m.get( "https://my.tado.com/api/v2/homes/1/weather", text=load_fixture(weather_fixture), diff --git a/tests/components/tag/test_init.py b/tests/components/tag/test_init.py index 6f309391d2b804..5c1e80c2d8bc87 100644 --- a/tests/components/tag/test_init.py +++ b/tests/components/tag/test_init.py @@ -294,6 +294,10 @@ async def test_entity_created_and_removed( assert item["id"] == "1234567890" assert item["name"] == "Kitchen tag" + await hass.async_block_till_done() + er_entity = entity_registry.async_get("tag.kitchen_tag") + assert er_entity.name == "Kitchen tag" + entity = hass.states.get("tag.kitchen_tag") assert entity assert entity.state == STATE_UNKNOWN diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 68444de640c082..8e028cb53003ae 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -6,8 +6,8 @@ import json from unittest.mock import AsyncMock, MagicMock, patch -from pytedee_async.bridge import TedeeBridge -from pytedee_async.lock import TedeeLock +from aiotedee.bridge import TedeeBridge +from aiotedee.lock import TedeeLock import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, 
DOMAIN diff --git a/tests/components/tedee/snapshots/test_lock.ambr b/tests/components/tedee/snapshots/test_lock.ambr index 14913e32ba558d..3eba6f3f0af604 100644 --- a/tests/components/tedee/snapshots/test_lock.ambr +++ b/tests/components/tedee/snapshots/test_lock.ambr @@ -68,7 +68,7 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee PRO', - 'model_id': None, + 'model_id': 'Tedee PRO', 'name': 'Lock-1A2B', 'name_by_user': None, 'primary_config_entry': , @@ -147,7 +147,7 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee GO', - 'model_id': None, + 'model_id': 'Tedee GO', 'name': 'Lock-2C3D', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index 788d31c84d21ae..dfe70e7a2ea557 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock +from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index 664c2f249d8c62..825e01aca70a5b 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -2,15 +2,16 @@ from unittest.mock import MagicMock, patch -from pytedee_async import ( +from aiotedee import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) +from aiotedee.bridge import TedeeBridge import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -134,23 +135,30 @@ async def test_reauth_flow( assert result["reason"] == "reauth_successful" -async def test_reconfigure_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock -) -> None: - """Test that the reconfigure flow works.""" - +async def __do_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" mock_config_entry.add_to_hass(hass) reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) assert reconfigure_result["type"] is FlowResultType.FORM - assert reconfigure_result["step_id"] == "reconfigure_confirm" + assert reconfigure_result["step_id"] == "reconfigure" - result = await hass.config_entries.flow.async_configure( + return await hass.config_entries.flow.async_configure( reconfigure_result["flow_id"], {CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_HOST: "192.168.1.43"}, ) + +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock +) -> None: + """Test that the reconfigure flow works.""" + + result = await __do_reconfigure_flow(hass, mock_config_entry) + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" @@ -162,3 +170,18 @@ async def test_reconfigure_flow( CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_WEBHOOK_ID: WEBHOOK_ID, } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock +) -> None: + 
"""Ensure reconfigure flow aborts when the bride changes.""" + + mock_tedee.get_local_bridge.return_value = TedeeBridge( + 0, "1111-1111", "Bridge-R2D2" + ) + + result = await __do_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unique_id_mismatch" diff --git a/tests/components/tedee/test_init.py b/tests/components/tedee/test_init.py index d4ac1c9d2901db..63701bb17889f9 100644 --- a/tests/components/tedee/test_init.py +++ b/tests/components/tedee/test_init.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch from urllib.parse import urlparse -from pytedee_async.exception import ( +from aiotedee.exception import ( TedeeAuthException, TedeeClientException, TedeeWebhookException, diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index 3f6b97e2c70406..45eae6e22d99c7 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -4,13 +4,13 @@ from unittest.mock import MagicMock from urllib.parse import urlparse -from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock, TedeeLockState -from pytedee_async.exception import ( +from aiotedee import TedeeLock, TedeeLockState +from aiotedee.exception import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion diff --git a/tests/components/tedee/test_sensor.py b/tests/components/tedee/test_sensor.py index 72fbd9cbe8d571..ddbcd5086afd10 100644 --- a/tests/components/tedee/test_sensor.py +++ b/tests/components/tedee/test_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock +from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/template/test_alarm_control_panel.py b/tests/components/template/test_alarm_control_panel.py index 666dfe744a2998..4b259fabac2681 100644 --- a/tests/components/template/test_alarm_control_panel.py +++ b/tests/components/template/test_alarm_control_panel.py @@ -4,21 +4,15 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import template -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.const import ( ATTR_DOMAIN, ATTR_ENTITY_ID, ATTR_SERVICE_DATA, EVENT_CALL_SERVICE, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -113,15 +107,15 @@ async def test_template_state_text(hass: HomeAssistant) -> None: """Test the state text of a template.""" for set_state in ( - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_PENDING, - STATE_ALARM_TRIGGERED, + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMING, + 
AlarmControlPanelState.DISARMED, + AlarmControlPanelState.PENDING, + AlarmControlPanelState.TRIGGERED, ): hass.states.async_set(PANEL_NAME, set_state) await hass.async_block_till_done() @@ -166,7 +160,7 @@ async def test_setup_config_entry( hass.states.async_set("alarm_control_panel.one", "disarmed", {}) await hass.async_block_till_done() state = hass.states.get("alarm_control_panel.my_template") - assert state.state == STATE_ALARM_DISARMED + assert state.state == AlarmControlPanelState.DISARMED @pytest.mark.parametrize(("count", "domain"), [(1, "alarm_control_panel")]) @@ -190,13 +184,13 @@ async def test_optimistic_states(hass: HomeAssistant) -> None: assert state.state == "unknown" for service, set_state in ( - ("alarm_arm_away", STATE_ALARM_ARMED_AWAY), - ("alarm_arm_home", STATE_ALARM_ARMED_HOME), - ("alarm_arm_night", STATE_ALARM_ARMED_NIGHT), - ("alarm_arm_vacation", STATE_ALARM_ARMED_VACATION), - ("alarm_arm_custom_bypass", STATE_ALARM_ARMED_CUSTOM_BYPASS), - ("alarm_disarm", STATE_ALARM_DISARMED), - ("alarm_trigger", STATE_ALARM_TRIGGERED), + ("alarm_arm_away", AlarmControlPanelState.ARMED_AWAY), + ("alarm_arm_home", AlarmControlPanelState.ARMED_HOME), + ("alarm_arm_night", AlarmControlPanelState.ARMED_NIGHT), + ("alarm_arm_vacation", AlarmControlPanelState.ARMED_VACATION), + ("alarm_arm_custom_bypass", AlarmControlPanelState.ARMED_CUSTOM_BYPASS), + ("alarm_disarm", AlarmControlPanelState.DISARMED), + ("alarm_trigger", AlarmControlPanelState.TRIGGERED), ): await hass.services.async_call( ALARM_DOMAIN, @@ -465,15 +459,33 @@ async def test_code_config(hass: HomeAssistant, code_format, code_arm_required) @pytest.mark.parametrize( ("restored_state", "initial_state"), [ - (STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_AWAY), - (STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_ARMED_CUSTOM_BYPASS), - (STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_HOME), - (STATE_ALARM_ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), - (STATE_ALARM_ARMED_VACATION, STATE_ALARM_ARMED_VACATION), - (STATE_ALARM_ARMING, STATE_ALARM_ARMING), - (STATE_ALARM_DISARMED, STATE_ALARM_DISARMED), - (STATE_ALARM_PENDING, STATE_ALARM_PENDING), - (STATE_ALARM_TRIGGERED, STATE_ALARM_TRIGGERED), + ( + AlarmControlPanelState.ARMED_AWAY, + AlarmControlPanelState.ARMED_AWAY, + ), + ( + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + AlarmControlPanelState.ARMED_CUSTOM_BYPASS, + ), + ( + AlarmControlPanelState.ARMED_HOME, + AlarmControlPanelState.ARMED_HOME, + ), + ( + AlarmControlPanelState.ARMED_NIGHT, + AlarmControlPanelState.ARMED_NIGHT, + ), + ( + AlarmControlPanelState.ARMED_VACATION, + AlarmControlPanelState.ARMED_VACATION, + ), + (AlarmControlPanelState.ARMING, AlarmControlPanelState.ARMING), + (AlarmControlPanelState.DISARMED, AlarmControlPanelState.DISARMED), + (AlarmControlPanelState.PENDING, AlarmControlPanelState.PENDING), + ( + AlarmControlPanelState.TRIGGERED, + AlarmControlPanelState.TRIGGERED, + ), (STATE_UNAVAILABLE, STATE_UNKNOWN), (STATE_UNKNOWN, STATE_UNKNOWN), ("faulty_state", STATE_UNKNOWN), diff --git a/tests/components/template/test_binary_sensor.py b/tests/components/template/test_binary_sensor.py index 74662d2ab09476..3e3a629b4be230 100644 --- a/tests/components/template/test_binary_sensor.py +++ b/tests/components/template/test_binary_sensor.py @@ -33,9 +33,6 @@ mock_restore_cache_with_extra_data, ) -ON = "on" -OFF = "off" - @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( @@ -78,7 +75,7 @@ async def test_setup_minimal(hass: HomeAssistant, entity_id, name, attributes) - state = hass.states.get(entity_id) 
assert state is not None assert state.name == name - assert state.state == ON + assert state.state == STATE_ON assert state.attributes == attributes @@ -123,7 +120,7 @@ async def test_setup(hass: HomeAssistant, entity_id) -> None: state = hass.states.get(entity_id) assert state is not None assert state.name == "virtual thingy" - assert state.state == ON + assert state.state == STATE_ON assert state.attributes["device_class"] == "motion" @@ -253,7 +250,7 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count) -> None: "value_template": "{{ states.sensor.xyz.state }}", "icon_template": "{% if " "states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "mdi:check" "{% endif %}", }, @@ -270,7 +267,7 @@ async def test_setup_invalid_sensors(hass: HomeAssistant, count) -> None: "state": "{{ states.sensor.xyz.state }}", "icon": "{% if " "states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "mdi:check" "{% endif %}", }, @@ -287,7 +284,7 @@ async def test_icon_template(hass: HomeAssistant, entity_id) -> None: state = hass.states.get(entity_id) assert state.attributes.get("icon") == "" - hass.states.async_set("binary_sensor.test_state", "Works") + hass.states.async_set("binary_sensor.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.attributes["icon"] == "mdi:check" @@ -306,7 +303,7 @@ async def test_icon_template(hass: HomeAssistant, entity_id) -> None: "value_template": "{{ states.sensor.xyz.state }}", "entity_picture_template": "{% if " "states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "/local/sensor.png" "{% endif %}", }, @@ -323,7 +320,7 @@ async def test_icon_template(hass: HomeAssistant, entity_id) -> None: "state": "{{ states.sensor.xyz.state }}", "picture": "{% if " "states.binary_sensor.test_state.state == " - "'Works' %}" + "'on' %}" "/local/sensor.png" "{% endif %}", }, @@ -340,7 +337,7 @@ async def test_entity_picture_template(hass: HomeAssistant, entity_id) -> None: state = hass.states.get(entity_id) assert state.attributes.get("entity_picture") == "" - hass.states.async_set("binary_sensor.test_state", "Works") + hass.states.async_set("binary_sensor.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.attributes["entity_picture"] == "/local/sensor.png" @@ -460,13 +457,13 @@ async def test_match_all(hass: HomeAssistant, setup_mock) -> None: async def test_event(hass: HomeAssistant) -> None: """Test the event.""" state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF - hass.states.async_set("sensor.test_state", ON) + hass.states.async_set("sensor.test_state", STATE_ON) await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == ON + assert state.state == STATE_ON @pytest.mark.parametrize( @@ -571,42 +568,42 @@ async def test_event(hass: HomeAssistant) -> None: async def test_template_delay_on_off(hass: HomeAssistant) -> None: """Test binary sensor template delay on.""" # Ensure the initial state is not on - assert hass.states.get("binary_sensor.test_on").state != ON - assert hass.states.get("binary_sensor.test_off").state != ON + assert hass.states.get("binary_sensor.test_on").state != STATE_ON + assert hass.states.get("binary_sensor.test_off").state != STATE_ON hass.states.async_set("input_number.delay", 5) - hass.states.async_set("sensor.test_state", ON) + hass.states.async_set("sensor.test_state", STATE_ON) await 
hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON future = dt_util.utcnow() + timedelta(seconds=5) async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == ON - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_ON + assert hass.states.get("binary_sensor.test_off").state == STATE_ON # check with time changes - hass.states.async_set("sensor.test_state", OFF) + hass.states.async_set("sensor.test_state", STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON - hass.states.async_set("sensor.test_state", ON) + hass.states.async_set("sensor.test_state", STATE_ON) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON - hass.states.async_set("sensor.test_state", OFF) + hass.states.async_set("sensor.test_state", STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == ON + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_ON future = dt_util.utcnow() + timedelta(seconds=5) async_fire_time_changed(hass, future) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.test_on").state == OFF - assert hass.states.get("binary_sensor.test_off").state == OFF + assert hass.states.get("binary_sensor.test_on").state == STATE_OFF + assert hass.states.get("binary_sensor.test_off").state == STATE_OFF @pytest.mark.parametrize("count", [1]) @@ -737,7 +734,7 @@ async def test_invalid_attribute_template( hass: HomeAssistant, caplog_setup_text ) -> None: """Test that errors are logged if rendering template fails.""" - hass.states.async_set("binary_sensor.test_sensor", "true") + hass.states.async_set("binary_sensor.test_sensor", STATE_ON) assert len(hass.states.async_all()) == 2 assert ("test_attribute") in caplog_setup_text assert ("TemplateError") in caplog_setup_text @@ -802,7 +799,7 @@ async def test_no_update_template_match_all( }, ) await hass.async_block_till_done() - hass.states.async_set("binary_sensor.test_sensor", "true") + hass.states.async_set("binary_sensor.test_sensor", STATE_ON) assert len(hass.states.async_all()) == 5 assert hass.states.get("binary_sensor.all_state").state == STATE_UNKNOWN @@ -813,29 +810,29 @@ async def test_no_update_template_match_all( hass.bus.async_fire(EVENT_HOMEASSISTANT_START) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.all_state").state == ON - assert hass.states.get("binary_sensor.all_icon").state == ON - assert hass.states.get("binary_sensor.all_entity_picture").state == ON - assert hass.states.get("binary_sensor.all_attribute").state == ON + assert 
hass.states.get("binary_sensor.all_state").state == STATE_ON + assert hass.states.get("binary_sensor.all_icon").state == STATE_ON + assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_ON + assert hass.states.get("binary_sensor.all_attribute").state == STATE_ON - hass.states.async_set("binary_sensor.test_sensor", "false") + hass.states.async_set("binary_sensor.test_sensor", STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("binary_sensor.all_state").state == ON + assert hass.states.get("binary_sensor.all_state").state == STATE_ON # Will now process because we have one valid template - assert hass.states.get("binary_sensor.all_icon").state == OFF - assert hass.states.get("binary_sensor.all_entity_picture").state == OFF - assert hass.states.get("binary_sensor.all_attribute").state == OFF + assert hass.states.get("binary_sensor.all_icon").state == STATE_OFF + assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_OFF + assert hass.states.get("binary_sensor.all_attribute").state == STATE_OFF await async_update_entity(hass, "binary_sensor.all_state") await async_update_entity(hass, "binary_sensor.all_icon") await async_update_entity(hass, "binary_sensor.all_entity_picture") await async_update_entity(hass, "binary_sensor.all_attribute") - assert hass.states.get("binary_sensor.all_state").state == ON - assert hass.states.get("binary_sensor.all_icon").state == OFF - assert hass.states.get("binary_sensor.all_entity_picture").state == OFF - assert hass.states.get("binary_sensor.all_attribute").state == OFF + assert hass.states.get("binary_sensor.all_state").state == STATE_ON + assert hass.states.get("binary_sensor.all_icon").state == STATE_OFF + assert hass.states.get("binary_sensor.all_entity_picture").state == STATE_OFF + assert hass.states.get("binary_sensor.all_attribute").state == STATE_OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -848,7 +845,7 @@ async def test_no_update_template_match_all( "binary_sensor": { "name": "top-level", "unique_id": "sensor-id", - "state": ON, + "state": STATE_ON, }, }, "binary_sensor": { @@ -1008,30 +1005,30 @@ async def test_availability_icon_picture(hass: HomeAssistant, entity_id) -> None @pytest.mark.parametrize( ("extra_config", "source_state", "restored_state", "initial_state"), [ - ({}, OFF, ON, OFF), - ({}, OFF, OFF, OFF), - ({}, OFF, STATE_UNAVAILABLE, OFF), - ({}, OFF, STATE_UNKNOWN, OFF), - ({"delay_off": 5}, OFF, ON, ON), - ({"delay_off": 5}, OFF, OFF, OFF), - ({"delay_off": 5}, OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_off": 5}, OFF, STATE_UNKNOWN, STATE_UNKNOWN), - ({"delay_on": 5}, OFF, ON, OFF), - ({"delay_on": 5}, OFF, OFF, OFF), - ({"delay_on": 5}, OFF, STATE_UNAVAILABLE, OFF), - ({"delay_on": 5}, OFF, STATE_UNKNOWN, OFF), - ({}, ON, ON, ON), - ({}, ON, OFF, ON), - ({}, ON, STATE_UNAVAILABLE, ON), - ({}, ON, STATE_UNKNOWN, ON), - ({"delay_off": 5}, ON, ON, ON), - ({"delay_off": 5}, ON, OFF, ON), - ({"delay_off": 5}, ON, STATE_UNAVAILABLE, ON), - ({"delay_off": 5}, ON, STATE_UNKNOWN, ON), - ({"delay_on": 5}, ON, ON, ON), - ({"delay_on": 5}, ON, OFF, OFF), - ({"delay_on": 5}, ON, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_on": 5}, ON, STATE_UNKNOWN, STATE_UNKNOWN), + ({}, STATE_OFF, STATE_ON, STATE_OFF), + ({}, STATE_OFF, STATE_OFF, STATE_OFF), + ({}, STATE_OFF, STATE_UNAVAILABLE, STATE_OFF), + ({}, STATE_OFF, STATE_UNKNOWN, STATE_OFF), + ({"delay_off": 5}, STATE_OFF, STATE_ON, STATE_ON), + ({"delay_off": 5}, STATE_OFF, STATE_OFF, STATE_OFF), + 
({"delay_off": 5}, STATE_OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_off": 5}, STATE_OFF, STATE_UNKNOWN, STATE_UNKNOWN), + ({"delay_on": 5}, STATE_OFF, STATE_ON, STATE_OFF), + ({"delay_on": 5}, STATE_OFF, STATE_OFF, STATE_OFF), + ({"delay_on": 5}, STATE_OFF, STATE_UNAVAILABLE, STATE_OFF), + ({"delay_on": 5}, STATE_OFF, STATE_UNKNOWN, STATE_OFF), + ({}, STATE_ON, STATE_ON, STATE_ON), + ({}, STATE_ON, STATE_OFF, STATE_ON), + ({}, STATE_ON, STATE_UNAVAILABLE, STATE_ON), + ({}, STATE_ON, STATE_UNKNOWN, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_ON, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_OFF, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_UNAVAILABLE, STATE_ON), + ({"delay_off": 5}, STATE_ON, STATE_UNKNOWN, STATE_ON), + ({"delay_on": 5}, STATE_ON, STATE_ON, STATE_ON), + ({"delay_on": 5}, STATE_ON, STATE_OFF, STATE_OFF), + ({"delay_on": 5}, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_on": 5}, STATE_ON, STATE_UNKNOWN, STATE_UNKNOWN), ], ) async def test_restore_state( @@ -1145,7 +1142,7 @@ async def test_trigger_entity( await hass.async_block_till_done() state = hass.states.get("binary_sensor.hello_name") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes.get("device_class") == "battery" assert state.attributes.get("icon") == "mdi:pirate" assert state.attributes.get("entity_picture") == "/local/dogs.png" @@ -1163,7 +1160,7 @@ async def test_trigger_entity( ) state = hass.states.get("binary_sensor.via_list") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes.get("device_class") == "battery" assert state.attributes.get("icon") == "mdi:pirate" assert state.attributes.get("entity_picture") == "/local/dogs.png" @@ -1175,7 +1172,7 @@ async def test_trigger_entity( hass.bus.async_fire("test_event", {"beer": 2, "uno_mas": "si"}) await hass.async_block_till_done() state = hass.states.get("binary_sensor.via_list") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes.get("another") == "si" @@ -1217,7 +1214,7 @@ async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == ON + assert state.state == STATE_ON # Now wait for the auto-off future = dt_util.utcnow() + timedelta(seconds=2) @@ -1225,7 +1222,7 @@ async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -1253,8 +1250,8 @@ async def test_template_with_trigger_templated_delay_on(hass: HomeAssistant) -> @pytest.mark.parametrize( ("restored_state", "initial_state", "initial_attributes"), [ - (ON, ON, ["entity_picture", "icon", "plus_one"]), - (OFF, OFF, ["entity_picture", "icon", "plus_one"]), + (STATE_ON, STATE_ON, ["entity_picture", "icon", "plus_one"]), + (STATE_OFF, STATE_OFF, ["entity_picture", "icon", "plus_one"]), (STATE_UNAVAILABLE, STATE_UNKNOWN, []), (STATE_UNKNOWN, STATE_UNKNOWN, []), ], @@ -1309,7 +1306,7 @@ async def test_trigger_entity_restore_state( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == ON + assert state.state == STATE_ON assert state.attributes["icon"] == "mdi:pirate" assert state.attributes["entity_picture"] == "/local/dogs.png" assert state.attributes["plus_one"] == 3 @@ -1333,7 +1330,7 @@ 
async def test_trigger_entity_restore_state( }, ], ) -@pytest.mark.parametrize("restored_state", [ON, OFF]) +@pytest.mark.parametrize("restored_state", [STATE_ON, STATE_OFF]) async def test_trigger_entity_restore_state_auto_off( hass: HomeAssistant, count, @@ -1377,7 +1374,7 @@ async def test_trigger_entity_restore_state_auto_off( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -1405,7 +1402,7 @@ async def test_trigger_entity_restore_state_auto_off_expired( freezer.move_to("2022-02-02 12:02:00+00:00") fake_state = State( "binary_sensor.test", - ON, + STATE_ON, {}, ) fake_extra_data = { @@ -1427,7 +1424,7 @@ async def test_trigger_entity_restore_state_auto_off_expired( await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") - assert state.state == OFF + assert state.state == STATE_OFF async def test_device_id( diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index 72c453d48dcb62..a3e53aab9e1136 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -794,7 +794,7 @@ async def test_config_flow_preview( ), "unit_of_measurement": ( "'None' is not a valid unit for device class 'energy'; " - "expected one of 'cal', 'Gcal', 'GJ', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'Wh'" + "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'TWh', 'Wh'" ), }, ), diff --git a/tests/components/template/test_cover.py b/tests/components/template/test_cover.py index 3783ce62fd41dc..c49db59c2ee156 100644 --- a/tests/components/template/test_cover.py +++ b/tests/components/template/test_cover.py @@ -9,6 +9,7 @@ ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -21,12 +22,8 @@ SERVICE_STOP_COVER, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, - STATE_CLOSED, - STATE_CLOSING, STATE_OFF, STATE_ON, - STATE_OPEN, - STATE_OPENING, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -72,10 +69,24 @@ } }, [ - ("cover.test_state", STATE_OPEN, STATE_OPEN, {}, -1, ""), - ("cover.test_state", STATE_CLOSED, STATE_CLOSED, {}, -1, ""), - ("cover.test_state", STATE_OPENING, STATE_OPENING, {}, -1, ""), - ("cover.test_state", STATE_CLOSING, STATE_CLOSING, {}, -1, ""), + ("cover.test_state", CoverState.OPEN, CoverState.OPEN, {}, -1, ""), + ("cover.test_state", CoverState.CLOSED, CoverState.CLOSED, {}, -1, ""), + ( + "cover.test_state", + CoverState.OPENING, + CoverState.OPENING, + {}, + -1, + "", + ), + ( + "cover.test_state", + CoverState.CLOSING, + CoverState.CLOSING, + {}, + -1, + "", + ), ( "cover.test_state", "dog", @@ -84,7 +95,7 @@ -1, "Received invalid cover is_on state: dog", ), - ("cover.test_state", STATE_OPEN, STATE_OPEN, {}, -1, ""), + ("cover.test_state", CoverState.OPEN, CoverState.OPEN, {}, -1, ""), ( "cover.test_state", "cat", @@ -93,7 +104,7 @@ -1, "Received invalid cover is_on state: cat", ), - ("cover.test_state", STATE_CLOSED, STATE_CLOSED, {}, -1, ""), + ("cover.test_state", CoverState.CLOSED, CoverState.CLOSED, {}, -1, ""), ( "cover.test_state", "bear", @@ -120,17 +131,45 @@ } }, [ - ("cover.test_state", STATE_OPEN, STATE_UNKNOWN, {}, -1, ""), - ("cover.test_state", STATE_CLOSED, STATE_UNKNOWN, {}, -1, ""), - ("cover.test_state", STATE_OPENING, STATE_OPENING, {}, -1, ""), - ("cover.test_state", 
STATE_CLOSING, STATE_CLOSING, {}, -1, ""), - ("cover.test", STATE_CLOSED, STATE_CLOSING, {"position": 0}, 0, ""), - ("cover.test_state", STATE_OPEN, STATE_CLOSED, {}, -1, ""), - ("cover.test", STATE_CLOSED, STATE_OPEN, {"position": 10}, 10, ""), + ("cover.test_state", CoverState.OPEN, STATE_UNKNOWN, {}, -1, ""), + ("cover.test_state", CoverState.CLOSED, STATE_UNKNOWN, {}, -1, ""), + ( + "cover.test_state", + CoverState.OPENING, + CoverState.OPENING, + {}, + -1, + "", + ), + ( + "cover.test_state", + CoverState.CLOSING, + CoverState.CLOSING, + {}, + -1, + "", + ), + ( + "cover.test", + CoverState.CLOSED, + CoverState.CLOSING, + {"position": 0}, + 0, + "", + ), + ("cover.test_state", CoverState.OPEN, CoverState.CLOSED, {}, -1, ""), + ( + "cover.test", + CoverState.CLOSED, + CoverState.OPEN, + {"position": 10}, + 10, + "", + ), ( "cover.test_state", "dog", - STATE_OPEN, + CoverState.OPEN, {}, -1, "Received invalid cover is_on state: dog", @@ -244,7 +283,7 @@ async def test_template_state_text_ignored_if_none_or_empty( async def test_template_state_boolean(hass: HomeAssistant) -> None: """Test the value_template attribute.""" state = hass.states.get("cover.test_template_cover") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN @pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @@ -271,13 +310,13 @@ async def test_template_position( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test the position_template attribute.""" - hass.states.async_set("cover.test", STATE_OPEN) + hass.states.async_set("cover.test", CoverState.OPEN) attrs = {} for set_state, pos, test_state in ( - (STATE_CLOSED, 42, STATE_OPEN), - (STATE_OPEN, 0.0, STATE_CLOSED), - (STATE_CLOSED, None, STATE_UNKNOWN), + (CoverState.CLOSED, 42, CoverState.OPEN), + (CoverState.OPEN, 0.0, CoverState.CLOSED), + (CoverState.CLOSED, None, STATE_UNKNOWN), ): attrs["position"] = pos hass.states.async_set("cover.test", set_state, attributes=attrs) @@ -458,7 +497,7 @@ async def test_template_open_or_position( async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the open_cover command.""" state = hass.states.get("cover.test_template_cover") - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True @@ -498,7 +537,7 @@ async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> Non async def test_close_stop_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test the close-cover and stop_cover commands.""" state = hass.states.get("cover.test_template_cover") - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True @@ -712,10 +751,10 @@ async def test_set_position_optimistic( assert state.attributes.get("current_position") == 42.0 for service, test_state in ( - (SERVICE_CLOSE_COVER, STATE_CLOSED), - (SERVICE_OPEN_COVER, STATE_OPEN), - (SERVICE_TOGGLE, STATE_CLOSED), - (SERVICE_TOGGLE, STATE_OPEN), + (SERVICE_CLOSE_COVER, CoverState.CLOSED), + (SERVICE_OPEN_COVER, CoverState.OPEN), + (SERVICE_TOGGLE, CoverState.CLOSED), + (SERVICE_TOGGLE, CoverState.OPEN), ): await hass.services.async_call( COVER_DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True @@ -801,7 +840,7 @@ async def test_icon_template(hass: HomeAssistant) -> None: state = 
hass.states.get("cover.test_template_cover") assert state.attributes.get("icon") == "" - state = hass.states.async_set("cover.test_state", STATE_OPEN) + state = hass.states.async_set("cover.test_state", CoverState.OPEN) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -837,7 +876,7 @@ async def test_entity_picture_template(hass: HomeAssistant) -> None: state = hass.states.get("cover.test_template_cover") assert state.attributes.get("entity_picture") == "" - state = hass.states.async_set("cover.test_state", STATE_OPEN) + state = hass.states.async_set("cover.test_state", CoverState.OPEN) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -1038,10 +1077,10 @@ async def test_state_gets_lowercased(hass: HomeAssistant) -> None: assert len(hass.states.async_all()) == 2 - assert hass.states.get("cover.garage_door").state == STATE_OPEN + assert hass.states.get("cover.garage_door").state == CoverState.OPEN hass.states.async_set("binary_sensor.garage_door_sensor", "on") await hass.async_block_till_done() - assert hass.states.get("cover.garage_door").state == STATE_CLOSED + assert hass.states.get("cover.garage_door").state == CoverState.CLOSED @pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) diff --git a/tests/components/template/test_sensor.py b/tests/components/template/test_sensor.py index 5a7521f98c7378..929a890ab384ff 100644 --- a/tests/components/template/test_sensor.py +++ b/tests/components/template/test_sensor.py @@ -12,6 +12,7 @@ from homeassistant.components.template.sensor import TriggerSensorEntity from homeassistant.const import ( ATTR_ENTITY_PICTURE, + ATTR_FRIENDLY_NAME, ATTR_ICON, EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START, @@ -983,6 +984,7 @@ async def test_self_referencing_sensor_with_icon_and_picture_entity_loop( "test": { "value_template": "{{ 1 }}", "entity_picture_template": "{{ ((states.sensor.test.attributes['entity_picture'] or 0) | int) + 1 }}", + "friendly_name_template": "{{ ((states.sensor.test.attributes['friendly_name'] or 0) | int) + 1 }}", }, }, } @@ -1007,7 +1009,8 @@ async def test_self_referencing_entity_picture_loop( state = hass.states.get("sensor.test") assert int(state.state) == 1 - assert state.attributes[ATTR_ENTITY_PICTURE] == 2 + assert state.attributes[ATTR_ENTITY_PICTURE] == "3" + assert state.attributes[ATTR_FRIENDLY_NAME] == "3" await hass.async_block_till_done() assert int(state.state) == 1 diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index cc580212233e58..0dc5d87984f332 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -167,3 +167,13 @@ def mock_request(): return_value=COMMAND_OK, ) as mock_request: yield mock_request + + +@pytest.fixture(autouse=True) +def mock_signed_command() -> Generator[AsyncMock]: + """Mock Tesla Fleet Api signed_command method.""" + with patch( + "homeassistant.components.tesla_fleet.VehicleSigned.signed_command", + return_value=COMMAND_OK, + ) as mock_signed_command: + yield mock_signed_command diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data.json b/tests/components/tesla_fleet/fixtures/vehicle_data.json index 3845ae4855981e..d99bc8de5a809b 100644 --- a/tests/components/tesla_fleet/fixtures/vehicle_data.json +++ b/tests/components/tesla_fleet/fixtures/vehicle_data.json @@ -112,6 +112,7 @@ "wiper_blade_heater": false }, "drive_state": { + "active_route_destination": "Home", "active_route_latitude": 
30.2226265, "active_route_longitude": -97.6236871, "active_route_miles_to_arrival": 0.039491, diff --git a/tests/components/tesla_fleet/snapshots/test_cover.ambr b/tests/components/tesla_fleet/snapshots/test_cover.ambr index c8eb9fb257ebea..dbdb003d802384 100644 --- a/tests/components/tesla_fleet/snapshots/test_cover.ambr +++ b/tests/components/tesla_fleet/snapshots/test_cover.ambr @@ -95,246 +95,6 @@ 'state': 'closed', }) # --- -# name: test_cover[cover.test_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover[cover.test_none_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_none_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover[cover.test_none_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_none_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test None', - 'supported_features': , - 
}), - 'context': , - 'entity_id': 'cover.test_none_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover[cover.test_none_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_none_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'closed', - }) -# --- -# name: test_cover[cover.test_none_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover[cover.test_none_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- # name: test_cover[cover.test_sunroof-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -575,246 +335,6 @@ 'state': 'open', }) # --- -# name: test_cover_alt[cover.test_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'windows', - 'unique_id': 'LRWXF7EK4KC700000-windows', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- 
-# name: test_cover_alt[cover.test_none_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'charge_state_charge_port_door_open', - 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_port_door_open', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_none_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_alt[cover.test_none_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_ft', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_ft', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_none_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_alt[cover.test_none_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_rt', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rt', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_none_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'open', - }) -# --- -# name: test_cover_alt[cover.test_none_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- -# name: test_cover_alt[cover.test_none_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'window', - 'friendly_name': 'Test None', - 'supported_features': , - }), - 'context': , - 'entity_id': 'cover.test_none_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_cover_alt[cover.test_sunroof-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr index 194eda6fcff2df..02ad4b01002684 100644 --- a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr +++ b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr @@ -96,6 +96,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'not_home', + 'state': 'home', }) # --- diff --git a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr index 902c7af131ef63..eb8c57910a4905 100644 --- a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr +++ b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr @@ -269,6 +269,7 @@ 'climate_state_timestamp': 1705707520649, 'climate_state_wiper_blade_heater': False, 'color': None, + 'drive_state_active_route_destination': 'Home', 'drive_state_active_route_latitude': '**REDACTED**', 'drive_state_active_route_longitude': '**REDACTED**', 'drive_state_active_route_miles_to_arrival': 0.039491, diff --git a/tests/components/tesla_fleet/snapshots/test_media_player.ambr b/tests/components/tesla_fleet/snapshots/test_media_player.ambr index d6f3f3e48253c1..cc3018364a5ebc 100644 --- a/tests/components/tesla_fleet/snapshots/test_media_player.ambr +++ b/tests/components/tesla_fleet/snapshots/test_media_player.ambr @@ -105,7 +105,7 @@ 'original_name': 'Media player', 'platform': 'tesla_fleet', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': 0, 'translation_key': 'media', 'unique_id': 'LRWXF7EK4KC700000-media', 'unit_of_measurement': None, @@ -123,7 +123,7 @@ 'media_position': 1.0, 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', 'source': 'Audible', - 'supported_features': , + 'supported_features': , 'volume_level': 0.16129355359011466, }), 'context': , diff --git a/tests/components/tesla_fleet/snapshots/test_sensor.ambr b/tests/components/tesla_fleet/snapshots/test_sensor.ambr index c6a4860056ac74..2c3780749ca48e 100644 --- a/tests/components/tesla_fleet/snapshots/test_sensor.ambr +++ b/tests/components/tesla_fleet/snapshots/test_sensor.ambr @@ -364,6 +364,89 @@ 'state': '0.0', }) # --- +# name: test_sensors[sensor.energy_site_grid_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'island_status_unknown', + 'on_grid', + 'off_grid', + 'off_grid_unintentional', + 'off_grid_intentional', + ]), + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid Status', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'island_status', + 'unique_id': '123456-island_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.energy_site_grid_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site Grid Status', + 'options': list([ + 'island_status_unknown', + 'on_grid', + 'off_grid', + 'off_grid_unintentional', + 'off_grid_intentional', + ]), + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_status-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site Grid Status', + 'options': list([ + 'island_status_unknown', + 'on_grid', + 'off_grid', + 'off_grid_unintentional', + 'off_grid_intentional', + ]), + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- # name: test_sensors[sensor.energy_site_load_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py index addba00b93defb..ef1cfd9035742a 100644 --- a/tests/components/tesla_fleet/test_button.py +++ b/tests/components/tesla_fleet/test_button.py @@ -1,13 +1,16 @@ """Test the Tesla Fleet button platform.""" -from unittest.mock import patch +from copy import deepcopy +from unittest.mock import AsyncMock, patch import pytest from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import NotOnWhitelistFault from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import assert_entities, setup_platform @@ -63,3 +66,34 @@ async def test_press( blocking=True, ) command.assert_called_once() + + +async def test_press_signing_error( + hass: HomeAssistant, normal_config_entry: MockConfigEntry, mock_products: AsyncMock +) -> None: + """Test pressing a button with a signing error.""" + # Enable Signing + new_product = deepcopy(mock_products.return_value) + new_product["response"][0]["command_signing"] = "required" + mock_products.return_value = new_product + + with ( + patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), + ): + await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) + + with ( + patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), + patch( + "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights", + side_effect=NotOnWhitelistFault, + ), + pytest.raises(HomeAssistantError) as error, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: ["button.test_flash_lights"]}, + blocking=True, + ) + assert error.from_exception(NotOnWhitelistFault) diff --git a/tests/components/tesla_fleet/test_cover.py b/tests/components/tesla_fleet/test_cover.py index 97636ec3ae5e3e..ac5307b2fdd8d9 100644 --- a/tests/components/tesla_fleet/test_cover.py +++ b/tests/components/tesla_fleet/test_cover.py @@ -11,14 +11,9 @@ SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, + CoverState, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_OPEN, - STATE_UNKNOWN, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -106,7 +101,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -118,7 +113,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Charge Port Door entity_id = "cover.test_charge_port_door" @@ -135,7 +130,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN with patch( "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", @@ -150,7 +145,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Frunk entity_id = "cover.test_frunk" @@ -167,7 +162,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN # Trunk entity_id = "cover.test_trunk" @@ -184,7 +179,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -196,7 +191,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Sunroof entity_id = "cover.test_sunroof" @@ -213,7 +208,7 @@ async def 
test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -225,7 +220,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -237,4 +232,4 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index 9dcac4ec388c72..7c17f986663e91 100644 --- a/tests/components/tesla_fleet/test_init.py +++ b/tests/components/tesla_fleet/test_init.py @@ -1,5 +1,6 @@ """Test the Tesla Fleet init.""" +from copy import deepcopy from unittest.mock import AsyncMock, patch from aiohttp import RequestInfo @@ -404,3 +405,22 @@ async def test_init_region_issue_failed( await setup_platform(hass, normal_config_entry) mock_find_server.assert_called_once() assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_signing( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Tests when a vehicle requires signing.""" + + # Make the vehicle require command signing + products = deepcopy(mock_products.return_value) + products["response"][0]["command_signing"] = "required" + mock_products.return_value = products + + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key" + ) as mock_get_private_key: + await setup_platform(hass, normal_config_entry) + mock_get_private_key.assert_called_once() diff --git a/tests/components/tesla_fleet/test_sensor.py b/tests/components/tesla_fleet/test_sensor.py index 377179ca26a94a..5faebbc47e26a5 100644 --- a/tests/components/tesla_fleet/test_sensor.py +++ b/tests/components/tesla_fleet/test_sensor.py @@ -1,13 +1,14 @@ """Test the Tesla Fleet sensor platform.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL -from homeassistant.const import Platform +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -41,3 +42,38 @@ async def test_sensors( await hass.async_block_till_done() assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.parametrize( + ("entity_id", "initial", "restored"), + [ + ("sensor.test_battery_level", "77", "77"), + ("sensor.test_outside_temperature", "30", "30"), + ("sensor.test_time_to_arrival", "2024-01-01T00:00:06+00:00", STATE_UNAVAILABLE), + ], +) +async def test_sensors_restore( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_vehicle_data: AsyncMock, + entity_id: str, + initial: str, + restored: str, +) -> None: + """Test whether the sensor restores its state when the vehicle is offline.""" + + freezer.move_to("2024-01-01 00:00:00+00:00") + 
await setup_platform(hass, normal_config_entry, [Platform.SENSOR]) + + assert hass.states.get(entity_id).state == initial + + mock_vehicle_data.side_effect = VehicleOffline + + with patch("homeassistant.components.tesla_fleet.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_reload(normal_config_entry.entry_id) + + assert hass.states.get(entity_id).state == restored diff --git a/tests/components/tesla_fleet/test_switch.py b/tests/components/tesla_fleet/test_switch.py index 5cf812439a545d..fba4fc05cc4024 100644 --- a/tests/components/tesla_fleet/test_switch.py +++ b/tests/components/tesla_fleet/test_switch.py @@ -1,6 +1,5 @@ """Test the tesla_fleet switch platform.""" -from copy import deepcopy from unittest.mock import AsyncMock, patch import pytest @@ -166,29 +165,3 @@ async def test_switch_no_scope( {ATTR_ENTITY_ID: "switch.test_auto_steering_wheel_heater"}, blocking=True, ) - - -async def test_switch_no_signing( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, -) -> None: - """Tests that the switch entities are correct.""" - - # Make the vehicle require command signing - products = deepcopy(mock_products.return_value) - products["response"][0]["command_signing"] = "required" - mock_products.return_value = products - - await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) - with pytest.raises( - ServiceValidationError, - match="Vehicle requires command signing. Please see documentation for more details", - ): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_auto_steering_wheel_heater"}, - blocking=True, - ) diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index d50986bdb432f9..256428aa7035dc 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -1,4 +1,4 @@ -"""Fixtures for Tessie.""" +"""Fixtures for Teslemetry.""" from __future__ import annotations @@ -106,3 +106,12 @@ def mock_energy_history(): return_value=ENERGY_HISTORY, ) as mock_live_status: yield mock_live_status + + +@pytest.fixture(autouse=True) +def mock_listen(): + """Mock Teslemetry Stream listen method.""" + with patch( + "homeassistant.components.teslemetry.TeslemetryStream.listen", + ) as mock_listen: + yield mock_listen diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index 3845ae4855981e..d99bc8de5a809b 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json +++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -112,6 +112,7 @@ "wiper_blade_heater": false }, "drive_state": { + "active_route_destination": "Home", "active_route_latitude": 30.2226265, "active_route_longitude": -97.6236871, "active_route_miles_to_arrival": 0.039491, diff --git a/tests/components/teslemetry/snapshots/test_device_tracker.ambr b/tests/components/teslemetry/snapshots/test_device_tracker.ambr index 9859d9db360192..6c18cdf75c6b28 100644 --- a/tests/components/teslemetry/snapshots/test_device_tracker.ambr +++ b/tests/components/teslemetry/snapshots/test_device_tracker.ambr @@ -96,6 +96,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'not_home', + 'state': 'home', }) # --- diff --git a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 11f8a91c1aac98..3b96d6f70c0d8a 100644 --- 
a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -270,6 +270,7 @@ 'climate_state_timestamp': 1705707520649, 'climate_state_wiper_blade_heater': False, 'color': None, + 'drive_state_active_route_destination': 'Home', 'drive_state_active_route_latitude': '**REDACTED**', 'drive_state_active_route_longitude': '**REDACTED**', 'drive_state_active_route_miles_to_arrival': 0.039491, diff --git a/tests/components/teslemetry/snapshots/test_update.ambr b/tests/components/teslemetry/snapshots/test_update.ambr index 19dac1615160be..a1213f3d94b6ce 100644 --- a/tests/components/teslemetry/snapshots/test_update.ambr +++ b/tests/components/teslemetry/snapshots/test_update.ambr @@ -36,6 +36,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -46,6 +47,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', @@ -92,6 +94,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, @@ -102,6 +105,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 464f91aabfcd33..5801a356ac565b 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -11,14 +11,9 @@ SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, + CoverState, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_OPEN, - STATE_UNKNOWN, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -101,7 +96,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -113,7 +108,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Charge Port Door entity_id = "cover.test_charge_port_door" @@ -130,7 +125,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN with patch( "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", @@ -145,7 +140,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Frunk entity_id = "cover.test_frunk" @@ -162,7 +157,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN # Trunk entity_id = "cover.test_trunk" @@ -179,7 +174,7 @@ async def test_cover_services( call.assert_called_once() state 
= hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -191,7 +186,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED # Sunroof entity_id = "cover.test_sunroof" @@ -208,7 +203,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -220,7 +215,7 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_OPEN + assert state.state == CoverState.OPEN call.reset_mock() await hass.services.async_call( @@ -232,4 +227,4 @@ async def test_cover_services( call.assert_called_once() state = hass.states.get(entity_id) assert state - assert state.state is STATE_CLOSED + assert state.state == CoverState.CLOSED diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index a7afff9e341029..2a33e1def66166 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -18,7 +18,7 @@ ) from homeassistant.components.teslemetry.models import TeslemetryData from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -214,3 +214,47 @@ async def test_energy_history_refresh_error( mock_energy_history.side_effect = side_effect entry = await setup_platform(hass) assert entry.state is state + + +async def test_vehicle_stream( + hass: HomeAssistant, + mock_listen: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test vehicle stream events.""" + + entry = await setup_platform(hass, [Platform.BINARY_SENSOR]) + mock_listen.assert_called_once() + + state = hass.states.get("binary_sensor.test_status") + assert state.state == STATE_ON + + state = hass.states.get("binary_sensor.test_user_present") + assert state.state == STATE_OFF + + runtime_data: TeslemetryData = entry.runtime_data + for listener, _ in runtime_data.vehicles[0].stream._listeners.values(): + listener( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "vehicle_data": VEHICLE_DATA_ALT["response"], + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.test_user_present") + assert state.state == STATE_ON + + for listener, _ in runtime_data.vehicles[0].stream._listeners.values(): + listener( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "state": "offline", + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.test_status") + assert state.state == STATE_OFF diff --git a/tests/components/tessie/snapshots/test_update.ambr b/tests/components/tessie/snapshots/test_update.ambr index 622cf69c7f0ae5..1728c13b0ad7af 100644 --- a/tests/components/tessie/snapshots/test_update.ambr +++ b/tests/components/tessie/snapshots/test_update.ambr @@ -36,6 +36,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'auto_update': False, + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/tessie/icon.png', 
'friendly_name': 'Test Update', 'in_progress': False, @@ -46,6 +47,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.test_update', diff --git a/tests/components/tessie/test_cover.py b/tests/components/tessie/test_cover.py index be4dda3ec7b134..451d1758e560bc 100644 --- a/tests/components/tessie/test_cover.py +++ b/tests/components/tessie/test_cover.py @@ -9,8 +9,7 @@ DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, - STATE_CLOSED, - STATE_OPEN, + CoverState, ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant @@ -57,7 +56,7 @@ async def test_covers( blocking=True, ) mock_open.assert_called_once() - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # Test close windows if closefunc: @@ -72,7 +71,7 @@ async def test_covers( blocking=True, ) mock_close.assert_called_once() - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED async def test_errors(hass: HomeAssistant) -> None: diff --git a/tests/components/tessie/test_lock.py b/tests/components/tessie/test_lock.py index 43f8e23fb50191..1208bb17d55002 100644 --- a/tests/components/tessie/test_lock.py +++ b/tests/components/tessie/test_lock.py @@ -6,7 +6,6 @@ from syrupy import SnapshotAssertion from homeassistant.components.lock import ( - ATTR_CODE, DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, @@ -15,9 +14,9 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import entity_registry as er -from .common import DOMAIN, assert_entities, setup_platform +from .common import assert_entities, setup_platform async def test_locks( @@ -25,17 +24,6 @@ async def test_locks( ) -> None: """Tests that the lock entity is correct.""" - # Create the deprecated speed limit lock entity - entity_registry.async_get_or_create( - LOCK_DOMAIN, - DOMAIN, - "VINVINVIN-vehicle_state_speed_limit_mode_active", - original_name="Charge cable lock", - has_entity_name=True, - translation_key="vehicle_state_speed_limit_mode_active", - disabled_by=er.RegistryEntryDisabler.INTEGRATION, - ) - entry = await setup_platform(hass, [Platform.LOCK]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) @@ -83,65 +71,3 @@ async def test_locks( ) assert hass.states.get(entity_id).state == LockState.UNLOCKED mock_run.assert_called_once() - - -async def test_speed_limit_lock( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - issue_registry: ir.IssueRegistry, -) -> None: - """Tests that the deprecated speed limit lock entity is correct.""" - # Create the deprecated speed limit lock entity - entity = entity_registry.async_get_or_create( - LOCK_DOMAIN, - DOMAIN, - "VINVINVIN-vehicle_state_speed_limit_mode_active", - original_name="Charge cable lock", - has_entity_name=True, - translation_key="vehicle_state_speed_limit_mode_active", - ) - - with patch( - "homeassistant.components.tessie.lock.automations_with_entity", - return_value=["item"], - ): - await setup_platform(hass, [Platform.LOCK]) - assert issue_registry.async_get_issue( - DOMAIN, f"deprecated_speed_limit_{entity.entity_id}_item" - ) - - # Test lock set value functions - with patch( - 
"homeassistant.components.tessie.lock.enable_speed_limit" - ) as mock_enable_speed_limit: - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_LOCK, - {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, - blocking=True, - ) - assert hass.states.get(entity.entity_id).state == LockState.LOCKED - mock_enable_speed_limit.assert_called_once() - # Assert issue has been raised in the issue register - assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_locked") - - with patch( - "homeassistant.components.tessie.lock.disable_speed_limit" - ) as mock_disable_speed_limit: - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, - blocking=True, - ) - assert hass.states.get(entity.entity_id).state == LockState.UNLOCKED - mock_disable_speed_limit.assert_called_once() - assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_unlocked") - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - LOCK_DOMAIN, - SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "abc"}, - blocking=True, - ) diff --git a/tests/components/thethingsnetwork/test_init.py b/tests/components/thethingsnetwork/test_init.py index 1e0b64c933ddba..e39c764d5f97bf 100644 --- a/tests/components/thethingsnetwork/test_init.py +++ b/tests/components/thethingsnetwork/test_init.py @@ -4,22 +4,6 @@ from ttn_client import TTNAuthError from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from .conftest import DOMAIN - - -async def test_error_configuration( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test issue is logged when deprecated configuration is used.""" - await async_setup_component( - hass, DOMAIN, {DOMAIN: {"app_id": "123", "access_key": "42"}} - ) - await hass.async_block_till_done() - assert issue_registry.async_get_issue(DOMAIN, "manual_migration") @pytest.mark.parametrize(("exception_class"), [TTNAuthError, Exception]) diff --git a/tests/components/threshold/test_binary_sensor.py b/tests/components/threshold/test_binary_sensor.py index e0973c7a5803eb..259009c6319cf3 100644 --- a/tests/components/threshold/test_binary_sensor.py +++ b/tests/components/threshold/test_binary_sensor.py @@ -538,7 +538,7 @@ async def test_sensor_no_lower_upper( await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - assert "Lower or Upper thresholds not provided" in caplog.text + assert "Lower or Upper thresholds are not provided" in caplog.text async def test_device_id( diff --git a/tests/components/tibber/conftest.py b/tests/components/tibber/conftest.py index 0b48531bde148b..441a9d0b8883a9 100644 --- a/tests/components/tibber/conftest.py +++ b/tests/components/tibber/conftest.py @@ -5,6 +5,7 @@ import pytest +from homeassistant.components.recorder import Recorder from homeassistant.components.tibber.const import DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant @@ -26,7 +27,7 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture async def mock_tibber_setup( - config_entry: MockConfigEntry, hass: HomeAssistant + recorder_mock: Recorder, config_entry: MockConfigEntry, hass: HomeAssistant ) -> AsyncGenerator[MagicMock]: """Mock tibber entry setup.""" unique_user_id = "unique_user_id" diff --git a/tests/components/tibber/test_diagnostics.py 
b/tests/components/tibber/test_diagnostics.py index 34ecb63dfec969..16c735596d0868 100644 --- a/tests/components/tibber/test_diagnostics.py +++ b/tests/components/tibber/test_diagnostics.py @@ -19,12 +19,9 @@ async def test_entry_diagnostics( config_entry, ) -> None: """Test config entry diagnostics.""" - with ( - patch( - "tibber.Tibber.update_info", - return_value=None, - ), - patch("homeassistant.components.tibber.discovery.async_load_platform"), + with patch( + "tibber.Tibber.update_info", + return_value=None, ): assert await async_setup_component(hass, "tibber", {}) diff --git a/tests/components/tibber/test_notify.py b/tests/components/tibber/test_notify.py index 69af92c4d5dc00..9b731e78bf694d 100644 --- a/tests/components/tibber/test_notify.py +++ b/tests/components/tibber/test_notify.py @@ -6,7 +6,6 @@ import pytest from homeassistant.components.recorder import Recorder -from homeassistant.components.tibber import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -19,18 +18,8 @@ async def test_notification_services( notify_state = hass.states.get("notify.tibber") assert notify_state is not None - # Assert legacy notify service hass been added - assert hass.services.has_service("notify", DOMAIN) - - # Test legacy notify service - service = "tibber" - service_data = {"message": "The message", "title": "A title"} - await hass.services.async_call("notify", service, service_data, blocking=True) calls: MagicMock = mock_tibber_setup.send_notification - calls.assert_called_once_with(message="The message", title="A title") - calls.reset_mock() - # Test notify entity service service = "send_message" service_data = { @@ -44,15 +33,6 @@ async def test_notification_services( calls.side_effect = TimeoutError - with pytest.raises(HomeAssistantError): - # Test legacy notify service - await hass.services.async_call( - "notify", - service="tibber", - service_data={"message": "The message", "title": "A title"}, - blocking=True, - ) - with pytest.raises(HomeAssistantError): # Test notify entity service await hass.services.async_call( diff --git a/tests/components/tibber/test_repairs.py b/tests/components/tibber/test_repairs.py deleted file mode 100644 index 5e5fde4569e21d..00000000000000 --- a/tests/components/tibber/test_repairs.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Test loading of the Tibber config entry.""" - -from unittest.mock import MagicMock - -from homeassistant.components.recorder import Recorder -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow -from tests.typing import ClientSessionGenerator - - -async def test_repair_flow( - recorder_mock: Recorder, - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_tibber_setup: MagicMock, - hass_client: ClientSessionGenerator, -) -> None: - """Test unloading the entry.""" - - # Test legacy notify service - service = "tibber" - service_data = {"message": "The message", "title": "A title"} - await hass.services.async_call("notify", service, service_data, blocking=True) - calls: MagicMock = mock_tibber_setup.send_notification - - calls.assert_called_once_with(message="The message", title="A title") - calls.reset_mock() - - http_client = await hass_client() - # Assert the issue is present - assert issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_tibber_{service}", - ) - assert len(issue_registry.issues) == 1 - - data = 
await start_repair_fix_flow( - http_client, "notify", f"migrate_notify_tibber_{service}" - ) - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - # Simulate the users confirmed the repair flow - data = await process_repair_fix_flow(http_client, flow_id) - assert data["type"] == "create_entry" - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_tibber_{service}", - ) - assert len(issue_registry.issues) == 0 diff --git a/tests/components/tibber/test_services.py b/tests/components/tibber/test_services.py index 1df91d719fe818..dc6f5d2789df2c 100644 --- a/tests/components/tibber/test_services.py +++ b/tests/components/tibber/test_services.py @@ -1,6 +1,5 @@ """Test service for Tibber integration.""" -import asyncio import datetime as dt from unittest.mock import MagicMock @@ -8,159 +7,104 @@ import pytest from homeassistant.components.tibber.const import DOMAIN -from homeassistant.components.tibber.services import PRICE_SERVICE_NAME, __get_prices -from homeassistant.core import ServiceCall +from homeassistant.components.tibber.services import PRICE_SERVICE_NAME +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.util import dt as dt_util -STARTTIME = dt.datetime.fromtimestamp(1615766400).replace( - tzinfo=dt_util.get_default_time_zone() -) +START_TIME = dt.datetime.fromtimestamp(1615766400).replace(tzinfo=dt.UTC) def generate_mock_home_data(): """Create mock data from the tibber connection.""" - tomorrow = STARTTIME + dt.timedelta(days=1) + tomorrow = START_TIME + dt.timedelta(days=1) mock_homes = [ MagicMock( name="first_home", - info={ - "viewer": { - "home": { - "currentSubscription": { - "priceInfo": { - "today": [ - { - "startsAt": STARTTIME.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - STARTTIME + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - "tomorrow": [ - { - "startsAt": tomorrow.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - tomorrow + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - } - } - } - } + price_total={ + START_TIME.isoformat(): 0.36914, + (START_TIME + dt.timedelta(hours=1)).isoformat(): 0.36914, + tomorrow.isoformat(): 0.46914, + (tomorrow + dt.timedelta(hours=1)).isoformat(): 0.46914, + }, + price_level={ + START_TIME.isoformat(): "VERY_EXPENSIVE", + (START_TIME + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", + tomorrow.isoformat(): "VERY_EXPENSIVE", + (tomorrow + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", }, ), MagicMock( name="second_home", - info={ - "viewer": { - "home": { - "currentSubscription": { - "priceInfo": { - "today": [ - { - "startsAt": STARTTIME.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - STARTTIME + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - "tomorrow": [ - { - "startsAt": tomorrow.isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "startsAt": ( - tomorrow + dt.timedelta(hours=1) - ).isoformat(), - "total": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - } - } - } - } + price_total={ + START_TIME.isoformat(): 0.36914, + (START_TIME + dt.timedelta(hours=1)).isoformat(): 0.36914, + tomorrow.isoformat(): 0.46914, + 
(tomorrow + dt.timedelta(hours=1)).isoformat(): 0.46914, + }, + price_level={ + START_TIME.isoformat(): "VERY_EXPENSIVE", + (START_TIME + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", + tomorrow.isoformat(): "VERY_EXPENSIVE", + (tomorrow + dt.timedelta(hours=1)).isoformat(): "VERY_EXPENSIVE", }, ), ] + # set name again, as the name is special in mock objects + # see documentation: https://docs.python.org/3/library/unittest.mock.html#mock-names-and-the-name-attribute mock_homes[0].name = "first_home" mock_homes[1].name = "second_home" return mock_homes -def create_mock_tibber_connection(): - """Create a mock tibber connection.""" - tibber_connection = MagicMock() - tibber_connection.get_homes.return_value = generate_mock_home_data() - return tibber_connection - - -def create_mock_hass(): - """Create a mock hass object.""" - mock_hass = MagicMock - mock_hass.data = {"tibber": create_mock_tibber_connection()} - return mock_hass - - +@pytest.mark.parametrize( + "data", + [ + {}, + {"start": START_TIME.isoformat()}, + { + "start": START_TIME.isoformat(), + "end": (START_TIME + dt.timedelta(days=1)).isoformat(), + }, + ], +) async def test_get_prices( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, + data, ) -> None: - """Test __get_prices with mock data.""" - freezer.move_to(STARTTIME) - tomorrow = STARTTIME + dt.timedelta(days=1) - call = ServiceCall( - DOMAIN, - PRICE_SERVICE_NAME, - {"start": STARTTIME.date().isoformat(), "end": tomorrow.date().isoformat()}, - ) + """Test get_prices with mock data.""" + freezer.move_to(START_TIME) + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() - result = await __get_prices(call, hass=create_mock_hass()) + result = await hass.services.async_call( + DOMAIN, PRICE_SERVICE_NAME, data, blocking=True, return_response=True + ) + await hass.async_block_till_done() assert result == { "prices": { "first_home": [ { - "start_time": STARTTIME, - "price": 0.46914, + "start_time": START_TIME.isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": STARTTIME, - "price": 0.46914, + "start_time": START_TIME.isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, ], @@ -168,79 +112,48 @@ async def test_get_prices( } -async def test_get_prices_no_input( +async def test_get_prices_start_tomorrow( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, ) -> None: - """Test __get_prices with no input.""" - freezer.move_to(STARTTIME) - call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {}) - - result = await __get_prices(call, hass=create_mock_hass()) - - assert result == { - "prices": { - "first_home": [ - { - "start_time": STARTTIME, - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - "second_home": [ - { - "start_time": STARTTIME, - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, - "level": "VERY_EXPENSIVE", - }, - ], - } - } + """Test get_prices with start date 
tomorrow.""" + freezer.move_to(START_TIME) + tomorrow = START_TIME + dt.timedelta(days=1) + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() -async def test_get_prices_start_tomorrow( - freezer: FrozenDateTimeFactory, -) -> None: - """Test __get_prices with start date tomorrow.""" - freezer.move_to(STARTTIME) - tomorrow = STARTTIME + dt.timedelta(days=1) - call = ServiceCall( - DOMAIN, PRICE_SERVICE_NAME, {"start": tomorrow.date().isoformat()} + result = await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": tomorrow.isoformat()}, + blocking=True, + return_response=True, ) - - result = await __get_prices(call, hass=create_mock_hass()) + await hass.async_block_till_done() assert result == { "prices": { "first_home": [ { - "start_time": tomorrow, + "start_time": tomorrow.isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": tomorrow + dt.timedelta(hours=1), + "start_time": (tomorrow + dt.timedelta(hours=1)).isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": tomorrow, + "start_time": tomorrow.isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": tomorrow + dt.timedelta(hours=1), + "start_time": (tomorrow + dt.timedelta(hours=1)).isoformat(), "price": 0.46914, "level": "VERY_EXPENSIVE", }, @@ -252,46 +165,55 @@ async def test_get_prices_start_tomorrow( @pytest.mark.parametrize( "start_time", [ - STARTTIME.isoformat(), - STARTTIME.replace(tzinfo=None).isoformat(), - (STARTTIME + dt.timedelta(hours=4)) + START_TIME.isoformat(), + (START_TIME + dt.timedelta(hours=4)) .replace(tzinfo=dt.timezone(dt.timedelta(hours=4))) .isoformat(), ], ) async def test_get_prices_with_timezones( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, start_time: str, ) -> None: - """Test __get_prices with timezone and without.""" - freezer.move_to(STARTTIME) - call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {"start": start_time}) + """Test get_prices with timezone and without.""" + freezer.move_to(START_TIME) - result = await __get_prices(call, hass=create_mock_hass()) + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() + + result = await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": start_time}, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() assert result == { "prices": { "first_home": [ { - "start_time": STARTTIME, - "price": 0.46914, + "start_time": START_TIME.isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": STARTTIME, - "price": 0.46914, + "start_time": START_TIME.isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, { - "start_time": STARTTIME + dt.timedelta(hours=1), - "price": 0.46914, + "start_time": (START_TIME + dt.timedelta(hours=1)).isoformat(), + "price": 0.36914, "level": "VERY_EXPENSIVE", }, ], @@ -302,29 +224,53 @@ async def test_get_prices_with_timezones( @pytest.mark.parametrize( "start_time", [ - (STARTTIME + dt.timedelta(hours=4)).isoformat(), - (STARTTIME + dt.timedelta(hours=4)).replace(tzinfo=None).isoformat(), + (START_TIME + dt.timedelta(hours=2)).isoformat(), + (START_TIME + dt.timedelta(hours=2)) + .astimezone(tz=dt.timezone(dt.timedelta(hours=5))) + .isoformat(), + (START_TIME + 
dt.timedelta(hours=2)) + .astimezone(tz=dt.timezone(dt.timedelta(hours=8))) + .isoformat(), + (START_TIME + dt.timedelta(hours=2)) + .astimezone(tz=dt.timezone(dt.timedelta(hours=-8))) + .isoformat(), ], ) async def test_get_prices_with_wrong_timezones( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, freezer: FrozenDateTimeFactory, start_time: str, ) -> None: - """Test __get_prices with timezone and without, while expecting it to fail.""" - freezer.move_to(STARTTIME) - call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {"start": start_time}) - - result = await __get_prices(call, hass=create_mock_hass()) - assert result == {"prices": {"first_home": [], "second_home": []}} + """Test get_prices with incorrect time and/or timezone. We expect an empty list.""" + freezer.move_to(START_TIME) + tomorrow = START_TIME + dt.timedelta(days=1) + mock_tibber_setup.get_homes.return_value = generate_mock_home_data() -async def test_get_prices_invalid_input() -> None: - """Test __get_prices with invalid input.""" + result = await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": start_time, "end": tomorrow.isoformat()}, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() - call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {"start": "test"}) - task = asyncio.create_task(__get_prices(call, hass=create_mock_hass())) + assert result == {"prices": {"first_home": [], "second_home": []}} - with pytest.raises(ServiceValidationError) as excinfo: - await task - assert "Invalid datetime provided." in str(excinfo.value) +async def test_get_prices_invalid_input( + mock_tibber_setup: MagicMock, + hass: HomeAssistant, +) -> None: + """Test get_prices with invalid input.""" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + PRICE_SERVICE_NAME, + {"start": "test"}, + blocking=True, + return_response=True, + ) diff --git a/tests/components/todo/__init__.py b/tests/components/todo/__init__.py index dfee74599cdb58..0138e561fad154 100644 --- a/tests/components/todo/__init__.py +++ b/tests/components/todo/__init__.py @@ -1 +1,63 @@ """Tests for the To-do integration.""" + +from homeassistant.components.todo import DOMAIN, TodoItem, TodoListEntity +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from tests.common import MockConfigEntry, MockPlatform, mock_platform + +TEST_DOMAIN = "test" + + +class MockFlow(ConfigFlow): + """Test flow.""" + + +class MockTodoListEntity(TodoListEntity): + """Test todo list entity.""" + + def __init__(self, items: list[TodoItem] | None = None) -> None: + """Initialize entity.""" + self._attr_todo_items = items or [] + + @property + def items(self) -> list[TodoItem]: + """Return the items in the To-do list.""" + return self._attr_todo_items + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + self._attr_todo_items.append(item) + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete an item in the To-do list.""" + self._attr_todo_items = [item for item in self.items if item.uid not in uids] + + +async def create_mock_platform( + hass: HomeAssistant, + entities: list[TodoListEntity], +) -> MockConfigEntry: + """Create a todo platform with the specified entities.""" + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: 
AddEntitiesCallback, + ) -> None: + """Set up test todo platform via config entry.""" + async_add_entities(entities) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/todo/conftest.py b/tests/components/todo/conftest.py new file mode 100644 index 00000000000000..bcee60e1d96608 --- /dev/null +++ b/tests/components/todo/conftest.py @@ -0,0 +1,92 @@ +"""Fixtures for the todo component tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock + +import pytest + +from homeassistant.components.todo import ( + DOMAIN, + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . import TEST_DOMAIN, MockFlow, MockTodoListEntity + +from tests.common import MockModule, mock_config_flow, mock_integration, mock_platform + + +@pytest.fixture(autouse=True) +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + + with mock_config_flow(TEST_DOMAIN, MockFlow): + yield + + +@pytest.fixture(autouse=True) +def mock_setup_integration(hass: HomeAssistant) -> None: + """Fixture to set up a mock integration.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, + config_entry: ConfigEntry, + ) -> bool: + await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO]) + return True + + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + + +@pytest.fixture(autouse=True) +async def set_time_zone(hass: HomeAssistant) -> None: + """Set the time zone for the tests that keeps UTC-6 all year round.""" + await hass.config.async_set_time_zone("America/Regina") + + +@pytest.fixture(name="test_entity_items") +def mock_test_entity_items() -> list[TodoItem]: + """Fixture that creates the items returned by the test entity.""" + return [ + TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), + TodoItem(summary="Item #2", uid="2", status=TodoItemStatus.COMPLETED), + ] + + +@pytest.fixture(name="test_entity") +def mock_test_entity(test_entity_items: list[TodoItem]) -> TodoListEntity: + """Fixture that creates a test TodoList entity with mock service calls.""" + entity1 = MockTodoListEntity(test_entity_items) + entity1.entity_id = "todo.entity1" + entity1._attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + | TodoListEntityFeature.MOVE_TODO_ITEM + ) + entity1.async_create_todo_item = AsyncMock(wraps=entity1.async_create_todo_item) + entity1.async_update_todo_item = AsyncMock() + entity1.async_delete_todo_items = AsyncMock(wraps=entity1.async_delete_todo_items) + entity1.async_move_todo_item = AsyncMock() + 
return entity1 diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index b62505b14b44c7..fd052a7f8a3599 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -1,9 +1,7 @@ """Tests for the todo integration.""" -from collections.abc import Generator import datetime from typing import Any -from unittest.mock import AsyncMock import zoneinfo import pytest @@ -26,25 +24,17 @@ TodoServices, intent as todo_intent, ) -from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow -from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, Platform +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import intent -from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - MockModule, - MockPlatform, - mock_config_flow, - mock_integration, - mock_platform, -) +from . import MockTodoListEntity, create_mock_platform + from tests.typing import WebSocketGenerator -TEST_DOMAIN = "test" ITEM_1 = { "uid": "1", "summary": "Item #1", @@ -59,130 +49,6 @@ TEST_OFFSET = "-06:00" -class MockFlow(ConfigFlow): - """Test flow.""" - - -class MockTodoListEntity(TodoListEntity): - """Test todo list entity.""" - - def __init__(self, items: list[TodoItem] | None = None) -> None: - """Initialize entity.""" - self._attr_todo_items = items or [] - - @property - def items(self) -> list[TodoItem]: - """Return the items in the To-do list.""" - return self._attr_todo_items - - async def async_create_todo_item(self, item: TodoItem) -> None: - """Add an item to the To-do list.""" - self._attr_todo_items.append(item) - - async def async_delete_todo_items(self, uids: list[str]) -> None: - """Delete an item in the To-do list.""" - self._attr_todo_items = [item for item in self.items if item.uid not in uids] - - -@pytest.fixture(autouse=True) -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - - with mock_config_flow(TEST_DOMAIN, MockFlow): - yield - - -@pytest.fixture(autouse=True) -def mock_setup_integration(hass: HomeAssistant) -> None: - """Fixture to set up a mock integration.""" - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_unload_entry_init( - hass: HomeAssistant, - config_entry: ConfigEntry, - ) -> bool: - await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO]) - return True - - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - async_unload_entry=async_unload_entry_init, - ), - ) - - -@pytest.fixture(autouse=True) -async def set_time_zone(hass: HomeAssistant) -> None: - """Set the time zone for the tests that keesp UTC-6 all year round.""" - await hass.config.async_set_time_zone("America/Regina") - - -async def create_mock_platform( - hass: HomeAssistant, - entities: list[TodoListEntity], -) -> MockConfigEntry: - """Create a todo platform with the specified 
entities.""" - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test event platform via config entry.""" - async_add_entities(entities) - - mock_platform( - hass, - f"{TEST_DOMAIN}.{DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), - ) - - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry - - -@pytest.fixture(name="test_entity_items") -def mock_test_entity_items() -> list[TodoItem]: - """Fixture that creates the items returned by the test entity.""" - return [ - TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), - TodoItem(summary="Item #2", uid="2", status=TodoItemStatus.COMPLETED), - ] - - -@pytest.fixture(name="test_entity") -def mock_test_entity(test_entity_items: list[TodoItem]) -> TodoListEntity: - """Fixture that creates a test TodoList entity with mock service calls.""" - entity1 = MockTodoListEntity(test_entity_items) - entity1.entity_id = "todo.entity1" - entity1._attr_supported_features = ( - TodoListEntityFeature.CREATE_TODO_ITEM - | TodoListEntityFeature.UPDATE_TODO_ITEM - | TodoListEntityFeature.DELETE_TODO_ITEM - | TodoListEntityFeature.MOVE_TODO_ITEM - ) - entity1.async_create_todo_item = AsyncMock(wraps=entity1.async_create_todo_item) - entity1.async_update_todo_item = AsyncMock() - entity1.async_delete_todo_items = AsyncMock(wraps=entity1.async_delete_todo_items) - entity1.async_move_todo_item = AsyncMock() - return entity1 - - async def test_unload_entry( hass: HomeAssistant, test_entity: TodoListEntity, @@ -1141,14 +1007,17 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": "beer"}, "name": {"value": "list 1"}}, + {ATTR_ITEM: {"value": " beer "}, "name": {"value": "list 1"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.success_results[0].name == "list 1" + assert response.success_results[0].type == intent.IntentResponseTargetType.ENTITY + assert response.success_results[0].id == entity1.entity_id assert len(entity1.items) == 1 assert len(entity2.items) == 0 - assert entity1.items[0].summary == "beer" + assert entity1.items[0].summary == "beer" # summary is trimmed assert entity1.items[0].status == TodoItemStatus.NEEDS_ACTION entity1.items.clear() diff --git a/tests/components/toon/test_config_flow.py b/tests/components/toon/test_config_flow.py index 228cb0b0239112..7855379db5b4b0 100644 --- a/tests/components/toon/test_config_flow.py +++ b/tests/components/toon/test_config_flow.py @@ -7,10 +7,10 @@ from toonapi import Agreement, ToonError from homeassistant.components.toon.const import CONF_AGREEMENT, DOMAIN -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.setup import async_setup_component @@ -249,6 +249,10 @@ async def test_agreement_already_set_up( assert result3["reason"] == "already_configured" +@pytest.mark.parametrize( # Remove 
when translations fixed + "ignore_translations", + ["component.toon.config.abort.connection_error"], +) @pytest.mark.usefixtures("current_request_with_host") async def test_toon_abort( hass: HomeAssistant, diff --git a/tests/components/totalconnect/test_alarm_control_panel.py b/tests/components/totalconnect/test_alarm_control_panel.py index 453c9be485ae6e..bc76f7243ca402 100644 --- a/tests/components/totalconnect/test_alarm_control_panel.py +++ b/tests/components/totalconnect/test_alarm_control_panel.py @@ -12,7 +12,10 @@ TotalConnectError, ) -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.components.totalconnect.alarm_control_panel import ( SERVICE_ALARM_ARM_AWAY_INSTANT, SERVICE_ALARM_ARM_HOME_INSTANT, @@ -26,14 +29,6 @@ SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMING, - STATE_ALARM_DISARMED, - STATE_ALARM_DISARMING, - STATE_ALARM_TRIGGERED, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -100,8 +95,8 @@ async def test_arm_home_success( with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -113,9 +108,9 @@ async def test_arm_home_success( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_HOME # second partition should not be armed - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED async def test_arm_home_failure(hass: HomeAssistant) -> None: @@ -125,7 +120,7 @@ async def test_arm_home_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -134,7 +129,7 @@ async def test_arm_home_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Failed to arm home test" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # config entry usercode is invalid @@ -144,7 +139,7 @@ async def test_arm_home_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Usercode is invalid, did not arm home" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a 
re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 @@ -159,8 +154,8 @@ async def test_arm_home_instant_success( with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -172,7 +167,7 @@ async def test_arm_home_instant_success( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_HOME async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: @@ -182,7 +177,7 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -191,7 +186,7 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Failed to arm home instant test" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -201,7 +196,7 @@ async def test_arm_home_instant_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Usercode is invalid, did not arm home instant" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 @@ -216,8 +211,8 @@ async def test_arm_away_instant_success( with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED - assert hass.states.get(ENTITY_ID_2).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED + assert hass.states.get(ENTITY_ID_2).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -229,7 +224,7 @@ async def test_arm_away_instant_success( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: @@ -239,7 +234,7 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await 
async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -248,7 +243,7 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Failed to arm away instant test" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -258,7 +253,7 @@ async def test_arm_away_instant_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Usercode is invalid, did not arm away instant" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 @@ -273,7 +268,7 @@ async def test_arm_away_success( with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -285,7 +280,7 @@ async def test_arm_away_success( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY async def test_arm_away_failure(hass: HomeAssistant) -> None: @@ -295,7 +290,7 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -304,7 +299,7 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Failed to arm away test" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -314,7 +309,7 @@ async def test_arm_away_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Usercode is invalid, did not arm away" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 @@ -329,7 +324,7 @@ async def test_disarm_success( with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert 
hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 1 await hass.services.async_call( @@ -341,7 +336,7 @@ async def test_disarm_success( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED async def test_disarm_failure(hass: HomeAssistant) -> None: @@ -355,7 +350,7 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -364,7 +359,7 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Failed to disarm test" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 2 # usercode is invalid @@ -374,7 +369,7 @@ async def test_disarm_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Usercode is invalid, did not disarm" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 @@ -389,7 +384,7 @@ async def test_disarm_code_required( with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 1 # runtime user entered code is bad @@ -399,7 +394,7 @@ async def test_disarm_code_required( await hass.services.async_call( ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA_WITH_CODE, blocking=True ) - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY # code check means the call to total_connect never happens assert mock_request.call_count == 1 @@ -415,7 +410,7 @@ async def test_disarm_code_required( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED async def test_arm_night_success( @@ -427,7 +422,7 @@ async def test_arm_night_success( with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -439,7 +434,7 @@ async def test_arm_night_success( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert 
hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_NIGHT async def test_arm_night_failure(hass: HomeAssistant) -> None: @@ -449,7 +444,7 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 with pytest.raises(HomeAssistantError) as err: @@ -458,7 +453,7 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Failed to arm night test" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 2 # usercode is invalid @@ -468,7 +463,7 @@ async def test_arm_night_failure(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert f"{err.value}" == "Usercode is invalid, did not arm night" - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED # should have started a re-auth flow assert len(hass.config_entries.flow.async_progress_by_handler(DOMAIN)) == 1 assert mock_request.call_count == 3 @@ -481,7 +476,7 @@ async def test_arming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> No with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 await hass.services.async_call( @@ -493,7 +488,7 @@ async def test_arming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> No async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMING + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMING async def test_disarming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: @@ -503,7 +498,7 @@ async def test_disarming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.ARMED_AWAY assert mock_request.call_count == 1 await hass.services.async_call( @@ -515,7 +510,7 @@ async def test_disarming(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_request.call_count == 3 - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMING + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMING async def test_triggered_fire(hass: HomeAssistant) -> None: @@ -526,7 +521,7 @@ async def test_triggered_fire(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == 
AlarmControlPanelState.TRIGGERED assert state.attributes.get("triggered_source") == "Fire/Smoke" assert mock_request.call_count == 1 @@ -539,7 +534,7 @@ async def test_triggered_police(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED assert state.attributes.get("triggered_source") == "Police/Medical" assert mock_request.call_count == 1 @@ -552,7 +547,7 @@ async def test_triggered_carbon_monoxide(hass: HomeAssistant) -> None: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) - assert state.state == STATE_ALARM_TRIGGERED + assert state.state == AlarmControlPanelState.TRIGGERED assert state.attributes.get("triggered_source") == "Carbon Monoxide" assert mock_request.call_count == 1 @@ -564,7 +559,10 @@ async def test_armed_custom(hass: HomeAssistant) -> None: with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_ARMED_CUSTOM_BYPASS + assert ( + hass.states.get(ENTITY_ID).state + == AlarmControlPanelState.ARMED_CUSTOM_BYPASS + ) assert mock_request.call_count == 1 @@ -596,7 +594,7 @@ async def test_other_update_failures( # first things work as planned await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 1 # then an error: ServiceUnavailable --> UpdateFailed @@ -610,7 +608,7 @@ async def test_other_update_failures( freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 3 # then an error: TotalConnectError --> UpdateFailed @@ -624,7 +622,7 @@ async def test_other_update_failures( freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(ENTITY_ID).state == STATE_ALARM_DISARMED + assert hass.states.get(ENTITY_ID).state == AlarmControlPanelState.DISARMED assert mock_request.call_count == 5 # unknown TotalConnect status via ValueError diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index 4100d8781d48ed..75eab8eeb73262 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -168,12 +168,18 @@ async def snapshot_platform( ), "Please limit the loaded platforms to 1 platform." 
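The totalconnect assertions above replace the deprecated STATE_ALARM_* constants with AlarmControlPanelState members one-for-one. That swap only works because the enum is assumed here to be a StrEnum whose members compare equal to the old string values, while hass.states.get(...).state stays a plain str. A minimal, self-contained sketch of that equivalence (illustrative values, not the actual alarm_control_panel source):

from enum import StrEnum


class AlarmControlPanelState(StrEnum):
    """Subset of the alarm states asserted above (values assumed)."""

    DISARMED = "disarmed"
    ARMING = "arming"
    ARMED_HOME = "armed_home"
    ARMED_AWAY = "armed_away"
    ARMED_NIGHT = "armed_night"
    ARMED_CUSTOM_BYPASS = "armed_custom_bypass"
    DISARMING = "disarming"
    TRIGGERED = "triggered"


# The state machine stores plain strings, so comparing against the enum member
# and against the old string constant are interchangeable:
assert AlarmControlPanelState.DISARMED == "disarmed"
assert "armed_away" == AlarmControlPanelState.ARMED_AWAY
assert str(AlarmControlPanelState.TRIGGERED) == "triggered"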
translations = await async_get_translations(hass, "en", "entity", [DOMAIN]) + unique_device_classes = [] for entity_entry in entity_entries: if entity_entry.translation_key: key = f"component.{DOMAIN}.entity.{entity_entry.domain}.{entity_entry.translation_key}.name" + single_device_class_translation = False + if key not in translations and entity_entry.original_device_class: + if entity_entry.original_device_class not in unique_device_classes: + single_device_class_translation = True + unique_device_classes.append(entity_entry.original_device_class) assert ( - key in translations - ), f"No translation for entity {entity_entry.unique_id}, expected {key}" + (key in translations) or single_device_class_translation + ), f"No translation or non unique device_class for entity {entity_entry.unique_id}, expected {key}" assert entity_entry == snapshot( name=f"{entity_entry.entity_id}-entry" ), f"entity entry snapshot failed for {entity_entry.entity_id}" diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index f1586ee4a0a17f..25a4bd202707c5 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -32,11 +32,12 @@ def mock_discovery(): "homeassistant.components.tplink.Discover", discover=DEFAULT, discover_single=DEFAULT, + try_connect_all=DEFAULT, ) as mock_discovery: device = _mocked_device( device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), credentials_hash=CREDENTIALS_HASH_KLAP, - alias=None, + alias="My Bulb", ) devices = { "127.0.0.1": _mocked_device( @@ -47,6 +48,7 @@ def mock_discovery(): } mock_discovery["discover"].return_value = devices mock_discovery["discover_single"].return_value = device + mock_discovery["try_connect_all"].return_value = device mock_discovery["mock_device"] = device yield mock_discovery diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index 9f9d61b6e11561..f60132fd2c2869 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -34,6 +34,16 @@ "type": "Switch", "category": "Config" }, + "child_lock": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "pir_enabled": { + "value": true, + "type": "Switch", + "category": "Config" + }, "current_consumption": { "value": 5.23, "type": "Sensor", @@ -200,11 +210,21 @@ "type": "BinarySensor", "category": "Primary" }, + "motion_detected": { + "value": false, + "type": "BinarySensor", + "category": "Primary" + }, "alarm": { "value": false, "type": "BinarySensor", "category": "Info" }, + "reboot": { + "value": "", + "type": "Action", + "category": "Debug" + }, "test_alarm": { "value": "", "type": "Action", @@ -293,5 +313,10 @@ "type": "Choice", "category": "Config", "choices": ["low", "normal", "high"] + }, + "water_alert_timestamp": { + "type": "Sensor", + "category": "Info", + "value": "2024-06-24 10:03:11.046643+01:00" } } diff --git a/tests/components/tplink/snapshots/test_binary_sensor.ambr b/tests/components/tplink/snapshots/test_binary_sensor.ambr index cded74da363e1d..4a1cfe5b4110dd 100644 --- a/tests/components/tplink/snapshots/test_binary_sensor.ambr +++ b/tests/components/tplink/snapshots/test_binary_sensor.ambr @@ -206,7 +206,7 @@ 'state': 'off', }) # --- -# name: test_states[binary_sensor.my_device_overheated-entry] +# name: test_states[binary_sensor.my_device_motion-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -217,8 +217,8 @@ 'device_id': , 'disabled_by': None, 'domain': 
'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.my_device_overheated', + 'entity_category': None, + 'entity_id': 'binary_sensor.my_device_motion', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -228,32 +228,32 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'Overheated', + 'original_name': 'Motion', 'platform': 'tplink', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'overheated', - 'unique_id': '123456789ABCDEFGH_overheated', + 'translation_key': 'motion_detected', + 'unique_id': '123456789ABCDEFGH_motion_detected', 'unit_of_measurement': None, }) # --- -# name: test_states[binary_sensor.my_device_overheated-state] +# name: test_states[binary_sensor.my_device_motion-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'my_device Overheated', + 'device_class': 'motion', + 'friendly_name': 'my_device Motion', }), 'context': , - 'entity_id': 'binary_sensor.my_device_overheated', + 'entity_id': 'binary_sensor.my_device_motion', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_states[binary_sensor.my_device_temperature_warning-entry] +# name: test_states[binary_sensor.my_device_overheated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -262,10 +262,10 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.my_device_temperature_warning', + 'entity_id': 'binary_sensor.my_device_overheated', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -275,18 +275,32 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': 'Temperature warning', + 'original_name': 'Overheated', 'platform': 'tplink', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'temperature_warning', - 'unique_id': '123456789ABCDEFGH_temperature_warning', + 'translation_key': 'overheated', + 'unique_id': '123456789ABCDEFGH_overheated', 'unit_of_measurement': None, }) # --- -# name: test_states[binary_sensor.my_device_update-entry] +# name: test_states[binary_sensor.my_device_overheated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'my_device Overheated', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_overheated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.my_device_temperature_warning-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -295,10 +309,10 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': None, + 'disabled_by': , 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.my_device_update', + 'entity_id': 'binary_sensor.my_device_temperature_warning', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -308,31 +322,17 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Update', + 'original_name': 'Temperature warning', 'platform': 'tplink', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'update_available', - 'unique_id': '123456789ABCDEFGH_update_available', + 'translation_key': 
'temperature_warning', + 'unique_id': '123456789ABCDEFGH_temperature_warning', 'unit_of_measurement': None, }) # --- -# name: test_states[binary_sensor.my_device_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'update', - 'friendly_name': 'my_device Update', - }), - 'context': , - 'entity_id': 'binary_sensor.my_device_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_states[my_device-entry] DeviceRegistryEntrySnapshot({ 'area_id': None, diff --git a/tests/components/tplink/snapshots/test_button.ambr b/tests/components/tplink/snapshots/test_button.ambr index d6019861804916..bb75f4642e1654 100644 --- a/tests/components/tplink/snapshots/test_button.ambr +++ b/tests/components/tplink/snapshots/test_button.ambr @@ -1,4 +1,37 @@ # serializer version: 1 +# name: test_states[button.my_device_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reboot', + 'unique_id': '123456789ABCDEFGH_reboot', + 'unit_of_measurement': None, + }) +# --- # name: test_states[button.my_device_stop_alarm-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/snapshots/test_climate.ambr b/tests/components/tplink/snapshots/test_climate.ambr index ad863fc79ae725..8236f332046e11 100644 --- a/tests/components/tplink/snapshots/test_climate.ambr +++ b/tests/components/tplink/snapshots/test_climate.ambr @@ -42,7 +42,7 @@ # name: test_states[climate.thermostat-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'current_temperature': 20, + 'current_temperature': 20.2, 'friendly_name': 'thermostat', 'hvac_action': , 'hvac_modes': list([ @@ -52,7 +52,7 @@ 'max_temp': 65536, 'min_temp': None, 'supported_features': , - 'temperature': 22, + 'temperature': 22.2, }), 'context': , 'entity_id': 'climate.thermostat', diff --git a/tests/components/tplink/snapshots/test_sensor.ambr b/tests/components/tplink/snapshots/test_sensor.ambr index e639540e552098..739f02e51f0f99 100644 --- a/tests/components/tplink/snapshots/test_sensor.ambr +++ b/tests/components/tplink/snapshots/test_sensor.ambr @@ -358,6 +358,53 @@ 'state': '12', }) # --- +# name: test_states[sensor.my_device_last_water_leak_alert-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_last_water_leak_alert', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last water leak alert', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_alert_timestamp', + 'unique_id': '123456789ABCDEFGH_water_alert_timestamp', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_states[sensor.my_device_last_water_leak_alert-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my_device Last water leak alert', + }), + 'context': , + 'entity_id': 'sensor.my_device_last_water_leak_alert', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-24T09:03:11+00:00', + }) +# --- # name: test_states[sensor.my_device_on_since-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -546,7 +593,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr index 4354ea1905a02e..36c630474c8367 100644 --- a/tests/components/tplink/snapshots/test_switch.ambr +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -173,6 +173,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '123456789ABCDEFGH_child_lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Child lock', + }), + 'context': , + 'entity_id': 'switch.my_device_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_fan_sleep_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -265,6 +311,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_motion_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_motion_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion sensor', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pir_enabled', + 'unique_id': '123456789ABCDEFGH_pir_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_motion_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Motion sensor', + }), + 'context': , + 'entity_id': 'switch.my_device_motion_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_smooth_transitions-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/test_climate.py b/tests/components/tplink/test_climate.py index 
2f24fa829f9f74..3a54048e1d64e9 100644 --- a/tests/components/tplink/test_climate.py +++ b/tests/components/tplink/test_climate.py @@ -45,11 +45,11 @@ async def mocked_hub(hass: HomeAssistant) -> Device: features = [ _mocked_feature( - "temperature", value=20, category=Feature.Category.Primary, unit="celsius" + "temperature", value=20.2, category=Feature.Category.Primary, unit="celsius" ), _mocked_feature( "target_temperature", - value=22, + value=22.2, type_=Feature.Type.Number, category=Feature.Category.Primary, unit="celsius", @@ -94,8 +94,8 @@ async def test_climate( state = hass.states.get(ENTITY_ID) assert state.attributes[ATTR_HVAC_ACTION] is HVACAction.HEATING - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20 - assert state.attributes[ATTR_TEMPERATURE] == 22 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.2 + assert state.attributes[ATTR_TEMPERATURE] == 22.2 async def test_states( diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index 40bd4383513ebc..2697696c6679ef 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -2,7 +2,7 @@ from contextlib import contextmanager import logging -from unittest.mock import AsyncMock, patch +from unittest.mock import ANY, AsyncMock, patch from kasa import TimeoutError import pytest @@ -30,6 +30,7 @@ CONF_HOST, CONF_MAC, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant @@ -665,6 +666,93 @@ async def test_manual_auth_errors( await hass.async_block_till_done() +@pytest.mark.parametrize( + ("host_str", "host", "port"), + [ + (f"{IP_ADDRESS}:1234", IP_ADDRESS, 1234), + ("[2001:db8:0::1]:4321", "2001:db8:0::1", 4321), + ], +) +async def test_manual_port_override( + hass: HomeAssistant, + mock_connect: AsyncMock, + mock_discovery: AsyncMock, + host_str, + host, + port, +) -> None: + """Test manually setup.""" + mock_discovery["mock_device"].config.port_override = port + mock_discovery["mock_device"].host = host + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + # side_effects to cause auth confirm as the port override usually only + # works with direct connections. 
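The new test_manual_port_override cases above submit host strings that carry an explicit port, including the bracketed IPv6 form, and expect the flow to hand discovery the bare host plus a port override; test_manual_port_override_invalid below checks that a non-numeric suffix simply drops the port. A rough sketch of such splitting logic, written only to make the parametrized cases concrete (it is not the integration's actual parser):

def split_host_port(host_str: str) -> tuple[str, int | None]:
    """Split an optional trailing port off a host string (sketch only)."""
    if host_str.startswith("["):
        # Bracketed IPv6 literal, e.g. "[2001:db8:0::1]:4321"
        host, _, rest = host_str[1:].partition("]")
        port = int(rest[1:]) if rest.startswith(":") and rest[1:].isdigit() else None
        return host, port
    host, sep, port = host_str.rpartition(":")
    if sep and port.isdigit():
        return host, int(port)  # "127.0.0.1:1234" -> ("127.0.0.1", 1234)
    if sep:
        return host, None  # "127.0.0.1:foo" -> ("127.0.0.1", None)
    return host_str, None  # "127.0.0.1" -> ("127.0.0.1", None)


# Matches the cases the tests exercise; unbracketed IPv6 without a port is
# deliberately out of scope here, since the tests only use the bracketed form.
assert split_host_port("127.0.0.1:1234") == ("127.0.0.1", 1234)
assert split_host_port("[2001:db8:0::1]:4321") == ("2001:db8:0::1", 4321)
assert split_host_port("127.0.0.1:foo") == ("127.0.0.1", None)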
+ mock_discovery["discover_single"].side_effect = TimeoutError + mock_connect["connect"].side_effect = AuthenticationError + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: host_str} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "user_auth_confirm" + assert not result2["errors"] + + creds = Credentials("fake_username", "fake_password") + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + mock_discovery["try_connect_all"].assert_called_once_with( + host, credentials=creds, port=port, http_client=ANY + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == DEFAULT_ENTRY_TITLE + assert result3["data"] == { + **CREATE_ENTRY_DATA_KLAP, + CONF_PORT: port, + CONF_HOST: host, + } + assert result3["context"]["unique_id"] == MAC_ADDRESS + + +async def test_manual_port_override_invalid( + hass: HomeAssistant, mock_connect: AsyncMock, mock_discovery: AsyncMock +) -> None: + """Test manually setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: f"{IP_ADDRESS}:foo"} + ) + await hass.async_block_till_done() + + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.1", credentials=None, port=None + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == DEFAULT_ENTRY_TITLE + assert result2["data"] == CREATE_ENTRY_DATA_KLAP + assert result2["context"]["unique_id"] == MAC_ADDRESS + + async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: """Test we get the form with discovery and abort for dhcp source when we get both.""" @@ -1023,6 +1111,30 @@ async def test_dhcp_discovery_with_ip_change( assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" +async def test_dhcp_discovery_discover_fail( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test dhcp discovery source cannot discover_single.""" + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + + with override_side_effect(mock_discovery["discover_single"], TimeoutError): + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip="127.0.0.2", macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS + ), + ) + assert discovery_result["type"] is FlowResultType.ABORT + assert discovery_result["reason"] == "cannot_connect" + + async def test_reauth( hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, @@ -1048,7 +1160,7 @@ async def test_reauth( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -1057,6 +1169,76 @@ async def test_reauth( await hass.async_block_till_done() 
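Several of these flows (test_dhcp_discovery_discover_fail above, the reauth and discovery-timeout tests that follow) inject failures through override_side_effect(...). That helper is not shown in this diff; assuming it is the usual scoped side_effect swap, a minimal sketch would be:

from collections.abc import Iterator
from contextlib import contextmanager
from typing import Any
from unittest.mock import AsyncMock


@contextmanager
def override_side_effect(mock: AsyncMock, effect: Any) -> Iterator[AsyncMock]:
    """Temporarily replace a mock's side_effect, restoring it on exit (sketch)."""
    original = mock.side_effect
    mock.side_effect = effect
    try:
        yield mock
    finally:
        mock.side_effect = original

Restoring the previous side_effect on exit is what lets a single fixture-level mock such as mock_discovery["discover_single"] fail for one flow step and behave normally for the next within the same test.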
+async def test_reauth_try_connect_all( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + assert mock_added_config_entry.state is ConfigEntryState.LOADED + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + with override_side_effect(mock_discovery["discover_single"], TimeoutError): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + credentials = Credentials("fake_username", "fake_password") + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.1", credentials=credentials, port=None + ) + mock_discovery["try_connect_all"].assert_called_once() + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + + await hass.async_block_till_done() + + +async def test_reauth_try_connect_all_fail( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + assert mock_added_config_entry.state is ConfigEntryState.LOADED + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + with ( + override_side_effect(mock_discovery["discover_single"], TimeoutError), + override_side_effect(mock_discovery["try_connect_all"], lambda *_, **__: None), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + credentials = Credentials("fake_username", "fake_password") + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.1", credentials=credentials, port=None + ) + mock_discovery["try_connect_all"].assert_called_once() + assert result2["errors"] == {"base": "cannot_connect"} + + async def test_reauth_update_with_encryption_change( hass: HomeAssistant, mock_discovery: AsyncMock, @@ -1120,7 +1302,7 @@ async def test_reauth_update_with_encryption_change( assert "Connection type changed for 127.0.0.2" in caplog.text credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.2", credentials=credentials + "127.0.0.2", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -1322,7 +1504,7 @@ async def test_reauth_errors( credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.FORM @@ -1340,7 +1522,7 @@ async def test_reauth_errors( ) mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() @@ -1398,7 +1580,7 @@ async def test_pick_device_errors( assert 
result4["context"]["unique_id"] == MAC_ADDRESS -async def test_discovery_timeout_connect( +async def test_discovery_timeout_try_connect_all( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, @@ -1424,7 +1606,7 @@ async def test_discovery_timeout_connect( assert mock_connect["connect"].call_count == 1 -async def test_discovery_timeout_connect_legacy_error( +async def test_discovery_timeout_try_connect_all_needs_creds( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, @@ -1446,8 +1628,57 @@ async def test_discovery_timeout_connect_legacy_error( result["flow_id"], {CONF_HOST: IP_ADDRESS} ) await hass.async_block_till_done() + assert result2["step_id"] == "user_auth_confirm" assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["context"]["unique_id"] == MAC_ADDRESS + assert mock_connect["connect"].call_count == 1 + + +async def test_discovery_timeout_try_connect_all_fail( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + mock_init, +) -> None: + """Test discovery tries legacy connect on timeout.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + mock_discovery["discover_single"].side_effect = TimeoutError + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + assert mock_connect["connect"].call_count == 0 + + with override_side_effect(mock_connect["connect"], KasaException): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS} + ) + await hass.async_block_till_done() + assert result2["step_id"] == "user_auth_confirm" + assert result2["type"] is FlowResultType.FORM + + with override_side_effect(mock_discovery["try_connect_all"], lambda *_, **__: None): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + assert result3["errors"] == {"base": "cannot_connect"} assert mock_connect["connect"].call_count == 1 @@ -1500,7 +1731,7 @@ async def test_reauth_update_other_flows( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT diff --git a/tests/components/tplink_omada/conftest.py b/tests/components/tplink_omada/conftest.py index 510a2e7a87c540..b9bdb5ef94a959 100644 --- a/tests/components/tplink_omada/conftest.py +++ b/tests/components/tplink_omada/conftest.py @@ -163,21 +163,10 @@ def mock_omada_clients_only_client( @pytest.fixture async def init_integration( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_omada_client: MagicMock, ) -> MockConfigEntry: """Set up the TP-Link Omada integration for testing.""" - mock_config_entry = MockConfigEntry( - title="Test Omada Controller", - domain=DOMAIN, - data={ - CONF_HOST: "127.0.0.1", - 
CONF_PASSWORD: "mocked-password", - CONF_USERNAME: "mocked-user", - CONF_VERIFY_SSL: False, - CONF_SITE: "Default", - }, - unique_id="12345", - ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/tplink_omada/snapshots/test_sensor.ambr b/tests/components/tplink_omada/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..6c332eb9696ac7 --- /dev/null +++ b/tests/components/tplink_omada/snapshots/test_sensor.ambr @@ -0,0 +1,333 @@ +# serializer version: 1 +# name: test_entities[sensor.test_poe_switch_cpu_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_poe_switch_cpu_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cpu_usage', + 'unique_id': '54-AF-97-00-00-01_cpu_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_poe_switch_cpu_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch CPU usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_poe_switch_cpu_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_entities[sensor.test_poe_switch_device_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_poe_switch_device_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device status', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_status', + 'unique_id': '54-AF-97-00-00-01_device_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.test_poe_switch_device_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test PoE Switch Device status', + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_poe_switch_device_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'connected', + }) +# --- +# name: test_entities[sensor.test_poe_switch_memory_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.test_poe_switch_memory_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Memory usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mem_usage', + 'unique_id': '54-AF-97-00-00-01_mem_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_poe_switch_memory_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test PoE Switch Memory usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_poe_switch_memory_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_entities[sensor.test_router_cpu_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_router_cpu_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cpu_usage', + 'unique_id': 'AA-BB-CC-DD-EE-FF_cpu_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_router_cpu_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Router CPU usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_router_cpu_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16', + }) +# --- +# name: test_entities[sensor.test_router_device_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_router_device_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device status', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_status', + 'unique_id': 'AA-BB-CC-DD-EE-FF_device_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.test_router_device_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Router Device status', + 'options': list([ + 'disconnected', + 'connected', + 'pending', + 'heartbeat_missed', + 'isolated', + 'adopt_failed', + 'managed_externally', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_router_device_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'connected', + }) +# --- +# name: test_entities[sensor.test_router_memory_usage-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_router_memory_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Memory usage', + 'platform': 'tplink_omada', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mem_usage', + 'unique_id': 'AA-BB-CC-DD-EE-FF_mem_usage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entities[sensor.test_router_memory_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Router Memory usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_router_memory_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '47', + }) +# --- diff --git a/tests/components/tplink_omada/test_sensor.py b/tests/components/tplink_omada/test_sensor.py new file mode 100644 index 00000000000000..54df7c5bcad01a --- /dev/null +++ b/tests/components/tplink_omada/test_sensor.py @@ -0,0 +1,117 @@ +"""Tests for TP-Link Omada sensor entities.""" + +from datetime import timedelta +import json +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion +from tplink_omada_client.definitions import DeviceStatus, DeviceStatusCategory +from tplink_omada_client.devices import OmadaGatewayPortStatus, OmadaListDevice + +from homeassistant.components.tplink_omada.const import DOMAIN +from homeassistant.components.tplink_omada.coordinator import POLL_DEVICES +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + +POLL_INTERVAL = timedelta(seconds=POLL_DEVICES) + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_omada_client: MagicMock, +) -> MockConfigEntry: + """Set up the TP-Link Omada integration for testing.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.tplink_omada.PLATFORMS", ["sensor"]): + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry + + +async def test_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the TP-Link Omada sensor entities.""" + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) + + +async def test_device_specific_status( + hass: HomeAssistant, + init_integration: MockConfigEntry, + mock_omada_site_client: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test a connection status is reported from known detailed status.""" + entity_id = "sensor.test_poe_switch_device_status" + entity = hass.states.get(entity_id) + assert entity is not None + assert entity.state == "connected" + + _set_test_device_status( + mock_omada_site_client, + DeviceStatus.ADOPT_FAILED.value, + DeviceStatusCategory.CONNECTED.value, + ) + + 
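# Why the next three steps are all needed (explanatory note, assuming the
# standard Home Assistant coordinator test setup): freezer.tick() only advances
# freezegun's frozen clock, async_fire_time_changed(hass) actually fires the
# callbacks scheduled for that new time (the coordinator's next poll after
# POLL_INTERVAL), and hass.async_block_till_done() waits for the resulting
# entity update to land before the new state is asserted.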
freezer.tick(POLL_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + entity = hass.states.get(entity_id) + assert entity.state == "adopt_failed" + + +async def test_device_category_status( + hass: HomeAssistant, + init_integration: MockConfigEntry, + mock_omada_site_client: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test a connection status is reported, with fallback to status category.""" + entity_id = "sensor.test_poe_switch_device_status" + entity = hass.states.get(entity_id) + assert entity is not None + assert entity.state == "connected" + + _set_test_device_status( + mock_omada_site_client, + DeviceStatus.PENDING_WIRELESS, + DeviceStatusCategory.PENDING.value, + ) + + freezer.tick(POLL_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + entity = hass.states.get(entity_id) + assert entity.state == "pending" + + +def _set_test_device_status( + mock_omada_site_client: MagicMock, + status: int, + status_category: int, +) -> OmadaGatewayPortStatus: + devices_data = json.loads(load_fixture("devices.json", DOMAIN)) + devices_data[1]["status"] = status + devices_data[1]["statusCategory"] = status_category + devices = [OmadaListDevice(d) for d in devices_data] + + mock_omada_site_client.get_devices.reset_mock() + mock_omada_site_client.get_devices.return_value = devices diff --git a/tests/components/traccar/test_init.py b/tests/components/traccar/test_init.py index 610e741f5f506f..fb90262a084147 100644 --- a/tests/components/traccar/test_init.py +++ b/tests/components/traccar/test_init.py @@ -11,9 +11,9 @@ from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.components.device_tracker.legacy import Device from homeassistant.components.traccar import DOMAIN, TRACKER_UPDATE -from homeassistant.config import async_process_ha_core_config from homeassistant.const import STATE_HOME, STATE_NOT_HOME from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import DATA_DISPATCHER diff --git a/tests/components/tradfri/test_config_flow.py b/tests/components/tradfri/test_config_flow.py index af2fdc22d2a7b4..5c06851782cc37 100644 --- a/tests/components/tradfri/test_config_flow.py +++ b/tests/components/tradfri/test_config_flow.py @@ -86,6 +86,10 @@ async def test_user_connection_timeout( assert result["errors"] == {"base": "timeout"} +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.tradfri.config.error.invalid_security_code"], +) async def test_user_connection_bad_key( hass: HomeAssistant, mock_auth, mock_entry_setup ) -> None: diff --git a/tests/components/tradfri/test_cover.py b/tests/components/tradfri/test_cover.py index 5aa4e75728d53f..59f3f8a956a715 100644 --- a/tests/components/tradfri/test_cover.py +++ b/tests/components/tradfri/test_cover.py @@ -8,8 +8,12 @@ from pytradfri.const import ATTR_REACHABLE_STATE from pytradfri.device import Device -from homeassistant.components.cover import ATTR_CURRENT_POSITION, DOMAIN as COVER_DOMAIN -from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + DOMAIN as COVER_DOMAIN, + CoverState, +) +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core 
import HomeAssistant from .common import CommandStore, setup_integration @@ -27,7 +31,7 @@ async def test_cover_available( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 assert state.attributes["model"] == "FYRTUR block-out roller blind" @@ -44,11 +48,11 @@ async def test_cover_available( @pytest.mark.parametrize( ("service", "service_data", "expected_state", "expected_position"), [ - ("set_cover_position", {"position": 100}, STATE_OPEN, 100), - ("set_cover_position", {"position": 0}, STATE_CLOSED, 0), - ("open_cover", {}, STATE_OPEN, 100), - ("close_cover", {}, STATE_CLOSED, 0), - ("stop_cover", {}, STATE_OPEN, 60), + ("set_cover_position", {"position": 100}, CoverState.OPEN, 100), + ("set_cover_position", {"position": 0}, CoverState.CLOSED, 0), + ("open_cover", {}, CoverState.OPEN, 100), + ("close_cover", {}, CoverState.CLOSED, 0), + ("stop_cover", {}, CoverState.OPEN, 60), ], ) async def test_cover_services( @@ -66,7 +70,7 @@ async def test_cover_services( state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 60 await hass.services.async_call( diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index a940f31f7f3e77..48162a17e2c4e9 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -329,7 +329,7 @@ async def test_reconfigure_flow( entry.add_to_hass(hass) result = await entry.start_reconfigure_flow(hass) - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -427,7 +427,7 @@ async def test_reconfigure_flow_error( ) await hass.async_block_till_done() - assert result2["step_id"] == "reconfigure_confirm" + assert result2["step_id"] == "reconfigure" assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {error_key: p_error} diff --git a/tests/components/trafikverket_weatherstation/test_config_flow.py b/tests/components/trafikverket_weatherstation/test_config_flow.py index c7f30ed8b375d1..f8a0f636718fa0 100644 --- a/tests/components/trafikverket_weatherstation/test_config_flow.py +++ b/tests/components/trafikverket_weatherstation/test_config_flow.py @@ -206,7 +206,7 @@ async def test_reconfigure_flow(hass: HomeAssistant) -> None: entry.add_to_hass(hass) result = await entry.start_reconfigure_flow(hass) - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -265,7 +265,7 @@ async def test_reconfigure_flow_fails( entry.add_to_hass(hass) result = await entry.start_reconfigure_flow(hass) - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/transmission/test_config_flow.py b/tests/components/transmission/test_config_flow.py index b318862047efdc..b724a91f7a1994 100644 --- a/tests/components/transmission/test_config_flow.py +++ b/tests/components/transmission/test_config_flow.py @@ -164,7 +164,10 @@ async def test_reauth_success(hass: HomeAssistant) -> None: assert result["type"] is 
FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "user"} + assert result["description_placeholders"] == { + "username": "user", + "name": "Mock Title", + } with patch( "homeassistant.components.transmission.async_setup_entry", @@ -194,7 +197,10 @@ async def test_reauth_failed(hass: HomeAssistant, mock_api: MagicMock) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "user"} + assert result["description_placeholders"] == { + "username": "user", + "name": "Mock Title", + } mock_api.side_effect = TransmissionAuthError() result2 = await hass.config_entries.flow.async_configure( @@ -222,7 +228,10 @@ async def test_reauth_failed_connection_error( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - assert result["description_placeholders"] == {"username": "user"} + assert result["description_placeholders"] == { + "username": "user", + "name": "Mock Title", + } mock_api.side_effect = TransmissionConnectError() result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/triggercmd/test_config_flow.py b/tests/components/triggercmd/test_config_flow.py index 51f3730ab1ace5..f12fcfef76840b 100644 --- a/tests/components/triggercmd/test_config_flow.py +++ b/tests/components/triggercmd/test_config_flow.py @@ -140,7 +140,7 @@ async def test_config_flow_connection_error(hass: HomeAssistant) -> None: ) assert result["errors"] == { - "base": "connection_error", + "base": "cannot_connect", } assert result["type"] is FlowResultType.FORM diff --git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index 16c24f006d733f..ddef3ee0c284bc 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -10,9 +10,9 @@ import pytest -from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from .common import ( DEFAULT_LANG, diff --git a/tests/components/tts/test_notify.py b/tests/components/tts/test_notify.py index 07ba2f2f3f5a07..00cdae2934f903 100644 --- a/tests/components/tts/test_notify.py +++ b/tests/components/tts/test_notify.py @@ -9,8 +9,8 @@ DOMAIN as DOMAIN_MP, SERVICE_PLAY_MEDIA, ) -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from .common import MockTTSEntity, mock_config_entry_setup diff --git a/tests/components/twilio/test_init.py b/tests/components/twilio/test_init.py index 8efa1c247420ff..9c07bd6f3d89ae 100644 --- a/tests/components/twilio/test_init.py +++ b/tests/components/twilio/test_init.py @@ -2,8 +2,8 @@ from homeassistant import config_entries from homeassistant.components import twilio -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, callback +from homeassistant.core_config import async_process_ha_core_config from homeassistant.data_entry_flow import FlowResultType from tests.typing import ClientSessionGenerator diff --git a/tests/components/twitch/conftest.py b/tests/components/twitch/conftest.py index 25e443c2778ae5..07732de1b0c692 100644 --- a/tests/components/twitch/conftest.py +++ 
b/tests/components/twitch/conftest.py @@ -111,8 +111,8 @@ def twitch_mock() -> Generator[AsyncMock]: mock_client.return_value.get_followed_channels.return_value = TwitchIterObject( "get_followed_channels.json", FollowedChannel ) - mock_client.return_value.get_streams.return_value = get_generator( - "get_streams.json", Stream + mock_client.return_value.get_followed_streams.return_value = get_generator( + "get_followed_streams.json", Stream ) mock_client.return_value.check_user_subscription.return_value = ( UserSubscription( diff --git a/tests/components/twitch/fixtures/check_user_subscription.json b/tests/components/twitch/fixtures/check_user_subscription.json index b1b2a3d852a6b2..5e710b72699101 100644 --- a/tests/components/twitch/fixtures/check_user_subscription.json +++ b/tests/components/twitch/fixtures/check_user_subscription.json @@ -1,3 +1,4 @@ { - "is_gift": true + "is_gift": true, + "tier": "2000" } diff --git a/tests/components/twitch/fixtures/check_user_subscription_2.json b/tests/components/twitch/fixtures/check_user_subscription_2.json index 94d56c5ee1281a..38a1f063f969dc 100644 --- a/tests/components/twitch/fixtures/check_user_subscription_2.json +++ b/tests/components/twitch/fixtures/check_user_subscription_2.json @@ -1,3 +1,4 @@ { - "is_gift": false + "is_gift": false, + "tier": "1000" } diff --git a/tests/components/twitch/fixtures/get_followed_channels.json b/tests/components/twitch/fixtures/get_followed_channels.json index 4add7cc0a98cea..990fac390e95b6 100644 --- a/tests/components/twitch/fixtures/get_followed_channels.json +++ b/tests/components/twitch/fixtures/get_followed_channels.json @@ -1,9 +1,11 @@ [ { + "broadcaster_id": 123, "broadcaster_login": "internetofthings", "followed_at": "2023-08-01" }, { + "broadcaster_id": 456, "broadcaster_login": "homeassistant", "followed_at": "2023-08-01" } diff --git a/tests/components/twitch/fixtures/get_streams.json b/tests/components/twitch/fixtures/get_followed_streams.json similarity index 55% rename from tests/components/twitch/fixtures/get_streams.json rename to tests/components/twitch/fixtures/get_followed_streams.json index 53330c9c82ee99..e02c594c4cccaf 100644 --- a/tests/components/twitch/fixtures/get_streams.json +++ b/tests/components/twitch/fixtures/get_followed_streams.json @@ -1,8 +1,10 @@ [ { + "user_id": 123, "game_name": "Good game", "title": "Title", "thumbnail_url": "stream-medium.png", - "started_at": "2021-03-10T03:18:11Z" + "started_at": "2021-03-10T03:18:11Z", + "viewer_count": 42 } ] diff --git a/tests/components/twitch/test_sensor.py b/tests/components/twitch/test_sensor.py index 8ce146adf07184..613c0919c49ae2 100644 --- a/tests/components/twitch/test_sensor.py +++ b/tests/components/twitch/test_sensor.py @@ -21,8 +21,8 @@ async def test_offline( hass: HomeAssistant, twitch_mock: AsyncMock, config_entry: MockConfigEntry ) -> None: """Test offline state.""" - twitch_mock.return_value.get_streams.return_value = get_generator_from_data( - [], Stream + twitch_mock.return_value.get_followed_streams.return_value = ( + get_generator_from_data([], Stream) ) await setup_integration(hass, config_entry) @@ -45,6 +45,7 @@ async def test_streaming( assert sensor_state.attributes["started_at"] == datetime( year=2021, month=3, day=10, hour=3, minute=18, second=11, tzinfo=tzutc() ) + assert sensor_state.attributes["viewers"] == 42 async def test_oauth_without_sub_and_follow( @@ -79,6 +80,7 @@ async def test_oauth_with_sub( sensor_state = hass.states.get(ENTITY_ID) assert sensor_state.attributes["subscribed"] is True 
assert sensor_state.attributes["subscription_is_gifted"] is False + assert sensor_state.attributes["subscription_tier"] == 1 assert sensor_state.attributes["following"] is False diff --git a/tests/components/unifi/snapshots/test_switch.ambr b/tests/components/unifi/snapshots/test_switch.ambr index 87b485adaf2acd..45e6188a3f4d80 100644 --- a/tests/components/unifi/snapshots/test_switch.ambr +++ b/tests/components/unifi/snapshots/test_switch.ambr @@ -1,1952 +1,4 @@ # serializer version: 1 -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_port_1_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Port 1 Power Cycle', - }), - 'context': , - 'entity_id': 'button.mock_name_port_1_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_port_2_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 2 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Port 2 Power Cycle', - }), - 'context': , - 'entity_id': 'button.mock_name_port_2_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_port_4_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Port 4 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Port 4 Power Cycle', - }), - 'context': , - 'entity_id': 'button.mock_name_port_4_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.mock_name_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_restart-10:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'mock-name Restart', - }), - 'context': , - 'entity_id': 'button.mock_name_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': 
set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.ssid_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:wifi-check', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'wlan-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'SSID 1', - 'icon': 'mdi:wifi-check', - }), - 'context': , - 'entity_id': 'switch.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_client_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'block-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'Block Client 1', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.block_client_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 
'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.ssid_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:wifi-check', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'wlan-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'SSID 1', - 'icon': 'mdi:wifi-check', - }), - 'context': , - 'entity_id': 'switch.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.unifi_network_plex', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:upload-network', - 'original_name': 'plex', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
None, - 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'UniFi Network plex', - 'icon': 'mdi:upload-network', - }), - 'context': , - 'entity_id': 'switch.unifi_network_plex', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.block_media_streaming', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:network', - 'original_name': 'Block Media Streaming', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '5f976f4ae3c58f018ec7dff6', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Block Media Streaming', - 'icon': 'mdi:network', - }), - 'context': , - 'entity_id': 'switch.block_media_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 2', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_2', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'USB Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-01:02:03:04:05:ff_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', - }), - 'context': , - 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 1 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 1 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_1_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 2 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_2', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 2 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_2_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:ethernet', - 'original_name': 'Port 4 PoE', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'poe-10:00:00:00:01:01_4', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'mock-name Port 4 PoE', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.mock_name_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.plug_outlet_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Outlet 1', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'outlet', - 'friendly_name': 'Plug Outlet 1', - }), - 'context': , - 'entity_id': 'switch.plug_outlet_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.ssid_1', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:wifi-check', - 'original_name': None, - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'wlan-012345678910111213141516', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'SSID 1', - 'icon': 'mdi:wifi-check', - }), - 'context': , - 'entity_id': 'switch.ssid_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.unifi_network_plex', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:upload-network', - 'original_name': 'plex', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'UniFi Network plex', - 'icon': 'mdi:upload-network', - }), - 'context': , - 'entity_id': 'switch.unifi_network_plex', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/unifi/snapshots/test_update.ambr b/tests/components/unifi/snapshots/test_update.ambr index 99a403a8f21cd4..405cb9d52a6563 100644 --- a/tests/components/unifi/snapshots/test_update.ambr +++ b/tests/components/unifi/snapshots/test_update.ambr @@ -37,6 +37,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 1', 'in_progress': False, @@ -47,6 +48,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.device_1', @@ -94,6 +96,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 2', 'in_progress': False, @@ -104,6 +107,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 
'context': , 'entity_id': 'update.device_2', @@ -151,6 +155,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 1', 'in_progress': False, @@ -161,6 +166,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.device_1', @@ -208,6 +214,7 @@ 'attributes': ReadOnlyDict({ 'auto_update': False, 'device_class': 'firmware', + 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', 'friendly_name': 'Device 2', 'in_progress': False, @@ -218,6 +225,7 @@ 'skipped_version': None, 'supported_features': , 'title': None, + 'update_percentage': None, }), 'context': , 'entity_id': 'update.device_2', diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 75a0beb23d9aa0..379f443923a27f 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -4,6 +4,7 @@ from unittest.mock import AsyncMock, Mock +import pytest from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import Camera as ProtectCamera, CameraChannel, StateType from uiprotect.exceptions import NvrError @@ -12,8 +13,13 @@ from homeassistant.components.camera import ( CameraEntityFeature, CameraState, + CameraWebRTCProvider, + RTCIceCandidate, + StreamType, + WebRTCSendMessage, async_get_image, async_get_stream_source, + async_register_webrtc_provider, ) from homeassistant.components.unifiprotect.const import ( ATTR_BITRATE, @@ -22,6 +28,7 @@ ATTR_HEIGHT, ATTR_WIDTH, DEFAULT_ATTRIBUTION, + DOMAIN, ) from homeassistant.components.unifiprotect.utils import get_camera_base_name from homeassistant.const import ( @@ -31,11 +38,12 @@ STATE_UNAVAILABLE, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .utils import ( + Camera, MockUFPFixture, adopt_devices, assert_entity_counts, @@ -46,6 +54,45 @@ ) +class MockWebRTCProvider(CameraWebRTCProvider): + """WebRTC provider.""" + + @property + def domain(self) -> str: + """Return the integration domain of the provider.""" + return DOMAIN + + @callback + def async_is_supported(self, stream_source: str) -> bool: + """Return if this provider is supports the Camera as source.""" + return True + + async def async_handle_async_webrtc_offer( + self, + camera: Camera, + offer_sdp: str, + session_id: str, + send_message: WebRTCSendMessage, + ) -> None: + """Handle the WebRTC offer and return the answer via the provided callback.""" + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidate + ) -> None: + """Handle the WebRTC candidate.""" + + @callback + def async_close_session(self, session_id: str) -> None: + """Close the session.""" + + +@pytest.fixture +async def web_rtc_provider(hass: HomeAssistant) -> None: + """Fixture to enable WebRTC provider for camera entities.""" + await async_setup_component(hass, "camera", {}) + async_register_webrtc_provider(hass, MockWebRTCProvider()) + + def validate_default_camera_entity( hass: HomeAssistant, camera_obj: ProtectCamera, @@ -149,7 +196,7 @@ async def validate_rtsps_camera_state( """Validate a camera's state.""" channel = 
camera_obj.channels[channel_id] - assert await async_get_stream_source(hass, entity_id) == channel.rtsps_url + assert await async_get_stream_source(hass, entity_id) == channel.rtsps_no_srtp_url validate_common_camera_state(hass, channel, entity_id, features) @@ -283,6 +330,26 @@ async def test_basic_setup( await validate_no_stream_camera_state(hass, doorbell, 3, entity_id, features=0) +@pytest.mark.usefixtures("web_rtc_provider") +async def test_webrtc_support( + hass: HomeAssistant, + ufp: MockUFPFixture, + camera_all: ProtectCamera, +) -> None: + """Test webrtc support is available.""" + camera_high_only = camera_all.copy() + camera_high_only.channels = [c.copy() for c in camera_all.channels] + camera_high_only.name = "Test Camera 1" + camera_high_only.channels[0].is_rtsp_enabled = True + camera_high_only.channels[1].is_rtsp_enabled = False + camera_high_only.channels[2].is_rtsp_enabled = False + await init_entry(hass, ufp, [camera_high_only]) + entity_id = validate_default_camera_entity(hass, camera_high_only, 0) + state = hass.states.get(entity_id) + assert state + assert StreamType.WEB_RTC in state.attributes["frontend_stream_type"] + + async def test_adopt( hass: HomeAssistant, ufp: MockUFPFixture, camera: ProtectCamera ) -> None: diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 60cd3150884a60..18944460ca5942 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -669,7 +669,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.RING, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -683,7 +683,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.MOTION, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -697,7 +697,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["person"], @@ -706,7 +706,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", } @@ -720,7 +720,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "person"], @@ -734,7 +734,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -748,7 +748,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -758,7 +758,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, 
"detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", } @@ -772,7 +772,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -782,7 +782,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -802,7 +802,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -812,7 +812,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -823,7 +823,7 @@ async def test_browse_media_recent_truncated( }, }, { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", }, @@ -837,7 +837,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle"], @@ -846,7 +846,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -870,7 +870,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_AUDIO_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["alrmSpeak"], diff --git a/tests/components/update/common.py b/tests/components/update/common.py index 70b69498f666f5..465812e6a3aa23 100644 --- a/tests/components/update/common.py +++ b/tests/components/update/common.py @@ -5,48 +5,16 @@ from homeassistant.components.update import UpdateEntity -from tests.common import MockEntity - _LOGGER = logging.getLogger(__name__) -class MockUpdateEntity(MockEntity, UpdateEntity): +class MockUpdateEntity(UpdateEntity): """Mock UpdateEntity class.""" - @property - def auto_update(self) -> bool: - """Indicate if the device or service has auto update enabled.""" - return self._handle("auto_update") - - @property - def installed_version(self) -> str | None: - """Version currently installed and in use.""" - return self._handle("installed_version") - - @property - def in_progress(self) -> bool | int | None: - """Update installation progress.""" - return self._handle("in_progress") - - @property - def latest_version(self) -> str | None: - """Latest version available for install.""" - return self._handle("latest_version") - - @property - def release_summary(self) -> str | None: - """Summary 
of the release notes or changelog.""" - return self._handle("release_summary") - - @property - def release_url(self) -> str | None: - """URL to the full release notes of the latest version available.""" - return self._handle("release_url") - - @property - def title(self) -> str | None: - """Title of the software.""" - return self._handle("title") + def __init__(self, **values: Any) -> None: + """Initialize an entity.""" + for key, val in values.items(): + setattr(self, f"_attr_{key}", val) def install(self, version: str | None, backup: bool, **kwargs: Any) -> None: """Install an update.""" @@ -54,10 +22,10 @@ def install(self, version: str | None, backup: bool, **kwargs: Any) -> None: _LOGGER.info("Creating backup before installing update") if version is not None: - self._values["installed_version"] = version + self._attr_installed_version = version _LOGGER.info("Installed update with version: %s", version) else: - self._values["installed_version"] = self.latest_version + self._attr_installed_version = self.latest_version _LOGGER.info("Installed latest update") def release_notes(self) -> str | None: diff --git a/tests/components/update/conftest.py b/tests/components/update/conftest.py index 759f243e8db7a6..eae5cc318da2f1 100644 --- a/tests/components/update/conftest.py +++ b/tests/components/update/conftest.py @@ -51,12 +51,24 @@ def mock_update_entities() -> list[MockUpdateEntity]: ), MockUpdateEntity( name="Update Already in Progress", - unique_id="update_already_in_progres", + unique_id="update_already_in_progress", installed_version="1.0.0", latest_version="1.0.1", - in_progress=50, + in_progress=True, supported_features=UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS, + update_percentage=50, + ), + MockUpdateEntity( + name="Update Already in Progress Float", + unique_id="update_already_in_progress_float", + installed_version="1.0.0", + latest_version="1.0.1", + in_progress=True, + supported_features=UpdateEntityFeature.INSTALL + | UpdateEntityFeature.PROGRESS, + update_percentage=0.25, + display_precision=2, ), MockUpdateEntity( name="Update No Install", diff --git a/tests/components/update/test_init.py b/tests/components/update/test_init.py index 6082e0ecfe7ca2..a35f7bb0f12126 100644 --- a/tests/components/update/test_init.py +++ b/tests/components/update/test_init.py @@ -18,6 +18,7 @@ ) from homeassistant.components.update.const import ( ATTR_AUTO_UPDATE, + ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, @@ -25,11 +26,15 @@ ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, ATTR_TITLE, + ATTR_UPDATE_PERCENTAGE, UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, + ATTR_FRIENDLY_NAME, + ATTR_SUPPORTED_FEATURES, CONF_PLATFORM, STATE_OFF, STATE_ON, @@ -88,6 +93,7 @@ async def test_update(hass: HomeAssistant) -> None: assert update.state == STATE_ON assert update.state_attributes == { ATTR_AUTO_UPDATE: False, + ATTR_DISPLAY_PRECISION: 0, ATTR_INSTALLED_VERSION: "1.0.0", ATTR_IN_PROGRESS: False, ATTR_LATEST_VERSION: "1.0.1", @@ -95,6 +101,7 @@ async def test_update(hass: HomeAssistant) -> None: ATTR_RELEASE_URL: "https://example.com", ATTR_SKIPPED_VERSION: None, ATTR_TITLE: "Title", + ATTR_UPDATE_PERCENTAGE: None, } # Test no update available @@ -541,10 +548,20 @@ async def test_entity_with_backup_support( assert "Installed update with version: 0.9.8" in caplog.text +@pytest.mark.parametrize( + ("entity_id", "expected_display_precision", 
"expected_update_percentage"), + [ + ("update.update_already_in_progress", 0, 50), + ("update.update_already_in_progress_float", 2, 0.25), + ], +) async def test_entity_already_in_progress( hass: HomeAssistant, mock_update_entities: list[MockUpdateEntity], caplog: pytest.LogCaptureFixture, + entity_id: str, + expected_display_precision: int, + expected_update_percentage: float, ) -> None: """Test update install already in progress.""" setup_test_component_platform(hass, DOMAIN, mock_update_entities) @@ -552,12 +569,14 @@ async def test_entity_already_in_progress( assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() - state = hass.states.get("update.update_already_in_progress") + state = hass.states.get(entity_id) assert state assert state.state == STATE_ON + assert state.attributes[ATTR_DISPLAY_PRECISION] == expected_display_precision assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "1.0.1" - assert state.attributes[ATTR_IN_PROGRESS] == 50 + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == expected_update_percentage with pytest.raises( HomeAssistantError, @@ -566,10 +585,20 @@ async def test_entity_already_in_progress( await hass.services.async_call( DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.update_already_in_progress"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) + # Check update percentage is suppressed when in_progress is False + entity = next( + entity for entity in mock_update_entities if entity.entity_id == entity_id + ) + entity._attr_in_progress = False + entity.async_write_ha_state() + state = hass.states.get(entity_id) + assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None + async def test_entity_without_progress_support( hass: HomeAssistant, @@ -997,3 +1026,85 @@ def version_is_newer(self, latest_version: str, installed_version: str) -> bool: assert update.installed_version == BETA assert update.latest_version == STABLE assert update.state == STATE_OFF + + +@pytest.mark.parametrize( + ("supported_features", "extra_expected_attributes"), + [ + ( + 0, + [ + {}, + {}, + {}, + {}, + {}, + {}, + {}, + ], + ), + ( + UpdateEntityFeature.PROGRESS, + [ + {ATTR_IN_PROGRESS: False}, + {ATTR_IN_PROGRESS: False}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 0}, + {ATTR_IN_PROGRESS: True}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 1}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 10}, + {ATTR_IN_PROGRESS: True, ATTR_UPDATE_PERCENTAGE: 100}, + ], + ), + ], +) +async def test_update_percentage_backwards_compatibility( + hass: HomeAssistant, + supported_features: UpdateEntityFeature, + extra_expected_attributes: list[dict], +) -> None: + """Test deriving update percentage from deprecated in_progress.""" + update = MockUpdateEntity() + + update._attr_installed_version = "1.0.0" + update._attr_latest_version = "1.0.1" + update._attr_name = "legacy" + update._attr_release_summary = "Summary" + update._attr_release_url = "https://example.com" + update._attr_supported_features = supported_features + update._attr_title = "Title" + + setup_test_component_platform(hass, DOMAIN, [update]) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) + await hass.async_block_till_done() + + expected_attributes = { + ATTR_AUTO_UPDATE: False, + ATTR_DISPLAY_PRECISION: 0, + ATTR_ENTITY_PICTURE: 
"https://brands.home-assistant.io/_/test/icon.png", + ATTR_FRIENDLY_NAME: "legacy", + ATTR_INSTALLED_VERSION: "1.0.0", + ATTR_IN_PROGRESS: False, + ATTR_LATEST_VERSION: "1.0.1", + ATTR_RELEASE_SUMMARY: "Summary", + ATTR_RELEASE_URL: "https://example.com", + ATTR_SKIPPED_VERSION: None, + ATTR_SUPPORTED_FEATURES: supported_features, + ATTR_TITLE: "Title", + ATTR_UPDATE_PERCENTAGE: None, + } + + state = hass.states.get("update.legacy") + assert state is not None + assert state.state == STATE_ON + assert state.attributes == expected_attributes | extra_expected_attributes[0] + + in_progress_list = [False, 0, True, 1, 10, 100] + + for i, in_progress in enumerate(in_progress_list): + update._attr_in_progress = in_progress + update.async_write_ha_state() + state = hass.states.get("update.legacy") + assert state.state == STATE_ON + assert ( + state.attributes == expected_attributes | extra_expected_attributes[i + 1] + ) diff --git a/tests/components/update/test_recorder.py b/tests/components/update/test_recorder.py index 0bd209ce1c2f4e..68e5f93a757a57 100644 --- a/tests/components/update/test_recorder.py +++ b/tests/components/update/test_recorder.py @@ -7,9 +7,11 @@ from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.update.const import ( + ATTR_DISPLAY_PRECISION, ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_RELEASE_SUMMARY, + ATTR_UPDATE_PERCENTAGE, DOMAIN, ) from homeassistant.const import ATTR_ENTITY_PICTURE, CONF_PLATFORM @@ -34,7 +36,9 @@ async def test_exclude_attributes( assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) await hass.async_block_till_done() state = hass.states.get("update.update_already_in_progress") - assert state.attributes[ATTR_IN_PROGRESS] == 50 + assert state.attributes[ATTR_DISPLAY_PRECISION] == 0 + assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 assert ( state.attributes[ATTR_ENTITY_PICTURE] == "https://brands.home-assistant.io/_/test/icon.png" @@ -52,7 +56,9 @@ async def test_exclude_attributes( assert len(states) >= 1 for entity_states in states.values(): for state in entity_states: + assert ATTR_DISPLAY_PRECISION not in state.attributes assert ATTR_ENTITY_PICTURE not in state.attributes assert ATTR_IN_PROGRESS not in state.attributes assert ATTR_RELEASE_SUMMARY not in state.attributes assert ATTR_INSTALLED_VERSION in state.attributes + assert ATTR_UPDATE_PERCENTAGE not in state.attributes diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index c69164264daa63..6cdf121d7e3886 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -41,7 +41,17 @@ 'status': 'collecting', 'tariff': 'tariff0', }), - 'last_sensor_data': None, + 'last_sensor_data': dict({ + 'last_period': '0', + 'last_reset': '2024-04-05T00:00:00+00:00', + 'last_valid_state': 3, + 'native_unit_of_measurement': 'kWh', + 'native_value': dict({ + '__type': "", + 'decimal_str': '3', + }), + 'status': 'collecting', + }), 'name': 'Energy Bill tariff0', 'period': 'monthly', 'source': 'sensor.input1', @@ -57,7 +67,17 @@ 'status': 'paused', 'tariff': 'tariff1', }), - 'last_sensor_data': None, + 'last_sensor_data': dict({ + 'last_period': '0', + 'last_reset': '2024-04-05T00:00:00+00:00', + 'last_valid_state': 7, + 
'native_unit_of_measurement': 'kWh', + 'native_value': dict({ + '__type': "", + 'decimal_str': '7', + }), + 'status': 'paused', + }), 'name': 'Energy Bill tariff1', 'period': 'monthly', 'source': 'sensor.input1', diff --git a/tests/components/utility_meter/test_config_flow.py b/tests/components/utility_meter/test_config_flow.py index 560566d7c4913c..612bfaa88d7a13 100644 --- a/tests/components/utility_meter/test_config_flow.py +++ b/tests/components/utility_meter/test_config_flow.py @@ -72,6 +72,10 @@ async def test_config_flow(hass: HomeAssistant, platform) -> None: assert config_entry.title == "Electricity meter" +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.utility_meter.config.error.tariffs_not_unique"], +) async def test_tariffs(hass: HomeAssistant) -> None: """Test tariffs.""" input_sensor_entity_id = "sensor.input" diff --git a/tests/components/utility_meter/test_diagnostics.py b/tests/components/utility_meter/test_diagnostics.py index 9ecabe813b1ca5..8be5f949940cd1 100644 --- a/tests/components/utility_meter/test_diagnostics.py +++ b/tests/components/utility_meter/test_diagnostics.py @@ -91,7 +91,17 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": 3, + "status": "collecting", + }, ), ( State( @@ -101,7 +111,17 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "7", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": 7, + "status": "paused", + }, ), ], ) diff --git a/tests/components/utility_meter/test_select.py b/tests/components/utility_meter/test_select.py index 61f6cbe75b9488..1f54f3b500a161 100644 --- a/tests/components/utility_meter/test_select.py +++ b/tests/components/utility_meter/test_select.py @@ -3,10 +3,72 @@ from homeassistant.components.utility_meter.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +async def test_select_entity_name_config_entry( + hass: HomeAssistant, +) -> None: + """Test for Utility Meter select platform.""" + + config_entry_config = { + "cycle": "none", + "delta_values": False, + "name": "Energy bill", + "net_consumption": False, + "offset": 0, + "periodically_resetting": True, + "source": "sensor.energy", + "tariffs": ["peak", "offpeak"], + } + + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + utility_meter_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options=config_entry_config, + title=config_entry_config["name"], + ) + + utility_meter_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(utility_meter_config_entry.entry_id) + + await hass.async_block_till_done() + + state = hass.states.get("select.energy_bill") + assert state is not None + assert state.attributes.get("friendly_name") == "Energy bill" + + +async def test_select_entity_name_yaml( + hass: HomeAssistant, +) -> None: + """Test for Utility Meter select platform.""" + + yaml_config = { + "utility_meter": { + "energy_bill": { + "name": "Energy bill", + "source": "sensor.energy", + "tariffs": ["peak", "offpeak"], + "unique_id": 
"1234abcd", + } + } + } + + assert await async_setup_component(hass, DOMAIN, yaml_config) + + await hass.async_block_till_done() + + state = hass.states.get("select.energy_bill") + assert state is not None + assert state.attributes.get("friendly_name") == "Energy bill" + + async def test_device_id( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index 745bf0ce012fa4..0ab78739f7f8a2 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ -26,7 +26,6 @@ ) from homeassistant.components.utility_meter.sensor import ( ATTR_LAST_RESET, - ATTR_LAST_VALID_STATE, ATTR_STATUS, COLLECTING, PAUSED, @@ -760,64 +759,6 @@ async def test_restore_state( "status": "paused", }, ), - # sensor.energy_bill_tariff2 has missing keys and falls back to - # saved state - ( - State( - "sensor.energy_bill_tariff2", - "2.1", - attributes={ - ATTR_STATUS: PAUSED, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - { - "native_value": { - "__type": "", - "decimal_str": "2.2", - }, - "native_unit_of_measurement": "kWh", - "last_valid_state": "None", - }, - ), - # sensor.energy_bill_tariff3 has invalid data and falls back to - # saved state - ( - State( - "sensor.energy_bill_tariff3", - "3.1", - attributes={ - ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - { - "native_value": { - "__type": "", - "decimal_str": "3f", # Invalid - }, - "native_unit_of_measurement": "kWh", - "last_valid_state": "None", - }, - ), - # No extra saved data, fall back to saved state - ( - State( - "sensor.energy_bill_tariff4", - "error", - attributes={ - ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - {}, - ), ], ) @@ -852,25 +793,6 @@ async def test_restore_state( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - state = hass.states.get("sensor.energy_bill_tariff2") - assert state.state == "2.1" - assert state.attributes.get("status") == PAUSED - assert state.attributes.get("last_reset") == last_reset_1 - assert state.attributes.get("last_valid_state") == "None" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - - state = hass.states.get("sensor.energy_bill_tariff3") - assert state.state == "3.1" - assert state.attributes.get("status") == COLLECTING - assert state.attributes.get("last_reset") == last_reset_1 - assert state.attributes.get("last_valid_state") == "None" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - - state = hass.states.get("sensor.energy_bill_tariff4") - assert state.state == STATE_UNKNOWN - # utility_meter is loaded, now set sensors according to utility_meter: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -882,12 +804,7 @@ async def test_restore_state( state = hass.states.get("sensor.energy_bill_tariff0") assert state.attributes.get("status") == COLLECTING - for entity_id in ( - "sensor.energy_bill_tariff1", - 
"sensor.energy_bill_tariff2", - "sensor.energy_bill_tariff3", - "sensor.energy_bill_tariff4", - ): + for entity_id in ("sensor.energy_bill_tariff1",): state = hass.states.get(entity_id) assert state.attributes.get("status") == PAUSED @@ -939,7 +856,18 @@ async def test_service_reset_no_tariffs( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": None, + "status": "collecting", + "input_device_class": "energy", + }, ), ], ) @@ -1045,21 +973,33 @@ async def test_service_reset_no_tariffs_correct_with_multi( State( "sensor.energy_bill", "3", - attributes={ - ATTR_LAST_RESET: last_reset, - }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "status": "collecting", + }, ), ( State( "sensor.water_bill", "6", - attributes={ - ATTR_LAST_RESET: last_reset, - }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "6", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "status": "collecting", + }, ), ], ) @@ -1804,6 +1744,26 @@ async def test_self_reset_hourly_dst(hass: HomeAssistant) -> None: ) +async def test_self_reset_hourly_dst2(hass: HomeAssistant) -> None: + """Test weekly reset of meter in DST change conditions.""" + + hass.config.time_zone = "Europe/Berlin" + dt_util.set_default_time_zone(dt_util.get_time_zone(hass.config.time_zone)) + await _test_self_reset( + hass, gen_config("daily"), "2024-10-26T23:59:00.000000+02:00" + ) + + state = hass.states.get("sensor.energy_bill") + last_reset = dt_util.parse_datetime("2024-10-27T00:00:00.000000+02:00") + assert ( + dt_util.as_local(dt_util.parse_datetime(state.attributes.get("last_reset"))) + == last_reset + ) + + next_reset = dt_util.parse_datetime("2024-10-28T00:00:00.000000+01:00").isoformat() + assert state.attributes.get("next_reset") == next_reset + + async def test_self_reset_daily(hass: HomeAssistant) -> None: """Test daily reset of meter.""" await _test_self_reset( diff --git a/tests/components/vallox/conftest.py b/tests/components/vallox/conftest.py index 590dbc8de4b5f6..b652940930088d 100644 --- a/tests/components/vallox/conftest.py +++ b/tests/components/vallox/conftest.py @@ -81,7 +81,7 @@ async def init_reconfigure_flow( result = await mock_entry.start_reconfigure_flow(hass) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" + assert result["step_id"] == "reconfigure" # original entry assert mock_entry.data["host"] == "192.168.100.50" diff --git a/tests/components/vesync/snapshots/test_fan.ambr b/tests/components/vesync/snapshots/test_fan.ambr index 21985afd7bff02..60af4ae3d5be9b 100644 --- a/tests/components/vesync/snapshots/test_fan.ambr +++ b/tests/components/vesync/snapshots/test_fan.ambr @@ -67,7 +67,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': 'air-purifier', 'unit_of_measurement': None, }), @@ -158,7 +158,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': 'asd_sdfKIHG7IJHGwJGJ7GJ_ag5h3G55', 'unit_of_measurement': None, }), @@ -256,7 +256,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 
'translation_key': 'vesync', 'unique_id': '400s-purifier', 'unit_of_measurement': None, }), @@ -355,7 +355,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': '600s-purifier', 'unit_of_measurement': None, }), diff --git a/tests/components/vicare/snapshots/test_sensor.ambr b/tests/components/vicare/snapshots/test_sensor.ambr index ed4caf8ea79596..793f3e87611592 100644 --- a/tests/components/vicare/snapshots/test_sensor.ambr +++ b/tests/components/vicare/snapshots/test_sensor.ambr @@ -548,7 +548,7 @@ 'state': '7.843', }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_this_year-entry] +# name: test_all_entities[sensor.model0_electricity_consumption_this_year-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -562,7 +562,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'entity_id': 'sensor.model0_electricity_consumption_this_year', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -574,7 +574,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Energy consumption this year', + 'original_name': 'Electricity consumption this year', 'platform': 'vicare', 'previous_unique_id': None, 'supported_features': 0, @@ -583,23 +583,23 @@ 'unit_of_measurement': , }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_this_year-state] +# name: test_all_entities[sensor.model0_electricity_consumption_this_year-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'model0 Energy consumption this year', + 'friendly_name': 'model0 Electricity consumption this year', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'entity_id': 'sensor.model0_electricity_consumption_this_year', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '207.106', }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_today-entry] +# name: test_all_entities[sensor.model0_electricity_consumption_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -613,7 +613,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.model0_energy_consumption_today', + 'entity_id': 'sensor.model0_electricity_consumption_today', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -625,7 +625,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Energy consumption today', + 'original_name': 'Electricity consumption today', 'platform': 'vicare', 'previous_unique_id': None, 'supported_features': 0, @@ -634,16 +634,16 @@ 'unit_of_measurement': , }) # --- -# name: test_all_entities[sensor.model0_energy_consumption_today-state] +# name: test_all_entities[sensor.model0_electricity_consumption_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'model0 Energy consumption today', + 'friendly_name': 'model0 Electricity consumption today', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.model0_energy_consumption_today', + 'entity_id': 'sensor.model0_electricity_consumption_today', 'last_changed': , 'last_reported': , 'last_updated': , @@ -897,7 +897,7 @@ 'state': '20.8', }) # --- -# name: test_all_entities[sensor.model0_power_consumption_this_week-entry] +# name: 
test_all_entities[sensor.model0_electricity_consumption_this_week-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -911,7 +911,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.model0_power_consumption_this_week', + 'entity_id': 'sensor.model0_electricity_consumption_this_week', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -923,7 +923,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Power consumption this week', + 'original_name': 'Electricity consumption this week', 'platform': 'vicare', 'previous_unique_id': None, 'supported_features': 0, @@ -932,16 +932,16 @@ 'unit_of_measurement': , }) # --- -# name: test_all_entities[sensor.model0_power_consumption_this_week-state] +# name: test_all_entities[sensor.model0_electricity_consumption_this_week-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'model0 Power consumption this week', + 'friendly_name': 'model0 Electricity consumption this week', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.model0_power_consumption_this_week', + 'entity_id': 'sensor.model0_electricity_consumption_this_week', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py index 13d8255cf8d9ba..c411213f13e848 100644 --- a/tests/components/vicare/test_types.py +++ b/tests/components/vicare/test_types.py @@ -39,7 +39,7 @@ async def test_ha_preset_to_heating_program( ha_preset: str | None, expected_result: str | None, ) -> None: - """Testing HA Preset tp ViCare HeatingProgram.""" + """Testing HA Preset to ViCare HeatingProgram.""" supported_programs = [ HeatingProgram.COMFORT, @@ -52,6 +52,17 @@ async def test_ha_preset_to_heating_program( ) +async def test_ha_preset_to_heating_program_error() -> None: + """Testing HA Preset to ViCare HeatingProgram.""" + + supported_programs = [ + "test", + ] + assert ( + HeatingProgram.from_ha_preset(HeatingProgram.NORMAL, supported_programs) is None + ) + + @pytest.mark.parametrize( ("vicare_mode", "expected_result"), [ diff --git a/tests/components/vilfo/test_config_flow.py b/tests/components/vilfo/test_config_flow.py index c4fdb2fe22ceee..24739f509e4daa 100644 --- a/tests/components/vilfo/test_config_flow.py +++ b/tests/components/vilfo/test_config_flow.py @@ -150,6 +150,10 @@ async def test_form_exceptions( assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.vilfo.config.error.wrong_host"], +) async def test_form_wrong_host( hass: HomeAssistant, mock_is_valid_host: AsyncMock, diff --git a/tests/components/vlc_telnet/test_config_flow.py b/tests/components/vlc_telnet/test_config_flow.py index d29a2c06beb623..a4b559bbe1b775 100644 --- a/tests/components/vlc_telnet/test_config_flow.py +++ b/tests/components/vlc_telnet/test_config_flow.py @@ -9,10 +9,10 @@ import pytest from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.vlc_telnet.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry diff --git a/tests/components/vodafone_station/const.py b/tests/components/vodafone_station/const.py index 
1b3d36def03ae1..9adf32b339d6af 100644 --- a/tests/components/vodafone_station/const.py +++ b/tests/components/vodafone_station/const.py @@ -1,5 +1,7 @@ """Common stuff for Vodafone Station tests.""" +from aiovodafone.api import VodafoneStationDevice + from homeassistant.components.vodafone_station.const import DOMAIN from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME @@ -16,3 +18,98 @@ } MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] + + +DEVICE_DATA_QUERY = { + "xx:xx:xx:xx:xx:xx": VodafoneStationDevice( + connected=True, + connection_type="wifi", + ip_address="192.168.1.10", + name="WifiDevice0", + mac="xx:xx:xx:xx:xx:xx", + type="laptop", + wifi="2.4G", + ) +} + +SENSOR_DATA_QUERY = { + "sys_serial_number": "M123456789", + "sys_firmware_version": "XF6_4.0.05.04", + "sys_bootloader_version": "0220", + "sys_hardware_version": "RHG3006 v1", + "omci_software_version": "\t\t1.0.0.1_41032\t\t\n", + "sys_uptime": "12:16:41", + "sys_cpu_usage": "97%", + "sys_reboot_cause": "Web Reboot", + "sys_memory_usage": "51.94%", + "sys_wireless_driver_version": "17.10.188.75;17.10.188.75", + "sys_wireless_driver_version_5g": "17.10.188.75;17.10.188.75", + "vf_internet_key_online_since": "", + "vf_internet_key_ip_addr": "0.0.0.0", + "vf_internet_key_system": "0.0.0.0", + "vf_internet_key_mode": "Auto", + "sys_voip_version": "v02.01.00_01.13a\n", + "sys_date_time": "20.10.2024 | 03:44 pm", + "sys_build_time": "Sun Jun 23 17:55:49 CST 2024\n", + "sys_model_name": "RHG3006", + "inter_ip_address": "1.1.1.1", + "inter_gateway": "1.1.1.2", + "inter_primary_dns": "1.1.1.3", + "inter_secondary_dns": "1.1.1.4", + "inter_firewall": "601036", + "inter_wan_ip_address": "1.1.1.1", + "inter_ipv6_link_local_address": "", + "inter_ipv6_link_global_address": "", + "inter_ipv6_gateway": "", + "inter_ipv6_prefix_delegation": "", + "inter_ipv6_dns_address1": "", + "inter_ipv6_dns_address2": "", + "lan_ip_network": "192.168.0.1/24", + "lan_default_gateway": "192.168.0.1", + "lan_subnet_address_subnet1": "", + "lan_mac_address": "11:22:33:44:55:66", + "lan_dhcp_server": "601036", + "lan_dhcpv6_server": "601036", + "lan_router_advertisement": "601036", + "lan_ipv6_default_gateway": "fe80::1", + "lan_port1_switch_mode": "1301722", + "lan_port2_switch_mode": "1301722", + "lan_port3_switch_mode": "1301722", + "lan_port4_switch_mode": "1301722", + "lan_port1_switch_speed": "10", + "lan_port2_switch_speed": "100", + "lan_port3_switch_speed": "1000", + "lan_port4_switch_speed": "1000", + "lan_port1_switch_status": "1301724", + "lan_port2_switch_status": "1301724", + "lan_port3_switch_status": "1301724", + "lan_port4_switch_status": "1301724", + "wifi_status": "601036", + "wifi_name": "Wifi-Main-Network", + "wifi_mac_address": "AA:BB:CC:DD:EE:FF", + "wifi_security": "401027", + "wifi_channel": "8", + "wifi_bandwidth": "573", + "guest_wifi_status": "601037", + "guest_wifi_name": "Wifi-Guest", + "guest_wifi_mac_addr": "AA:BB:CC:DD:EE:GG", + "guest_wifi_security": "401027", + "guest_wifi_channel": "N/A", + "guest_wifi_ip": "192.168.2.1", + "guest_wifi_subnet_addr": "255.255.255.0", + "guest_wifi_dhcp_server": "192.168.2.1", + "wifi_status_5g": "601036", + "wifi_name_5g": "Wifi-Main-Network", + "wifi_mac_address_5g": "AA:BB:CC:DD:EE:HH", + "wifi_security_5g": "401027", + "wifi_channel_5g": "36", + "wifi_bandwidth_5g": "4803", + "guest_wifi_status_5g": "601037", + "guest_wifi_name_5g": "Wifi-Guest", + "guest_wifi_mac_addr_5g": "AA:BB:CC:DD:EE:II", + "guest_wifi_channel_5g": "N/A", + 
"guest_wifi_security_5g": "401027", + "guest_wifi_ip_5g": "192.168.2.1", + "guest_wifi_subnet_addr_5g": "255.255.255.0", + "guest_wifi_dhcp_server_5g": "192.168.2.1", +} diff --git a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000000..c258b14dc2d4f5 --- /dev/null +++ b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr @@ -0,0 +1,43 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'device_info': dict({ + 'client_devices': list([ + dict({ + 'connected': True, + 'connection_type': 'wifi', + 'hostname': 'WifiDevice0', + 'type': 'laptop', + }), + ]), + 'last_exception': None, + 'last_update success': True, + 'sys_cpu_usage': '97', + 'sys_firmware_version': 'XF6_4.0.05.04', + 'sys_hardware_version': 'RHG3006 v1', + 'sys_memory_usage': '51.94', + 'sys_model_name': 'RHG3006', + 'sys_reboot_cause': 'Web Reboot', + }), + 'entry': dict({ + 'data': dict({ + 'host': 'fake_host', + 'password': '**REDACTED**', + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'vodafone_station', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/vodafone_station/test_diagnostics.py b/tests/components/vodafone_station/test_diagnostics.py new file mode 100644 index 00000000000000..02918d8191226c --- /dev/null +++ b/tests/components/vodafone_station/test_diagnostics.py @@ -0,0 +1,51 @@ +"""Tests for Vodafone Station diagnostics platform.""" + +from __future__ import annotations + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.vodafone_station.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .const import DEVICE_DATA_QUERY, MOCK_USER_DATA, SENSOR_DATA_QUERY + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiovodafone.api.VodafoneStationSercommApi.login"), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_devices_data", + return_value=DEVICE_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_sensor_data", + return_value=SENSOR_DATA_QUERY, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props( + "entry_id", + "created_at", + "modified_at", + ) + ) diff --git a/tests/components/voip/snapshots/test_voip.ambr b/tests/components/voip/snapshots/test_voip.ambr index 935dbba51b8b3c..3cc64400419c0e 100644 --- a/tests/components/voip/snapshots/test_voip.ambr +++ b/tests/components/voip/snapshots/test_voip.ambr @@ -5,6 +5,3 @@ # name: test_pipeline_error 
  b'…'  # test_pipeline_error snapshot: raw audio byte payload (binary data elided, not human-readable)
\x01\xef\x00\xaa\x00\xc4\x00\x1b\x01\xfc\x00.\x00/\xffu\xfeD\xfe\xa0\xfe\x12\xff?\xffR\xff\xbd\xff\x9a\x00>\x01\x05\x01[\x00H\x00\x0f\x01\xd9\x01\xab\x01Q\x00\x96\xfe\x94\xfd\xaa\xfd,\xfe\x86\xfe\xff\xfe\xfb\xff\xff\x00\xff\x00\xc6\xffl\xfe%\xfe=\xff\xe9\x00\xf0\x01\x90\x01\x00\x00\xfd\xfd=\xfc\x13\xfb\xa4\xfa\x1d\xfb\xb4\xfcY\xff0\x02\r\x04\x89\x04\xf7\x03\x9e\x02\xb9\x00\xa7\xfe\x03\xfd\x81\xfc9\xfdo\xfe_\xff\xf0\xff\x8d\x00\xa1\x01\x03\x03\xd5\x03~\x03G\x02\xda\x00\x9b\xff\x90\xfe\x8e\xfdp\xfc}\xfbd\xfb\xa1\xfc\xe8\xfeI\x01\xda\x02K\x03\xf6\x02\\\x02\xa8\x01\xfd\x00\x80\x00$\x00\xf0\xff\xf7\xff\x04\x00 \x00u\x00\xd6\x00\xf2\x00\xef\x00\x1e\x01N\x012\x01\xe3\x00\xab\x00\x99\x00\x95\x00\x82\x00L\x00\xfc\xff\xb4\xffu\xff(\xff\x1e\xff\xcd\xff\xec\x00\x9a\x01^\x01\x0e\x01\xb7\x01\x04\x03\xad\x03\xec\x02\xfb\x00\xb6\xfe\x0f\xfd\x82\xfc\x04\xfd\x17\xfe>\xff\x11\x00\x8a\x00\xfb\x00q\x01\x18\x01<\xff\xf6\xfc\xba\xfcM\xffU\x02\x08\x03%\x01\xa6\xfe/\xfd\xc7\xfc\x16\xfdN\xfe~\x00\xc6\x02\xfb\x03\xfe\x03\xcd\x03\xeb\x03F\x03\xaa\x00\x10\xfd\xf1\xfa\xa5\xfb~\xfe\x95\x01\x81\x03,\x04\xfa\x03\xfd\x02i\x01\xb9\xffE\xfel\xfd\x92\xfd\x92\xfe\xa7\xff\x00\x00H\xff\xf0\xfd\xcd\xfc\x7f\xfc(\xfdp\xfed\xff\x87\xff\x97\xff#\x00\xc8\x00&\x01\\\x01\x7f\x01_\x01\xf2\x00\x82\x00+\x00\xd7\xff\xdc\xff\xc9\x00`\x02\x8f\x03\xbf\x03:\x03O\x02\x06\x01\x99\xffj\xfe\xb6\xfdi\xfd<\xfd#\xfd\x92\xfd\xd1\xfeA\x00\xc3\x00\xce\xff!\xfe\x17\xfdG\xfdJ\xfe\x96\xff\x02\x01T\x02\xc2\x02\xe0\x01\\\x00!\xff-\xfe+\xfd\xac\xfc\xa1\xfd\xa9\xff\xfe\x00\xc1\x00X\x00\x81\x01i\x034\x03\xd1\xff\x99\xfb\x7f\xf9N\xfa\xbe\xfc\x8c\xff]\x02\xa9\x04h\x05B\x047\x02\x91\x00\xd3\xff|\xff\x1b\xff\x1f\xff\x00\x007\x01\xa9\x01\xf9\x00\xdd\xfff\xff\xea\xff\xb5\x00\x02\x01\x8e\x00\x91\xff\x92\xfe\x0f\xfe\xf3\xfd\xb5\xfdc\xfd\xa2\xfdi\xfe\xef\xfe\xcd\xfe\x93\xfe8\xff\xcd\x00b\x02\x04\x035\x02#\x00\xb0\xfd\x19\xfcn\xfc\x9a\xfe\t\x01\x0f\x02\xa0\x01\xde\x00\x8f\x00\xbd\x00\x06\x01\xe9\x00Y\x00\xb7\xff7\xff%\xff\xea\xffT\x01h\x02Q\x020\x01\x03\x00\x7f\xffc\xff*\xff\xf9\xfeo\xff\x95\x00d\x01\x05\x01\x17\x00\xe5\xff\xee\x00\\\x02\xe6\x02\x0e\x02\x95\x00\x9d\xffz\xff\xf5\xff\x12\x01\xd1\x02\x9d\x04\xb0\x05\xae\x05[\x04\x8f\x01\r\xfe0\xfc\xa4\xfd|\x00\xff\x00\x1a\xfeC\xfac\xf8j\xf9l\xfc\xdd\xffF\x02\xa0\x02\xe3\x00}\xfe\x85\xfdH\xfe\xa8\xfe+\xfd\xc7\xfb\xf7\xfd\xa0\x03\xfc\x07\x02\x07>\x02\xea\xfe\xd8\xff4\x03\xe4\x05\x91\x06X\x05\x89\x02q\xfe\xe3\xf9>\xf6\x88\xf4\xe3\xf4\xde\xf6\xc2\xf9\xd3\xfcw\xff\x1f\x01*\x01l\xff\xe1\xfc(\xfb\x15\xfb*\xfc\x86\xfd\xc1\xfe\xd5\xff\xf0\x00w\x02<\x043\x05y\x04\xbb\x02\xf9\x01G\x03`\x05\xe3\x05\x9d\x03\xd8\xffn\xfd\xe8\xfd\t\x00\x82\x01\x82\x01\xec\x00\x96\x00\xbf\x00\xb8\x01\x90\x03\x9b\x05\xe4\x06\xb8\x06\xf0\x04E\x02\xee\xff\xc4\xfe\xf2\xfe\xf5\xff\xe2\x00\x13\x01\x98\x00\xcd\xff\xf3\xfew\xfe\xc5\xfeZ\xffd\xff\xd8\xfe\x18\xfe\x10\xfdw\xfb\r\xfa\x0e\xfb\xda\xff\x13\x06\x96\x08J\x04\x1d\xfc\xaf\xf6\xea\xf7\xdd\xfd\x16\x04M\x08B\n\x8a\t\xde\x05\'\x005\xfa\xd1\xf5[\xf4u\xf6\x08\xfbX\xff\x7f\x01(\x02\n\x03g\x04\xde\x04u\x03\xc6\x00\xfa\xfd\x9f\xfb\xfc\xf9B\xf9c\xf9c\xfar\xfc,\xffm\x01S\x02\r\x02\xab\x01\xa7\x01\xe3\x00)\xfeH\xfa\xcc\xf7\xc3\xf8\xef\xfc\xf6\x01s\x05\xcb\x06\xe2\x06\x85\x06(\x06g\x06\x9b\x07_\t\xd8\n.\x0b\xbe\t,\x06D\x01\\\xfdn\xfc\xfd\xfd\xc9\xffW\x00\xd1\xff\x1e\xff\xc2\xfe\x9d\xfe^\xfe\xdd\xfd?\xfd\xdb\xfc\xff\xfc\xc2\xfdw\xfe\x1b\xfeo\xfd#\xff\x05\x04V\x08\xd9\x07B\x03D\xff\t\xff\x8c\x012\x04\xe1\x05\x9f\x06\x95\x05d\x01\xaa\xfa\xe4\xf4\xc3\xf3\x0c\xf7X\xfa\xee\xf9\xcd\xf6C\xf5\x1b\xf8\xb1\xfd\xe2\x01\x8f\x02U\x01\\\
x00x\xffs\xfd\x89\xfaq\xf8\xb2\xf8\x7f\xfb\xd0\xff\xca\x03\xa2\x05*\x05\xfe\x03\xce\x03\xbc\x047\x05\x85\x03\x80\xff(\xfb\xfa\xf8y\xf9\x01\xfb<\xfcj\xfd\x0b\xff\xca\x00:\x02\xae\x03[\x05\xd4\x06|\x07\x95\x06\x8a\x03,\xff\x88\xfb?\xfa\xa7\xfb\xea\xfeg\x02\x85\x04\xd8\x04\xdb\x03C\x02\xd3\x00&\x00b\x00\x1b\x01\x9d\x01i\x01O\x00Q\xfe!\xfcs\xfb\xe4\xfd\xa2\x02\xfc\x05"\x05\xc0\x01\xcb\xffo\x00\xc8\x01\xfd\x02\xc8\x04l\x06\x9a\x05\xb0\x01\xb1\xfc\xe7\xf8\xfd\xf6\xa2\xf6\xb8\xf7\xf5\xf9\xbc\xfcE\xff\xc2\x00\xe3\x00.\x00N\xff\xa2\xfe-\xfeO\xfd\xfe\xfa\xb3\xf7\x8c\xf6\xe8\xf9\xa4\xff\xbc\x02\xad\x00#\xfc\xdb\xf9\x1b\xfcM\x01\xdb\x05\x12\x07\xce\x04\xd2\x00\x14\xfd{\xfa\x16\xf9T\xf9\xfd\xfb\xb8\x00\x8c\x05\x84\x08a\t6\t\xdc\x08\xaa\x08\xf1\x08z\t\xd3\x08|\x05\x19\x00C\xfb.\xf9\x15\xfa\x85\xfck\xfe\x9f\xfe\xe2\xfd\xc3\xfd\x9c\xfeJ\xff\xec\xfe\xc6\xfd\x86\xfc\xa0\xfb\x04\xfb1\xfav\xf9\xf1\xfaF\x00(\x07\xfb\n\x1d\n\x03\x07\x9f\x04\x06\x04%\x05\xb0\x07G\ns\n\xd5\x06\xad\x00\xe9\xfa\x90\xf7\x91\xf6\x11\xf7\xa2\xf8\xf4\xfat\xfda\xff\xfd\xff\x13\xff\x94\xfd)\xfdM\xfeB\xffI\xfe\xfa\xfb/\xfa\x0c\xfa\x96\xfbV\xfey\x01\xf1\x03!\x05;\x05\xc7\x04-\x04L\x03\xd2\x01\xd5\xff\x86\xfd\xff\xfa\xd3\xf8\xba\xf8\xc2\xfc\xda\x03\xb8\x08/\x07\x85\x01\xc6\xfd\xd9\xfe\xd9\x02\xda\x06y\t\x98\n\x10\nh\x07\xcc\x02\x91\xfd\xe8\xf9r\xf9\xed\xfb\x84\xffG\x02H\x03\xe4\x02\xf8\x01\'\x01\xc4\x00\xf3\x00\x8f\x01\xbc\x01\xf6\xff\xb2\xfb/\xf7z\xf6\t\xfb\xf7\x00\x03\x03\xe0\x00\x07\xff\x99\x00\x13\x04\x9d\x06O\x07\xe9\x05\xfd\x01\xa7\xfc4\xf8\xe5\xf5\x81\xf5\xb0\xf6\x87\xf9x\xfdH\x01\xdf\x03\xb4\x04\xce\x03\xeb\x01Z\x00\x1b\x00\xb3\x00\\\x00\x19\xfe\x1a\xfbq\xf9\xc6\xf9v\xfb\r\xfe"\x01\x8a\x03\x1d\x04\x17\x03\xd0\x01\r\x01m\x00M\xffy\xfd\xb7\xfb_\xfb\xe2\xfcO\xff\x96\x01>\x03\xe9\x03\xfb\x03\xf8\x04=\x07\x7f\x08\x11\x07\xb5\x04\t\x04+\x05\xba\x05s\x03\x9b\xfe(\xfa\xbe\xf9\x14\xfe\xd3\x02\x93\x02K\xfdc\xf8j\xf8\x8f\xfc\xd1\x00\xae\x02\xe6\x02J\x03\x90\x03q\x01b\xfc\x02\xf8\x9f\xf8\xa8\xfdN\x02\xaa\x03B\x03{\x03\xa3\x04\xf6\x05m\x07\xe1\x08\xe4\x08\xd4\x05\xbb\xffy\xf8\x83\xf2\xcd\xefU\xf1b\xf6\xff\xfbJ\xff0\x00|\x00\xc8\x00\xf2\xff\xa8\xfd7\xfb\x8f\xf9\x8c\xf8$\xf8p\xf8,\xf9J\xfaz\xfcQ\x00\xd3\x04\xc9\x07\xd7\x07\xc8\x05\xd7\x03q\x03\xd5\x03+\x03\x9b\x00K\xfdJ\xfb\xa4\xfb\xfe\xfd\x1a\x01m\x034\x04w\x04\x9c\x05\xfe\x06\xc8\x06\xf6\x04a\x03\x0e\x03\x02\x03\xc8\x01\xc5\xff\xab\xfe\xe5\xfe.\xff\xc9\xfe\x00\xfey\xfd\x00\xfe\xe6\xff\xc8\x01}\x01\xf9\xfe\xff\xfc\x0c\xfe\xc4\x00\xc7\x00\x05\xfc\x93\xf6a\xf7u\xff\x00\x07\t\x06-\xfeX\xf8\x14\xfa\xee\x00\xa3\x07\x96\x0b?\x0c\x90\tR\x04\x12\xfe)\xf8\xcb\xf3K\xf2d\xf4U\xf9*\xff\xc0\x03\x95\x05w\x04\xa0\x01\xeb\xfe\xe1\xfd\x8a\xfe>\xffR\xfe\xac\xfbt\xf82\xf6b\xf6\xc1\xf9\xcd\xfe\x14\x02\xe0\x01\xab\x00\xe6\x011\x05\xa2\x06{\x03;\xfd\x1a\xf8>\xf70\xfa\x0b\xfe\x0e\x01e\x03h\x05V\x07\x86\t\x1d\x0b&\n\xb4\x06\xfa\x03`\x04X\x06\\\x064\x03%\xff\x17\xfd\x8f\xfd\xe0\xfe\x80\xffO\xff,\xff\xc5\xff\xb6\x00\xf5\x000\x00N\xffX\xff*\x00s\x003\xffV\xfd\x1e\xfdj\xff*\x02\xc5\x02z\x01\xf9\x00\xf6\x02\xd9\x05\xa7\x06\xd3\x04\xad\x02\t\x02\x06\x02Y\x00t\xfc\xc6\xf8L\xf81\xfb_\xfe\xcf\xfe\xe7\xfc\xf2\xfb\x05\xfe\x97\x01\x9e\x03*\x03\xb6\x01\xab\x00\xf6\xff\xd8\xfe\xf9\xfc\xe0\xfa\xb8\xf9\xa3\xfa\xec\xfd?\x02E\x05\x92\x05\xef\x03I\x02\x9e\x01M\x01\x86\x00z\xff\xac\xfe\xe4\xfd\xc7\xfc\x0e\xfc\x02\xfd\xaf\xffX\x02Y\x03\xee\x02\xbc\x02\xbf\x03]\x05\x0e\x06\x9e\x04J\x01\xd0\xfd\x1d\xfc\xa7\xfc\n\xfe\xa3\xfe\x8b\xfe\x1c\xff\xbf\x00m\x02\xcf\x02r\x01+\xff\xba\xfdr\xfe\xb1\x006\x02?\x01A\xfe\x83\xfb\xef\xfad\xfcF\xfe\xa4\xff\xf2\
x00\xa5\x02\x0e\x04"\x04\xf4\x02\x95\x01\xa3\x00\xe0\xff\x06\xff\x1f\xfel\xfdE\xfd\xb0\xfd\x0f\xfe\xb7\xfd\x10\xfdu\xfdo\xff\x83\x01v\x01\x03\xff\xb5\xfc\x07\xfd$\xff\x1a\x00\xb5\xfe\x82\xfc\x9a\xfbu\xfc\n\xfel\xff\x85\x00\x88\x01\x87\x02|\x03\x1d\x04\x10\x04R\x03;\x02\x1e\x01\x11\x00\x07\xffL\xfe\x88\xfe\xbd\xff\xf3\x00\x81\x01\xe0\x01\x91\x027\x03)\x03c\x02M\x01Y\x00\xd3\xff\xba\xffo\xff$\xfe\x1d\xfc\x1e\xfb\x9d\xfc\xaa\xff\xb6\x01\xa9\x01\xd7\x00\xdf\x00\xb5\x01,\x02\x98\x01N\x00\x00\xff1\xfe\x0f\xfeh\xfe\xaa\xfeq\xfeb\xfe\x83\xff\x85\x01\xa5\x02\xc7\x01\xf3\xff\x1b\xff\xdc\xff \x01h\x01\x03\x00\x83\xfdR\xfb|\xfa\xed\xfa\xfb\xfb5\xfd\xb6\xfe\x88\x00\t\x02?\x02\xf5\x000\xff\'\xfe2\xfe\xd7\xfe|\xff\xb3\xffJ\xffU\xfe<\xfd\xc7\xfc\xa1\xfd\xad\xff\xd0\x01\xd6\x02q\x02V\x01\x95\x00\xbe\x00Q\x01\x1b\x01\x88\xff\xab\xfdY\xfd\r\xff8\x01-\x02(\x02\x8d\x02\x97\x030\x04\xdc\x03K\x03\x03\x03\xa0\x02\xa6\x018\x00\xe3\xfe\x0b\xfe\xc8\xfd2\xfe)\xff)\x00\xc5\x00\x10\x01\x1d\x01\xb7\x00\xe1\xff*\xff\x17\xff\x9a\xff#\x00\x1d\x00{\xff\xaf\xfeD\xfeh\xfe\xd7\xfed\xff\x0e\x00\xee\x00\x01\x02\xd3\x02\xe5\x029\x02G\x01`\x00T\xff\x15\xfe\x1f\xfd\x0c\xfd\xf4\xfdA\xffD\x00\xc2\x00\xa7\x00\x0b\x00`\xff>\xff\xd3\xff\xbc\x00.\x01\x9a\x00D\xff\xe2\xfd\xbf\xfc\xdb\xfb\xb5\xfb\x19\xfd\xdf\xff\x9c\x02\xdd\x03\x95\x03\xb3\x02\xca\x01\xcc\x00\xda\xff^\xffk\xff\x98\xff\xaf\xff\n\x00\xad\x00\xf6\x00\xdd\x006\x01@\x02\xfb\x02\x87\x02d\x01\xbc\x00\xbf\x00r\x00F\xff\xfd\xfd\x9e\xfdV\xfet\xff\x15\x008\x00\xa4\x00\xb1\x01\xa1\x02\x9c\x02\xde\x01O\x01h\x01\xab\x01M\x01;\x009\xff\xf7\xfe`\xff\xb5\xffT\xff\xae\xfe\xd3\xfe\x18\x00\xa7\x017\x02&\x01H\xff#\xfe`\xfe3\xffH\xffA\xfe\x0c\xfd\xda\xfc\xbf\xfd\xc5\xfe?\xff\x87\xff\x12\x00\xe4\x00\xb5\x016\x02C\x02\xe4\x01\x14\x01\xe4\xff\x92\xfez\xfd\xfa\xfc>\xfd$\xfeM\xffd\x00<\x01\xb9\x01\xcf\x01\x93\x01\x00\x01;\x00\xa1\xff"\xffZ\xfeG\xfdt\xfc~\xfcu\xfd\xff\xfe\xcc\x00~\x02v\x03c\x03\xa3\x02\xd1\x01 \x01C\x00\'\xffO\xfe\x16\xfe&\xfe\x1a\xfe!\xfe\xab\xfe\xc0\xff\xf9\x00\xc5\x01\xce\x01=\x01\xa3\x00m\x00d\x00\xf7\xff\xff\xfe\x12\xfe\xdd\xfd\x8b\xfe\x94\xff\x03\x00\xaa\xff\x89\xffv\x00\xd1\x01W\x02\xbf\x01\x04\x01\x04\x01\x83\x01\x86\x01x\x00\xc8\xfe\xaf\xfd\xb0\xfd\xff\xfd\xfc\xfd&\xfe)\xff\xd2\x00\x1e\x02/\x02\x1b\x01\xcb\xff0\xff\x82\xff-\x00p\x00\xeb\xff\xc1\xfeg\xfdg\xfc\x16\xfc\x93\xfc\xe2\xfd\xcb\xff\xb9\x01 \x03\xc2\x03\xad\x03\x04\x03\xd0\x01\x13\x00\x16\xfe\xc7\xfc\x0e\xfd\x91\xfe\xf6\xffs\x00\x86\x00#\x01{\x02\xa6\x03\xab\x03\x81\x02\xda\x00a\xff]\xfe\xae\xfd\xf0\xfc\xf2\xfb)\xfb\x89\xfb\x89\xfd]\x00\x8e\x02,\x03y\x02\x80\x01\x03\x01\r\x01\x18\x01\xaf\x00\xe6\xff 
\xff\xb0\xfe\xa7\xfe\xcc\xfe\xf4\xfeI\xff\xfb\xff\xcf\x00j\x01\xa3\x01{\x01\xe9\x00\x00\x00\t\xffP\xfe\xef\xfd\xf5\xfdR\xfe\xba\xfe\x18\xff\xd5\xff/\x01\xa2\x02;\x03Q\x02v\x00t\xff:\x00\xcf\x01\xb4\x02Y\x02(\x01\xc5\xffw\xfe\'\xfd\xef\xfbs\xfbU\xfcq\xfe\xc0\x006\x02\x80\x02\x06\x02K\x01\x90\x00\xfb\xff\x89\xff\x01\xffo\xfe8\xfee\xfe\xaa\xfe\x03\xff\xe7\xff\x82\x01-\x03\x1f\x043\x04\xa4\x03\x98\x02\xfd\x00\xc9\xfe\x93\xfc[\xfb\xa5\xfb\x14\xfd\xd8\xfe=\x00\x18\x01\xa1\x01\xf3\x01\t\x02\xef\x01\xa4\x010\x01\xaf\x00\x0e\x00(\xff%\xfed\xfd+\xfd}\xfd\x13\xfe\xeb\xfe\'\x01I\x04\xd0\x04I\x02\xf2\xffv\xff\xac\xffb\xff\xf2\xfe\xe4\xfe\x1b\xffL\xff_\xff{\xff\xf6\xff\xd8\x00\xd0\x01I\x02\xf4\x01g\x01@\x01D\x01\x00\x01~\x00$\x00\xf9\xff{\xffJ\xfe\xd2\xfc,\xfc;\xfd\x8f\xff\x8a\x01\x0e\x02\x7f\x01\xd8\x00\xc0\x00L\x01\xcc\x01\x82\x01l\x00\x12\xff\x1c\xfe\x01\xfe}\xfe\xd4\xfe\xf3\xfe\x9d\xff+\x01\xd4\x02\x8b\x03\x1a\x03\xec\x01\x8d\x00\x8d\xff\x01\xff\x8c\xfe\x02\xfe\x9f\xfd\x9a\xfd\x02\xfe\xd2\xfe\xe1\xff\xe6\x00\x9f\x01\x03\x02<\x02s\x02\x80\x02\x08\x02\xb8\x00\xb8\xfe\xe4\xfc9\xfc\xee\xfc[\xfe\xc4\xff\xe3\x00\xac\x01\x0e\x02\xff\x01\xa1\x01>\x01\xe3\x00Z\x00\xa4\xff?\xff~\xff\xc5\xffj\xff\x9f\xfeC\xfe\x10\xff\xf0\x00\xfb\x02\xca\x03\xc3\x02\x0e\x01\x15\x00\xf6\xff#\x00M\x00i\x00=\x00\x9e\xff\xc9\xfe\x18\xfe\xcb\xfd<\xfeS\xfft\x00C\x01\xbc\x01\xe8\x01\xd8\x01\x9c\x01\x1e\x01v\x00\xd9\xffV\xff\xde\xfea\xfe\x17\xfeg\xfev\xff\xc5\x00\x90\x01}\x01\xcd\x00E\x00u\x00\xf5\x00\x07\x01`\x006\xff\xf4\xfd\x10\xfd\xe6\xfc}\xfd\x88\xfe\xc1\xff\xe5\x00\xaf\x01\x1a\x02F\x02#\x02}\x01\x8d\x00\xad\xff\xe4\xfe<\xfe\xdd\xfd\xc2\xfd\xa8\xfd\xa2\xfd\x0e\xfe\xf2\xfe\xdd\xffS\x00D\x00\x1b\x00P\x00\xcb\x00\xed\x00:\x00\xf3\xfe\xc3\xfd&\xfdN\xfdK\xfe\x05\x00\xfc\x01F\x03P\x03|\x02\x92\x01\xea\x00O\x00\x9e\xff\x1e\xff\x0f\xff\x1d\xff\xb2\xfe\xad\xfd\xaa\xfc\x96\xfc\xad\xfd 
\xff\xf8\xff\x12\x00\xfe\xff?\x00\xcb\x00+\x01\xfc\x00b\x00\xbf\xffO\xff\x1b\xff\xfb\xfe\xcc\xfe\xb4\xfe\t\xff\xee\xff\x1d\x01\r\x02=\x02\x97\x01\x8e\x00\xad\xff%\xff\xc7\xfeT\xfe\xce\xfdu\xfd\x94\xfdJ\xfeY\xff_\x00\xf3\x00\xe6\x00r\x003\x00\x99\x00W\x01\x94\x01\xe7\x00\xb6\xff\xbc\xfem\xfe\x86\xfek\xfe9\xfe}\xfe:\xff\xfc\xffr\x00\xa9\x00\xbc\x00\xca\x00\xdc\x00\xed\x00\xde\x00\x96\x00\x06\x000\xffd\xfeI\xfe>\xff\xc0\x00\xbc\x01\xb9\x01J\x01;\x01\xaf\x01!\x02\x0f\x02K\x01\x13\x00\xee\xfe7\xfe\xe9\xfd\x05\xfe\xa7\xfe\xac\xff\xb9\x00\x96\x01\'\x02`\x02P\x02\x03\x02\x85\x01\xf5\x00b\x00\xc3\xff/\xff\xd7\xfe\xec\xfev\xff6\x00\xc4\x00\xf5\x00\x04\x01G\x01\xa8\x01\xd6\x01\x9b\x01\x03\x01J\x00\xb5\xffO\xff\x1d\xffS\xff\xf1\xff\xab\x00;\x01\xa3\x01\x0f\x02\x90\x02\xf2\x02\xca\x02\xe8\x01\xb1\x00\xd7\xff\xa7\xff\xca\xff\xaf\xff\x19\xffe\xfe7\xfe\xdb\xfe\xec\xff\x9c\x00L\x004\xffc\xfe\xc8\xfe?\x00\xa7\x01\x05\x02q\x01\xa7\x00!\x00\xdb\xff\xb7\xff\xb9\xff\xf4\xffZ\x00\xba\x00\x1f\x01\xb8\x01M\x02E\x02\x8b\x01\xbf\x00E\x00\xd9\xff\x17\xff\xfd\xfd\xf1\xfc\x83\xfc\x10\xfdp\xfe\xe1\xff\x9c\x00\x98\x00}\x00\xcb\x00F\x01B\x01s\x00+\xff\x01\xfe\x80\xfd\xd3\xfd\x9c\xfeR\xff\xa7\xff\xb7\xff\xe5\xffx\x00G\x01\xbe\x01{\x01\xb6\x00\xee\xffI\xff\x8e\xfe\xc2\xfdw\xfd#\xfeq\xff\x8e\x00\x11\x013\x01[\x01\xad\x01\xf5\x01\xe1\x01f\x01\xac\x00\xc7\xff\xd0\xfe$\xfe\x0e\xfeo\xfe\xda\xfe\x06\xff\x06\xff\x19\xff?\xff>\xff\x0c\xff\xf1\xfe\x1f\xfft\xff\xc8\xff\x13\x00E\x00\x15\x00\\\xffw\xfe,\xfe\xe3\xfe\x19\x00\xc2\x00\x81\x00\xfe\xff\x07\x00\xa0\x000\x01]\x01*\x01\xa9\x00\xd9\xff\xb4\xfer\xfd\x97\xfc\x9d\xfc\x92\xfd\xe3\xfe\xe7\xffi\x00\xad\x00\xf8\x00$\x01\xec\x00R\x00\xac\xffK\xff\x19\xff\xcf\xfeL\xfe\xd0\xfd\xc9\xfdn\xfe\x80\xffk\x00\xda\x00\xec\x00\xec\x00\xf7\x00\xe5\x00r\x00\x9d\xff\xac\xfe\n\xfe\x1b\xfe\xfa\xfe9\x000\x01\x8d\x01\x84\x01\x98\x01\x11\x02\xa5\x02\xc5\x025\x02&\x01\xf5\xff\x07\xff\x9e\xfe\xc7\xfeM\xff\xd0\xff\x05\x00\xf3\xff\xdc\xff\xd9\xff\xd1\xff\xb8\xff\xc3\xff4\x00\xf7\x00\x8c\x01\x8b\x01\xf9\x00C\x00\xde\xff\x02\x00\x7f\x00\xe6\x00\x13\x01*\x01a\x01\xc9\x01 
\x02\x04\x02h\x01\xba\x00`\x00(\x00\x9a\xff\xbc\xfe\x11\xfe\x0e\xfe\xa9\xfen\xff\xe1\xff\xea\xff\xea\xffJ\x00\xea\x00.\x01\xd0\x008\x00\xda\xff\xb3\xffz\xff\x12\xff\xc2\xfe\xf9\xfe\xbe\xff\x82\x00\xbd\x00\x83\x00Y\x00\x8e\x00\xda\x00\xc9\x00N\x00\xd2\xff\xb4\xff\xe1\xff\x00\x00\xdd\xff\xa3\xff\xa4\xff\t\x00\xaf\x006\x01f\x01^\x01l\x01\xa8\x01\xc5\x01U\x01I\x00\x1a\xffa\xfec\xfe\xeb\xfeu\xff\xa1\xff\x8d\xff\x82\xff\x98\xff\xbf\xff\xe3\xff\r\x00M\x00\x87\x00~\x00\x16\x00\x8b\xffO\xff\xa5\xffK\x00\xb7\x00\xc8\x00\xcc\x00\t\x01p\x01\xb3\x01\x97\x010\x01\xe2\x00\xe1\x00\xd0\x00&\x00\xfe\xfe&\xfe1\xfe\xd5\xfep\xff\xb0\xff\xbf\xff\xe1\xff&\x00`\x00U\x00\xe1\xff9\xff\xca\xfe\xbb\xfe\xcd\xfe\xc9\xfe\xc8\xfe\xfb\xfeV\xff\x90\xffj\xff\x00\xff\xd3\xfe?\xff\xf1\xffC\x00\x08\x00\xb5\xff\xae\xff\xce\xff\xaf\xffJ\xff\x0f\xffm\xffI\x00\xff\x00\n\x01\x95\x00E\x00u\x00\xdb\x00\xee\x00e\x00\x8d\xff\x04\xff"\xff\x9c\xff\xcc\xff~\xff\x1e\xff\r\xffF\xff~\xff\x8a\xff\x81\xff\xa1\xff\x11\x00\xb5\x00<\x01<\x01\x97\x00\xb3\xff:\xffd\xff\xd1\xff(\x00k\x00\xb2\x00\xde\x00\xd5\x00\xad\x00~\x00W\x00F\x00<\x00\x02\x00\x8e\xff6\xffQ\xff\xb4\xff\xe6\xff\xcb\xff\xb0\xff\xde\xffK\x00\xa7\x00\xc0\x00\xa2\x00y\x00^\x00%\x00\xa1\xff\xf2\xfe\x84\xfe\xa8\xfe>\xff\xcf\xff\xf4\xff\xa1\xffK\xff]\xff\xb7\xff\xe9\xff\xbd\xffx\xff\x81\xff\xdc\xff\x17\x00\xf5\xff\xb5\xff\xbd\xff\x19\x00w\x00y\x00\x19\x00\xc6\xff\xfa\xff\xa3\x00%\x01\x12\x01\x9a\x00"\x00\xbf\xff`\xff\x16\xff\x1d\xff|\xff\x00\x00b\x00]\x00\xf3\xff|\xffY\xff\x9b\xff\x0c\x00r\x00\xa5\x00\x9b\x00m\x00J\x00B\x002\x00\x0e\x00\x17\x00n\x00\xca\x00\xe4\x00\xcc\x00\xbb\x00\xcc\x00\xfb\x00(\x01%\x01\xd2\x00L\x00\xdc\xff\xaf\xff\xaf\xff\xa8\xff\x91\xff\x8c\xff\xa8\xff\xdc\xff\x0e\x00\'\x00)\x00!\x00\t\x00\xd8\xff\x82\xff\x1d\xff\xfc\xfeQ\xff\xe1\xffS\x00p\x00A\x00\x0e\x00\x1f\x00]\x00y\x00^\x00I\x00S\x00T\x00#\x00\xdb\xff\xc0\xff\x06\x00\x8b\x00\xd0\x00\x83\x00\xe8\xff\xa3\xff\xe9\xffG\x00E\x00\xf1\xff\xbb\xff\xe1\xff-\x000\x00\xcd\xffY\xffE\xff\xab\xff\x1c\x00\x1f\x00\xab\xff1\xff\x1f\xffx\xff\xf6\xffN\x00[\x00\x12\x00\xac\xff\x86\xff\xbf\xff\x12\x00<\x00G\x00B\x00\x1c\x00\xe4\xff\xd3\xff\xf6\xff/\x00j\x00\x9b\x00\x91\x00>\x00\xf0\xff\xf8\xff&\x00\x19\x00\xd5\xff\xaa\xff\xc0\xff\xf0\xff\x00\x00\xf2\xff\xfd\xff-\x00K\x00\x16\x00\x8f\xff\xf4\xfe\x9b\xfe\xc2\xfeV\xff\x00\x00O\x00\x14\x00\x88\xff\x1f\xff\x18\xffE\xffe\xff\x83\xff\xd4\xffJ\x00\x91\x00\x85\x00e\x00\x80\x00\xbc\x00\xd5\x00\xae\x00d\x00E\x00j\x00\x8e\x00]\x00\xf1\xff\xd8\xffK\x00\xcf\x00\xba\x00\x0b\x00P\xff\x19\xffw\xff\xf4\xff\t\x00\xaf\xffb\xff\x7f\xff\xdc\xff\x15\x00\xfc\xff\xc4\xff\xb9\xff\xf1\xff:\x00V\x00)\x00\xe9\xff\xed\xff+\x00M\x00\x1c\x00\xc8\xff\xb0\xff\xe6\xff7\x00l\x00q\x00L\x00\x07\x00\xd1\xff\xd6\xff\x03\x00\x1f\x00\x1e\x00\x19\x00\x13\x00\xf0\xff\xbc\xff\x9f\xff\xae\xff\xd4\xff\xe2\xff\xba\xffz\xffS\xffj\xff\xb5\xff\n\x00=\x009\x00\x06\x00\xc9\xff\xa0\xff\x91\xff\x9b\xff\xd0\xffC\x00\xc0\x00\xeb\x00\xa1\x00%\x00\xdf\xff\xfc\xffR\x00\x91\x00\x94\x00v\x00T\x00\x1b\x00\xb2\xffO\xffJ\xff\xb3\xff5\x00p\x00^\x00C\x00Q\x00\x81\x00\x9a\x00t\x00 
\x00\xde\xff\xe7\xff)\x00Y\x00W\x00R\x00\x88\x00\xdc\x00\xe5\x00m\x00\xb3\xff=\xff\\\xff\xe9\xffc\x00c\x00\x06\x00\xbf\xff\xd4\xff\x10\x00+\x00\x14\x00\xe9\xff\xbf\xff\xb2\xff\xdb\xff*\x00r\x00\x9b\x00\xb1\x00\x9e\x00:\x00\x95\xff\x12\xff\xfe\xfeA\xff\x97\xff\xc6\xff\xca\xff\xc4\xff\xc6\xff\xb5\xff\x8a\xffh\xffw\xff\xb6\xff\xed\xff\xdf\xff\x89\xff0\xff\x14\xff[\xff\xf1\xffy\x00\x9b\x00_\x00+\x00Q\x00\xb3\x00\xf8\x00\xe0\x00\x86\x001\x00\x08\x00\xea\xff\xbd\xff\x9b\xff\xae\xff\xfb\xffE\x00Y\x00?\x00\x0c\x00\xd6\xff\xb4\xff\xb7\xff\xc8\xff\xc6\xff\xb1\xff\x94\xffu\xffh\xff\x83\xff\xd1\xffD\x00\x9d\x00\x99\x00G\x00\x0c\x001\x00\x93\x00\xca\x00\x92\x00\x10\x00\xb9\xff\xd0\xff&\x00b\x00b\x00J\x009\x002\x00\x14\x00\xd4\xff\x9f\xff\xa6\xff\xf3\xffY\x00y\x00\x17\x00y\xff%\xff8\xfft\xff\x9b\xff\xa7\xff\xb6\xff\xd5\xff\xeb\xff\xe3\xff\xc3\xff\xb3\xff\xd5\xff\x0c\x00\xec\xffZ\xff\xd3\xfe\xc2\xfe\x15\xff}\xff\xbb\xff\xcc\xff\xca\xff\xd3\xff\xf0\xff\x1a\x00>\x00i\x00\xa0\x00\xbc\x00\x91\x00\x1d\x00\x86\xff\t\xff\xf3\xfeO\xff\xce\xff+\x00\\\x00\x80\x00\xab\x00\xd5\x00\xdc\x00\x9f\x00:\x00\xf2\xff\xee\xff\xfc\xff\xda\xff\xac\xff\xba\xff\x16\x00\x8b\x00\xcc\x00\xba\x00}\x00Q\x00<\x003\x00 \x00\xf9\xff\xba\xfft\xffJ\xffX\xff\x91\xff\xca\xff\xed\xff\x14\x00V\x00\x9f\x00\xbe\x00\xa7\x00\x8b\x00\x8d\x00\x93\x00j\x00\x1c\x00\xf0\xff\xf6\xff\xfc\xff\xdd\xff\xba\xff\xca\xff\t\x002\x00\r\x00\xab\xff]\xffj\xff\xcd\xff\x1e\x00\xfc\xff\x88\xff0\xffB\xff\x9f\xff\xec\xff\x03\x00\t\x00-\x00p\x00\x9b\x00\x8c\x00m\x00{\x00\xb7\x00\xd8\x00\xa5\x00\x1d\x00\x86\xff3\xff6\xffd\xff\x8e\xff\xb9\xff\xfc\xffK\x00y\x00x\x00`\x00I\x006\x00.\x00/\x00\x11\x00\xcc\xff\x97\xff\xc3\xffM\x00\xd3\x00\xfd\x00\xdb\x00\xbf\x00\xdc\x00\x06\x01\xf0\x00\x91\x009\x00 \x00\x1b\x00\xf4\xff\xc5\xff\xcd\xff\x07\x001\x00&\x00\x15\x001\x00`\x00o\x00U\x00*\x00\x04\x00\xf6\xff\xf0\xff\xd0\xff\x8d\xffN\xff<\xffb\xff\xc5\xffL\x00\xb0\x00\xb0\x00\\\x00 \x00J\x00\xa0\x00\x92\x00\xfe\xfff\xffN\xff\xa0\xff\xf4\xff\x0e\x00\x11\x00*\x00?\x00)\x00\xeb\xff\xa3\xffz\xff\x9a\xff\xfc\xffI\x001\x00\xb3\xff(\xff\xef\xfe\x19\xffT\xffq\xff\xa0\xff\x19\x00\xab\x00\xe8\x00\xaa\x009\x00\xe9\xff\xd5\xff\xd2\xff\xb8\xff\x86\xffU\xffF\xff^\xff\x8a\xff\xb7\xff\xd2\xff\xd8\xff\xcf\xff\xba\xff\xa5\xff\x97\xff\x8c\xff\x92\xff\xa3\xff\xa3\xff\x8e\xff\x80\xff\x89\xff\x99\xff\xa2\xff\xaf\xff\xd8\xff\x18\x00n\x00\xd6\x00\x1e\x01\x0b\x01\xa5\x004\x00\xda\xffk\xff\xe8\xfe\x9d\xfe\xd6\xfeh\xff\xe6\xff\x14\x00\x08\x00\xe8\xff\xcf\xff\xc5\xff\xc4\xff\xb2\xffx\xff/\xff\x0e\xff:\xff\x82\xff\xac\xff\xb8\xff\xda\xff8\x00\xaa\x00\xde\x00\xb4\x00v\x00\x87\x00\xe4\x00#\x01\xee\x00O\x00\xad\xffj\xff\x8c\xff\xbc\xff\xc2\xff\xc1\xff\xf1\xff;\x00S\x000\x00\x03\x00\x04\x00\'\x00=\x003\x00\x03\x00\xb3\xffm\xffc\xff\xb0\xff3\x00\xa8\x00\xeb\x00\x03\x01\xff\x00\xd8\x00\x96\x00Z\x00A\x00@\x00.\x00\xfa\xff\xc1\xff\x9e\xff\x86\xffg\xffU\xffs\xff\xcd\xff7\x00\x80\x00\x9b\x00\x92\x00w\x00P\x00 
\x00\xe5\xff\xa6\xff\x87\xff\x9b\xff\xd4\xff\xf8\xff\xf2\xff\x03\x00b\x00\xf6\x00c\x01f\x01\n\x01\x91\x00-\x00\xef\xff\xd0\xff\xb9\xff\xa7\xff\xa8\xff\xca\xff\r\x00d\x00\x9e\x00\x84\x00\x18\x00\xbc\xff\xc2\xff\x1b\x00j\x00_\x00\x0e\x00\xbd\xff\x94\xff\x8a\xff\x8a\xff\x9d\xff\xe1\xffY\x00\xcd\x00\x05\x01\n\x01\x0c\x01\x17\x01\xff\x00\xae\x00E\x00\xf2\xff\xc7\xff\xaf\xff\x99\xff\x8d\xff\xb2\xff\x0e\x00g\x00}\x00D\x00\xf4\xff\xc9\xff\xd1\xff\xf0\xff\xfa\xff\xdc\xff\xa0\xffk\xffk\xff\x9d\xff\xcd\xff\xe3\xff\x05\x00N\x00\x9d\x00\xcb\x00\xd1\x00\xaa\x00P\x00\xd7\xffp\xffA\xffC\xff^\xffo\xffT\xff\x1a\xff\xff\xfe/\xff\x9a\xff\xff\xff$\x00\x0e\x00\xef\xff\xdc\xff\xc7\xff\x9c\xffk\xff\\\xff\x85\xff\xbd\xff\xdc\xff\xda\xff\xd4\xff\xe7\xff\x17\x00U\x00\x86\x00\x95\x00\x80\x00_\x003\x00\xe7\xff\x88\xffB\xff#\xff\x1b\xff7\xff\x93\xff\x06\x00F\x00@\x00\x1a\x00\xee\xff\xc9\xff\xb3\xff\xb3\xff\xc7\xff\xcd\xff\xad\xffh\xff4\xffK\xff\xbd\xffS\x00\xba\x00\xc6\x00\x98\x00x\x00{\x00s\x00A\x00\x07\x00\x01\x007\x00_\x009\x00\xbe\xff/\xff\xf6\xfe?\xff\xca\xff,\x00G\x00;\x005\x006\x00)\x00\xf2\xff\x8a\xff#\xff\n\xffR\xff\xc0\xff\x0e\x006\x00d\x00\xa4\x00\xc3\x00\x97\x00U\x00<\x00<\x00(\x00\xfe\xff\xe8\xff\xfe\xff\x0e\x00\xca\xffB\xff\xe9\xfe\x14\xff\x85\xff\xd6\xff\xf7\xff\x18\x00W\x00\x89\x00v\x00&\x00\xd5\xff\xa7\xff\x94\xff|\xffg\xff\x81\xff\xdb\xffO\x00\xb0\x00\xfb\x00/\x01-\x01\xf1\x00\xaa\x00\x80\x00T\x00\xfb\xff\x88\xff:\xff;\xff\x8a\xff\xfd\xffd\x00\x99\x00\x8e\x00]\x00!\x00\xdf\xff\xa6\xff\x9a\xff\xc6\xff\x11\x00J\x00J\x00\x05\x00\xb4\xff\xa7\xff\xec\xffP\x00\x98\x00\xb5\x00\xc5\x00\xd6\x00\xce\x00\x90\x00A\x00\x1c\x00$\x00+\x00\x05\x00\xb0\xffR\xff-\xffa\xff\xd4\xff?\x00j\x00\\\x00D\x00>\x00<\x00%\x00\xe6\xff\x99\xffa\xffP\xffj\xff\x9f\xff\xe1\xff\x1d\x00C\x00M\x00J\x00F\x00H\x00G\x002\x00\x11\x00\xfa\xff\xea\xff\xbb\xffS\xff\xe2\xfe\xc2\xfe$\xff\xd8\xff{\x00\xc6\x00\xc1\x00\xa6\x00\x97\x00\x88\x00T\x00\xeb\xffi\xff\t\xff\xf8\xfe4\xff\x99\xff\x06\x00d\x00\xac\x00\xe2\x00\n\x01\xfe\x00\x93\x00\x00\x00\xaf\xff\xd1\xff 
\x001\x00\xea\xff\x90\xffz\xff\xc7\xffB\x00\x97\x00\x98\x00U\x00\x0b\x00\xf2\xff\r\x005\x00<\x00\x14\x00\xdf\xff\xbd\xff\xa6\xff\x8f\xff\x83\xff\x9d\xff\xea\xffY\x00\xc7\x00\x06\x01\x02\x01\xc9\x00~\x00*\x00\xe4\xff\xbb\xff\xa4\xff\x83\xffU\xff<\xffT\xff\x8c\xff\xbc\xff\xc6\xff\xb4\xff\xac\xff\xc9\xff\x12\x00a\x00\x82\x00H\x00\xdd\xff\x7f\xff:\xff\t\xff\x02\xffT\xff\xf4\xff\x9d\x00\x02\x01\r\x01\xdb\x00\x9c\x00e\x00*\x00\xdf\xff\x98\xffo\xff[\xffA\xff.\xffW\xff\xc1\xff-\x00`\x00b\x00b\x00b\x00I\x00\x19\x00\xfc\xff\x07\x00\x17\x00\xfb\xff\xb0\xffq\xffr\xff\xa0\xff\xbf\xff\xc5\xff\xed\xffS\x00\xc2\x00\xed\x00\xc6\x00\x89\x00`\x00/\x00\xd2\xff_\xff\x17\xff!\xffg\xff\xcc\xffE\x00\xb4\x00\xed\x00\xd3\x00\x80\x00)\x00\xf2\xff\xd6\xff\xb1\xff|\xffH\xff&\xff\x17\xff*\xffY\xff\x90\xff\xc1\xff\xed\xff#\x00S\x00m\x00o\x00W\x00\x1d\x00\xd5\xff\xa1\xff~\xff\\\xff2\xff(\xffo\xff\xf7\xff\x81\x00\xc2\x00\xb2\x00}\x00_\x00[\x00I\x00\x01\x00\x91\xff2\xff\t\xff\x17\xffX\xff\xc8\xff@\x00\x8f\x00\xa1\x00\x8e\x00l\x000\x00\xdb\xff\x9d\xff\xb5\xff\x1c\x00y\x00m\x00\xfd\xff\x83\xffT\xff\x87\xff\xe0\xff!\x00H\x00z\x00\xc0\x00\xf8\x00\xfc\x00\xc2\x00c\x00\xfd\xff\xa8\xffj\xffC\xff9\xff[\xff\xb1\xff\'\x00\x96\x00\xe2\x00\xf7\x00\xdd\x00\xac\x00p\x00.\x00\xeb\xff\xa6\xffc\xff2\xff2\xffc\xff\xb7\xff\x11\x00`\x00\x8e\x00\x84\x00W\x00/\x00\x1c\x00\x0c\x00\xe6\xff\xab\xffp\xffG\xff?\xff^\xff\x9f\xff\xf4\xffV\x00\xc1\x00\x1b\x017\x01\x07\x01\xb3\x00n\x00N\x00G\x002\x00\xf4\xff\x9c\xffh\xff\x85\xff\xe2\xffC\x00z\x00x\x00U\x00/\x00\x18\x00\r\x00\n\x00\x11\x00\x15\x00\x00\x00\xbb\xffk\xffW\xff\x96\xff\x02\x00Q\x00f\x00`\x00_\x00c\x00`\x00_\x00f\x00t\x00\x87\x00~\x00$\x00z\xff\xee\xfe\xf0\xfe\x80\xff3\x00\xa9\x00\xd7\x00\xe6\x00\xe0\x00\xae\x00`\x00\x10\x00\xba\xffZ\xff\n\xff\xe5\xfe\xe7\xfe\xfd\xfe\'\xffg\xff\xbc\xff\x07\x00"\x00\x11\x00\x01\x00\n\x00\x12\x00\xf1\xff\xac\xff_\xff3\xff9\xffl\xff\xbb\xff$\x00\xa2\x00\t\x01!\x01\xdd\x00{\x00F\x00N\x00T\x00"\x00\xc0\xffj\xffR\xffy\xff\xb8\xff\xfb\xffD\x00\x7f\x00\x8b\x00q\x00F\x00\x13\x00\xdc\xff\xcc\xff\x08\x00h\x00\x86\x00/\x00\xad\xff\x84\xff\xce\xff-\x00Q\x00?\x000\x00=\x00_\x00z\x00}\x00m\x00c\x00c\x00K\x00\xfd\xff\x9a\xffs\xff\xaa\xff\x14\x00[\x00]\x004\x00\r\x00\x07\x00\x18\x00#\x00\x16\x00\xf8\xff\xc8\xff}\xff\x16\xff\xb8\xfe\x95\xfe\xd2\xfe\\\xff\xeb\xff6\x00-\x00\x02\x00\xe3\xff\xd4\xff\xc5\xff\xb0\xff\x94\xffh\xff2\xff\x12\xff0\xff\x83\xff\xe9\xff6\x00V\x00L\x00+\x00\x17\x00.\x00]\x00h\x00.\x00\xd4\xff\x8d\xfft\xff\x89\xff\xcb\xff4\x00\xa0\x00\xd0\x00\xab\x00\\\x00\x18\x00\xef\xff\xd4\xff\xc5\xff\xd1\xff\xea\xff\xe5\xff\xba\xff\xa1\xff\xbc\xff\xf5\xff$\x00F\x00e\x00p\x00P\x00!\x00\x1d\x00]\x00\xae\x00\xd0\x00\xa7\x00[\x00\x1a\x00\x06\x00\x19\x00.\x00&\x00\x01\x00\xd6\xff\xca\xff\xf6\xff5\x00H\x00\'\x00\x01\x00\xf6\xff\xed\xff\xb3\xffH\xff\xf1\xfe\xf5\xfeL\xff\xb1\xff\xd5\xff\xac\xffx\xff\x86\xff\xd4\xff\x1f\x005\x00\x17\x00\xcd\xffa\xff\xf3\xfe\xc2\xfe\xf6\xfer\xff\xf3\xff\\\x00\xa9\x00\xcc\x00\xbd\x00\x95\x00~\x00q\x00U\x00+\x00\x0f\x00\xfb\xff\xd6\xff\xa8\xff\xa4\xff\xeb\xffT\x00\x89\x00X\x00\xef\xff\x9b\xff\x80\xff\x9d\xff\xd6\xff\x07\x00\x10\x00\xe4\xff\xb0\xff\xb2\xff\xf5\xffO\x00\x92\x00\xb5\x00\xbc\x00\xa9\x00\x8d\x00{\x00|\x00\x87\x00\xa0\x00\xb5\x00\xa5\x00Z\x00\xfd\xff\xde\xff\x16\x00f\x00x\x00<\x00\xee\xff\xce\xff\xda\xff\xf3\xff\x11\x00;\x00]\x00\\\x00*\x00\xdf\xff\xa6\xff\x92\xff\xa1\xff\xbe\xff\xda\xff\xee\xff\xf6\xff\xf2\xff\xec\xff\xec\xff\xf3\xff\x01\x00\x12\x00\x0f\x00\xdb\xff\x86\xff\\\xff\x88\xff\xeb\xffE\x00c\x00?\x00\x08\x00\x00\x008\x00u\x00x\x00
H\x00\x18\x00\xfe\xff\xdb\xff\xa7\xff\x91\xff\xc4\xff\'\x00x\x00\x88\x00W\x00\x12\x00\xdd\xff\xcd\xff\xe3\xff\x0f\x000\x00-\x00\x07\x00\xd8\xff\xbd\xff\xbf\xff\xd9\xff\x0b\x00J\x00i\x00=\x00\xe7\xff\xb8\xff\xda\xff%\x00]\x00b\x004\x00\xf2\xff\xca\xff\xda\xff\x10\x00N\x00n\x00[\x00,\x00\x04\x00\xf6\xff\xfb\xff\xfd\xff\xf3\xff\xea\xff\xe3\xff\xd2\xff\xa9\xff\x84\xff\x8d\xff\xc8\xff\x0c\x00\'\x00\r\x00\xd3\xff\x92\xffk\xffu\xff\xb8\xff\x12\x00>\x00\x18\x00\xba\xffm\xffn\xff\xb6\xff\x04\x00)\x00,\x00"\x00\x13\x00\x01\x00\xf1\xff\xe8\xff\xf4\xff\x15\x00A\x00T\x006\x00\xfc\xff\xe1\xff\xfc\xff\'\x00 \x00\xd7\xff\x83\xffc\xff\x85\xff\xb4\xff\xc7\xff\xc1\xff\xc1\xff\xc7\xff\xc3\xff\xa8\xff\x84\xff}\xff\xb6\xff\x1c\x00n\x00t\x00<\x00\x08\x00\x05\x00,\x00_\x00\x89\x00\x9a\x00\x7f\x005\x00\xde\xff\xac\xff\xba\xff\xf1\xff#\x00$\x00\xe6\xff\x91\xff^\xffg\xff\x95\xff\xbb\xff\xd8\xff\xf4\xff\x0b\x00\x0b\x00\xf9\xff\xea\xff\xea\xff\xf7\xff\x13\x009\x00L\x00.\x00\xef\xff\xc8\xff\xe0\xff\x19\x001\x00\xfe\xff\xa7\xff{\xff\xa9\xff\x0b\x00Y\x00o\x00_\x00;\x00\t\x00\xd9\xff\xd3\xff\x0e\x00k\x00\xb3\x00\xc0\x00\x90\x00P\x005\x00Q\x00v\x00t\x00A\x00\n\x00\xe8\xff\xc8\xff\x9e\xff\x80\xff\x8e\xff\xc3\xff\r\x00K\x00Y\x006\x00\x0b\x00\x10\x00B\x00b\x00<\x00\xec\xff\xbc\xff\xd4\xff\x17\x00I\x00U\x00X\x00j\x00o\x00H\x00\xf6\xff\xb2\xff\xa7\xff\xd0\xff\x07\x00\x18\x00\xec\xff\xaf\xff\xa6\xff\xe6\xffA\x00\x80\x00\x8d\x00y\x00K\x00\x01\x00\xaa\xffn\xffk\xff\xa2\xff\xf0\xff\x13\x00\xe2\xff\x81\xffK\xffc\xff\xa0\xff\xc7\xff\xba\xff\x89\xffY\xffS\xffq\xff\xa2\xff\xdb\xff%\x00t\x00\x9b\x00v\x00\x1f\x00\xde\xff\xe4\xff*\x00l\x00g\x00"\x00\xef\xff\x08\x00W\x00\x8d\x00}\x00<\x00\xfd\xff\xce\xff\x9c\xff]\xff<\xffq\xff\xed\xff`\x00\x8b\x00n\x00>\x00)\x001\x00:\x00,\x00\x0c\x00\xed\xff\xe0\xff\xe2\xff\xf1\xff\x13\x00J\x00\x89\x00\xae\x00\xa0\x00d\x00,\x00\x1d\x00$\x00\x11\x00\xcc\xffr\xffI\xff\x7f\xff\xf3\xffU\x00r\x00c\x00\\\x00c\x00J\x00\x01\x00\xad\xff\x87\xff\xa1\xff\xed\xff8\x00F\x00\x0b\x00\xc4\xff\xb7\xff\xf9\xffL\x00\\\x00\x17\x00\xb9\xff{\xff`\xff[\xffs\xff\xbe\xff"\x00[\x00H\x00\x04\x00\xc7\xff\xa7\xff\x9c\xff\xaa\xff\xd5\xff\x0f\x00<\x00I\x00A\x004\x00#\x00\x12\x00\x0b\x00\x03\x00\xe4\xff\xac\xffz\xffs\xff\xa8\xff\x00\x00O\x00o\x00c\x00O\x00M\x00O\x00E\x002\x00#\x00\x1b\x00\x14\x00\x08\x00\x05\x00%\x00b\x00\x98\x00\xa2\x00\x82\x00Y\x00A\x00\x1c\x00\xd7\xff\x92\xffl\xffd\xffy\xff\xa1\xff\xd0\xff\xf9\xff\x15\x007\x00e\x00\x82\x00g\x00&\x00\xf3\xff\xed\xff\x02\x00\x0f\x00\xfa\xff\xcc\xff\xa8\xff\xb7\xff\xfa\xff9\x00?\x00\n\x00\xc6\xff\x95\xff\x80\xffs\xffj\xffm\xff\x99\xff\xf0\xffF\x00e\x00D\x00\x05\x00\xe0\xff\xe8\xff\r\x00-\x00>\x00B\x004\x00\x13\x00\xf3\xff\xe8\xff\xf4\xff\xf8\xff\xd4\xff\x91\xffQ\xff1\xff6\xffZ\xff\x96\xff\xe3\xff\'\x00<\x00"\x00\xf9\xff\xdb\xff\xd5\xff\xe6\xff\x04\x00\x12\x00\x00\x00\xec\xff\xfe\xff/\x00a\x00s\x00c\x00?\x00\x1e\x00\n\x00\xff\xff\xf7\xff\xea\xff\xdd\xff\xd9\xff\xe6\xff\xee\xff\xe6\xff\xd9\xff\xe9\xff!\x00c\x00\x87\x00\x85\x00i\x008\x00\xfb\xff\xc7\xff\xb6\xff\xc0\xff\xc5\xff\xb5\xff\xa8\xff\xc3\xff\xf8\xff\x1a\x00\x17\x00\x04\x00\xf3\xff\xdf\xff\xbd\xff\xa1\xff\xaf\xff\xe0\xff\r\x00 
\x00%\x007\x00]\x00}\x00t\x00B\x00\t\x00\xf8\xff\x12\x008\x00N\x00N\x00A\x00.\x00\x15\x00\xf4\xff\xcb\xff\x9e\xff{\xff\x82\xff\xc0\xff\x1d\x00d\x00n\x00D\x00\x18\x00\x13\x00$\x00.\x00\'\x00\x0e\x00\xf1\xff\xdf\xff\xde\xff\xed\xff\n\x00&\x004\x002\x00#\x00\x07\x00\xe6\xff\xcf\xff\xc7\xff\xc6\xff\xbf\xff\xaf\xff\xa9\xff\xb4\xff\xc5\xff\xd8\xff\xf2\xff\x1b\x00G\x00b\x00i\x00c\x00Y\x00O\x00C\x00>\x003\x00\x10\x00\xde\xff\xc1\xff\xcc\xff\xf0\xff\x15\x002\x00<\x001\x00\x03\x00\xaf\xff`\xffN\xff\x84\xff\xd4\xff\x0f\x00$\x00\x1d\x00\x0f\x00\x08\x00\x05\x00\xfe\xff\xfe\xff\x17\x00E\x00q\x00~\x00j\x00C\x00\x12\x00\xe0\xff\xb8\xff\xa9\xff\xbb\xff\xe4\xff\x04\x00\x03\x00\xf2\xff\xf0\xff\x00\x00\x1a\x006\x00T\x00f\x00\\\x00;\x00\x18\x00\xfb\xff\xe8\xff\xe0\xff\xed\xff\x17\x00N\x00j\x00P\x00\x11\x00\xd7\xff\xca\xff\xe6\xff\xfd\xff\xe4\xff\xa9\xff~\xff\x82\xff\xa3\xff\xc0\xff\xc9\xff\xcf\xff\xde\xff\xf7\xff\x19\x004\x006\x00\x1d\x00\xff\xff\xf9\xff\xfd\xff\xe9\xff\xb5\xff\x89\xff\x91\xff\xc5\xff\xfd\xff\r\x00\xfd\xff\xe9\xff\xe1\xff\xda\xff\xcf\xff\xd3\xff\xf3\xff&\x00F\x00;\x00\x11\x00\xeb\xff\xdf\xff\xe8\xff\xf3\xff\xff\xff\x1a\x00>\x00L\x006\x00\x0b\x00\xde\xff\xb6\xff\x9d\xff\x99\xff\xa3\xff\xae\xff\xb4\xff\xaf\xff\xab\xff\xb9\xff\xe2\xff\x1d\x00V\x00{\x00\x8f\x00\x95\x00\x91\x00x\x00A\x00\n\x00\xf7\xff\x12\x00;\x00X\x00Z\x00C\x00 \x00\x06\x00\xfc\xff\x06\x00\x1a\x00#\x00\x10\x00\xe6\xff\xc2\xff\xb7\xff\xc1\xff\xcd\xff\xd7\xff\xef\xff\x1a\x00H\x00i\x00v\x00p\x00^\x00F\x006\x005\x00,\x00\x0b\x00\xe2\xff\xca\xff\xcf\xff\xda\xff\xce\xff\xae\xff\x9b\xff\xa1\xff\xae\xff\xae\xff\xa0\xff\xa2\xff\xc0\xff\xec\xff\xf4\xff\xc6\xff\x8f\xff\x7f\xff\x9d\xff\xce\xff\xf1\xff\x05\x00\x11\x00\x17\x00\x1c\x00.\x00H\x00S\x00?\x00\x1b\x00\xfe\xff\xeb\xff\xd1\xff\xaa\xff\x8b\xff\x8c\xff\xba\xff\xff\xff>\x00]\x00X\x00>\x00#\x00\x06\x00\xdf\xff\xbb\xff\xb5\xff\xd5\xff\x06\x00)\x00+\x00\x18\x00\x0e\x00\'\x00G\x00G\x00\x1d\x00\xf2\xff\xea\xff\xfe\xff\n\x00\x01\x00\xf6\xff\xfc\xff\x15\x000\x007\x00)\x00\x11\x00\x00\x00\x06\x00#\x00P\x00\x81\x00\x9d\x00\x8d\x00P\x00\x07\x00\xd3\xff\xba\xff\xb6\xff\xc3\xff\xe2\xff\t\x00\'\x00)\x00\r\x00\xdd\xff\xb6\xff\xb4\xff\xd4\xff\xf0\xff\xe5\xff\xbd\xff\x9a\xff\x90\xff\x9b\xff\xaf\xff\xbc\xff\xc1\xff\xc5\xff\xca\xff\xd6\xff\xe6\xff\xef\xff\xea\xff\xde\xff\xd7\xff\xd3\xff\xc4\xff\xa6\xff\x8c\xff\x90\xff\xb6\xff\xf7\xffA\x00u\x00\x85\x00\x84\x00\x8c\x00\x9a\x00\x97\x00n\x000\x00\xff\xff\xf4\xff\x03\x00\t\x00\x01\x00\xff\xff\x1b\x00C\x00L\x00&\x00\xed\xff\xd0\xff\xda\xff\xef\xff\xf3\xff\xe9\xff\xe2\xff\xeb\xff\x05\x00"\x001\x000\x001\x00H\x00l\x00~\x00u\x00o\x00w\x00{\x00W\x00\r\x00\xc3\xff\xa5\xff\xbf\xff\xf3\xff\x16\x00\x1d\x00\x15\x00\t\x00\x03\x00\t\x00\x1e\x002\x00=\x00?\x004\x00\x0e\x00\xd4\xff\x9b\xff\x82\xff\x96\xff\xc9\xff\xff\xff \x00&\x00\x1b\x00\x05\x00\xed\xff\xd0\xff\xb3\xff\x9f\xff\x96\xff\x98\xff\xa3\xff\xb8\xff\xda\xff\x01\x00\x1d\x00 
\x00\x11\x00\x0e\x00(\x00R\x00r\x00o\x00M\x00$\x00\x0c\x00\xf6\xff\xce\xff\x9c\xff\x8a\xff\xb5\xff\t\x00B\x004\x00\xfb\xff\xdf\xff\xf9\xff&\x000\x00\n\x00\xda\xff\xbd\xff\xbe\xff\xd1\xff\xf2\xff\x1a\x00@\x00]\x00j\x00b\x00E\x00(\x00\x1d\x00\x1e\x00\x14\x00\xf7\xff\xd5\xff\xc7\xff\xd4\xff\xe6\xff\xe9\xff\xdf\xff\xda\xff\xeb\xff\x06\x00\x0b\x00\xed\xff\xd4\xff\xe6\xff\x1a\x00<\x00\x1e\x00\xd2\xff\x95\xff\x90\xff\xb2\xff\xd9\xff\xee\xff\xf5\xff\xfa\xff\x04\x00\x11\x00\x1e\x00\x1b\x00\x04\x00\xe5\xff\xd7\xff\xde\xff\xe2\xff\xd1\xff\xc3\xff\xd1\xff\xfb\xff#\x00.\x00#\x00\x1c\x00*\x00>\x00@\x00%\x00\xfc\xff\xd4\xff\xb6\xff\xa6\xff\xa8\xff\xba\xff\xd9\xff\xf8\xff\x08\x00\xfa\xff\xd2\xff\xb7\xff\xbf\xff\xe5\xff\x08\x00\x14\x00\x0b\x00\xf9\xff\xdb\xff\xb9\xff\xa8\xff\xbd\xff\xfa\xffA\x00k\x00e\x00D\x00/\x007\x00M\x00N\x00)\x00\xf2\xff\xcb\xff\xc7\xff\xd6\xff\xd7\xff\xc5\xff\xc0\xff\xe0\xff\x10\x00\x1e\x00\xfa\xff\xd1\xff\xd4\xff\x04\x00.\x00\x1d\x00\xe1\xff\xb1\xff\xb3\xff\xd4\xff\xec\xff\xeb\xff\xed\xff\x05\x001\x00M\x00A\x00\x17\x00\xf0\xff\xea\xff\n\x000\x005\x00\x11\x00\xec\xff\xe8\xff\x08\x00/\x00<\x003\x001\x00P\x00~\x00\x93\x00\x81\x00\\\x00>\x00.\x00\x1b\x00\xf2\xff\xc6\xff\xb1\xff\xc2\xff\xe4\xff\xfa\xff\xfb\xff\xf7\xff\xff\xff\x03\x00\xf1\xff\xd9\xff\xd8\xff\xe7\xff\xf3\xff\xf0\xff\xf0\xff\x04\x00$\x00;\x009\x00!\x00\x0c\x00\x11\x009\x00h\x00}\x00c\x000\x00\x03\x00\xe9\xff\xd6\xff\xb5\xff\x8f\xff\x8b\xff\xb6\xff\xf2\xff\x08\x00\xf2\xff\xd5\xff\xdc\xff\x00\x00\x17\x00\x01\x00\xc8\xff\x96\xff\x8f\xff\xb3\xff\xda\xff\xe4\xff\xdc\xff\xe6\xff\x08\x00\'\x00#\x00\x0b\x00\xff\xff\x0f\x000\x00G\x00@\x00$\x00\n\x00\x0e\x00+\x00C\x00C\x007\x002\x006\x005\x00%\x00\x1a\x00#\x00;\x00E\x003\x00\x08\x00\xd7\xff\xad\xff\x91\xff\x96\xff\xb5\xff\xe0\xff\x05\x00-\x00M\x00M\x00+\x00\x08\x00\x04\x00\x1c\x00+\x00\x17\x00\xee\xff\xd2\xff\xde\xff\x04\x00#\x001\x00?\x00O\x00K\x00!\x00\xe6\xff\xbd\xff\xb3\xff\xba\xff\xc7\xff\xd3\xff\xda\xff\xdb\xff\xcd\xff\xb3\xff\x9f\xff\xa0\xff\xb7\xff\xd6\xff\xef\xff\xf7\xff\xf2\xff\xe9\xff\xe8\xff\xec\xff\xde\xff\xbc\xff\x93\xff\x81\xff\x9a\xff\xd8\xff\x11\x00\x1d\x00\x01\x00\xef\xff\xfe\xff\x1a\x00%\x00\x15\x00\xf7\xff\xe6\xff\xf2\xff\x16\x001\x00&\x00\x00\x00\xdd\xff\xd8\xff\xf2\xff\x18\x00J\x00w\x00\x8a\x00r\x00=\x00\r\x00\xee\xff\xd5\xff\xbd\xff\xaf\xff\xbf\xff\xe8\xff\x17\x004\x00>\x004\x00\x19\x00\x02\x00\x00\x00\x1b\x002\x00"\x00\xed\xff\xc0\xff\xc2\xff\xee\xff!\x00D\x00X\x00a\x00W\x008\x00\x0f\x00\xf7\xff\xf3\xff\xf6\xff\xf5\xff\xf9\xff\x02\x00\x00\x00\xec\xff\xd0\xff\xbf\xff\xbb\xff\xba\xff\xb3\xff\xa6\xff\x9a\xff\x9e\xff\xb9\xff\xdb\xff\xf1\xff\xf8\xff\xf5\xff\xe4\xff\xc9\xff\xb1\xff\xab\xff\xb0\xff\xb6\xff\xcb\xff\xfb\xff6\x00\\\x00b\x00_\x00d\x00i\x00Z\x005\x00\r\x00\xf4\xff\xe7\xff\xe9\xff\xfd\xff\x16\x00%\x00\'\x00&\x00+\x00(\x00\x14\x00\xf9\xff\xe3\xff\xd9\xff\xd5\xff\xd0\xff\xca\xff\xc7\xff\xce\xff\xe2\xff\t\x00<\x00f\x00y\x00t\x00e\x00S\x007\x00\x10\x00\xec\xff\xe3\xff\xfd\xff,\x00X\x00k\x00_\x00<\x00\n\x00\xe3\xff\xd8\xff\xe6\xff\xf5\xff\xfb\xff\xfa\xff\xf7\xff\xf6\xff\xee\xff\xdb\xff\xca\xff\xcc\xff\xe0\xff\xf2\xff\xf3\xff\xe8\xff\xdd\xff\xda\xff\xdf\xff\xe9\xff\xf4\xff\xfd\xff\xfc\xff\xee\xff\xdb\xff\xd1\xff\xd2\xff\xd3\xff\xce\xff\xc1\xff\xb9\xff\xc6\xff\x01\x00\\\x00\x8c\x00u\x00=\x00\x0f\x00\xfb\xff\xf5\xff\xed\xff\xde\xff\xd4\xff\xdd\xff\xf7\xff\x19\x00;\x00I\x00@\x00(\x00\x17\x00\x1c\x00"\x00\n\x00\xd3\xff\xa4\xff\xa0\xff\xc0\xff\xe9\xff\x0c\x00(\x00;\x00:\x00/\x00+\x000\x00&\x00\x06\x00\xe2\xff\xd8\xff\xe9\xff\xfb\xff\xfe\xff\xf6\xff\xf3\xff\xfb\xff\x06
\x00\x10\x00\x18\x00!\x00\'\x00\x18\x00\xf9\xff\xe0\xff\xd5\xff\xd7\xff\xdf\xff\xec\xff\x00\x00\x13\x00\x12\x00\xfa\xff\xe4\xff\xe9\xff\xfb\xff\xfd\xff\xe8\xff\xd9\xff\xe9\xff\t\x00\x0f\x00\xea\xff\xb7\xff\xa1\xff\xb6\xff\xe0\xff\xfb\xff\xfa\xff\xee\xff\xf1\xff\x10\x00=\x00[\x00O\x00(\x00\x07\x00\x00\x00\x05\x00\x01\x00\xf5\xff\xea\xff\xe7\xff\xec\xff\xf2\xff\xf8\xff\x03\x00\x11\x00\x15\x00\x07\x00\xf0\xff\xda\xff\xcc\xff\xc4\xff\xc0\xff\xc0\xff\xc8\xff\xe4\xff\x1b\x00R\x00e\x00L\x00)\x00\x1c\x005\x00Z\x00g\x00M\x00!\x00\xfb\xff\xf2\xff\x01\x00\x12\x00\x0b\x00\xee\xff\xd0\xff\xc9\xff\xd6\xff\xe2\xff\xe5\xff\xdb\xff\xce\xff\xca\xff\xdf\xff\xf7\xff\xef\xff\xc5\xff\xa1\xff\xa5\xff\xc9\xff\xed\xff\x06\x00\x1d\x001\x008\x00.\x00\x1e\x00\x18\x00\x12\x00\x00\x00\xe5\xff\xda\xff\xeb\xff\x0b\x00\x1f\x00\x16\x00\xf9\xff\xea\xff\x02\x00)\x00A\x00@\x003\x00&\x00\x1b\x00\x03\x00\xd9\xff\xb8\xff\xb7\xff\xd2\xff\xfb\xff\x1b\x00(\x00\x1f\x00\x0c\x00\xf8\xff\xec\xff\xe7\xff\xe3\xff\xe6\xff\xf1\xff\x03\x00\x05\x00\xf8\xff\xe8\xff\xf0\xff\x10\x000\x008\x00&\x00\x18\x00#\x00=\x00K\x00=\x00 \x00\x0b\x00\x04\x00\x04\x00\x03\x00\x00\x00\xf8\xff\xed\xff\xe5\xff\xe2\xff\xe2\xff\xe6\xff\xef\xff\x04\x00\x1a\x00.\x005\x00$\x00\xf3\xff\xb8\xff\x9b\xff\xab\xff\xd3\xff\xf5\xff\x06\x00\x0b\x00\x03\x00\xfe\xff\x02\x00\r\x00\x16\x00\x18\x00\x08\x00\xe0\xff\xb4\xff\xa5\xff\xb8\xff\xd1\xff\xda\xff\xe3\xff\t\x00G\x00v\x00|\x00`\x00;\x00\x1c\x00\xff\xff\xe3\xff\xd4\xff\xdd\xff\xff\xff"\x000\x00\x1f\x00\xff\xff\xec\xff\xeb\xff\xee\xff\xe8\xff\xdf\xff\xdb\xff\xdb\xff\xdd\xff\xdc\xff\xd1\xff\xc0\xff\xbe\xff\xe0\xff\x1d\x00J\x00J\x00,\x00\x10\x00\x15\x003\x00G\x00=\x00*\x00\x1f\x00\x1f\x00 \x00\x1e\x00\x14\x00\xf8\xff\xd1\xff\xad\xff\xa2\xff\xb7\xff\xdd\xff\xf7\xff\xfb\xff\xf9\xff\r\x00*\x00)\x00\xf7\xff\xb9\xff\xa5\xff\xc4\xff\xf3\xff\x13\x00\x1d\x00\x15\x00\x0c\x00\x14\x001\x00S\x00^\x00D\x00\x11\x00\xd7\xff\xab\xff\x9b\xff\xa5\xff\xbc\xff\xdb\xff\x03\x00,\x00D\x00J\x00=\x00\'\x00\x0f\x00\x04\x00\x02\x00\xf7\xff\xe7\xff\xe2\xff\xef\xff\xff\xff\xfc\xff\xef\xff\xf2\xff\t\x00\x1b\x00\x12\x00\xf9\xff\xe9\xff\xea\xff\xed\xff\xe7\xff\xe2\xff\xea\xff\xff\xff\x10\x00\x15\x00\x1d\x00.\x00A\x00D\x008\x003\x00=\x00@\x00/\x00\x14\x00\x05\x00\n\x00\x12\x00\x07\x00\xf1\xff\xd8\xff\xbe\xff\xa5\xff\x9e\xff\xb3\xff\xdc\xff\xfc\xff\x05\x00\x05\x00\x18\x003\x00/\x00\x04\x00\xd8\xff\xcf\xff\xe1\xff\xef\xff\xf2\xff\xf9\xff\x0f\x00(\x00;\x00C\x00?\x00/\x00\x10\x00\xe8\xff\xc8\xff\xbc\xff\xc5\xff\xd3\xff\xdc\xff\xe6\xff\xf7\xff\n\x00\x15\x00\x19\x00\x1a\x00\x16\x00\x10\x00\x0c\x00\x10\x00\x11\x00\x07\x00\xeb\xff\xcb\xff\xb5\xff\xb5\xff\xca\xff\xe6\xff\xf6\xff\xf7\xff\xf2\xff\xf1\xff\xf4\xff\xf8\xff\xfd\xff\xfc\xff\xf1\xff\xe3\xff\xd8\xff\xd9\xff\xe2\xff\xfb\xff\x1f\x00:\x00C\x00>\x005\x004\x007\x009\x002\x00&\x00\x1c\x00\x16\x00\x0b\x00\xe9\xff\xb5\xff\x8d\xff\x96\xff\xca\xff\x04\x00 \x00\x17\x00\x08\x00\x11\x000\x00;\x00\x1a\x00\xe4\xff\xc5\xff\xc8\xff\xdd\xff\xee\xff\xfd\xff\x15\x00,\x004\x002\x00.\x00/\x00-\x00 \x00\x06\x00\xec\xff\xd7\xff\xca\xff\xbf\xff\xb8\xff\xc0\xff\xdd\xff\x05\x00(\x00:\x00?\x008\x00-\x00"\x00\x1b\x00\x10\x00\x01\x00\xeb\xff\xd5\xff\xcd\xff\xd2\xff\xd4\xff\xd1\xff\xd0\xff\xdb\xff\xef\xff\xfa\xff\xf9\xff\xf0\xff\xec\xff\xec\xff\xe6\xff\xd5\xff\xbe\xff\xb4\xff\xc9\xff\xf4\xff\x1f\x005\x009\x00@\x00R\x00d\x00j\x00\\\x00D\x00,\x00\x12\x00\xf7\xff\xda\xff\xbe\xff\xa3\xff\x9a\xff\xac\xff\xd6\xff\x00\x00\r\x00\xff\xff\xf6\xff\x06\x00\x1e\x00 
\x00\x08\x00\xf2\xff\xf0\xff\xf5\xff\xf1\xff\xed\xff\x03\x00.\x00R\x00[\x00S\x00Q\x00S\x00K\x000\x00\n\x00\xe9\xff\xca\xff\xae\xff\x9a\xff\x97\xff\xb5\xff\xee\xff+\x00H\x00<\x00\x1e\x00\n\x00\x0b\x00\x11\x00\x11\x00\x06\x00\xfc\xff\xf4\xff\xeb\xff\xe0\xff\xd8\xff\xd9\xff\xe5\xff\xf4\xff\xfc\xff\xfc\xff\xf7\xff\xf6\xff\xfd\xff\x05\x00\x06\x00\xfc\xff\xe9\xff\xd6\xff\xd0\xff\xdc\xff\xf2\xff\xff\xff\x00\x00\x07\x00$\x00I\x00_\x00W\x00?\x00$\x00\x08\x00\xec\xff\xd4\xff\xc2\xff\xb4\xff\xab\xff\xb1\xff\xcf\xff\xf4\xff\x0e\x00\x12\x00\x14\x00+\x00M\x00Y\x00=\x00\n\x00\xe7\xff\xdd\xff\xe0\xff\xe0\xff\xdf\xff\xeb\xff\x07\x00$\x00;\x00G\x00H\x00@\x00.\x00\x14\x00\xf4\xff\xd6\xff\xbb\xff\xa8\xff\xa3\xff\xb7\xff\xe7\xff!\x00M\x00U\x00G\x00:\x006\x00/\x00\x17\x00\xfb\xff\xf0\xff\xf2\xff\xf1\xff\xe2\xff\xcd\xff\xc3\xff\xc7\xff\xd4\xff\xe0\xff\xe5\xff\xe4\xff\xe4\xff\xea\xff\xf6\xff\xff\xff\xff\xff\xf8\xff\xeb\xff\xde\xff\xd6\xff\xdf\xff\xf5\xff\x0c\x00\x1f\x003\x00F\x00O\x00K\x00C\x00C\x00C\x003\x00\x0e\x00\xe6\xff\xc8\xff\xb5\xff\xa9\xff\xa9\xff\xb8\xff\xd1\xff\xeb\xff\xff\xff\x15\x000\x00E\x00=\x00\x17\x00\xeb\xff\xd9\xff\xdd\xff\xe1\xff\xe0\xff\xe8\xff\t\x005\x00U\x00^\x00\\\x00Z\x00V\x00E\x00+\x00\x08\x00\xde\xff\xb2\xff\x8f\xff\x89\xff\xa6\xff\xd4\xff\xfb\xff\x0e\x00\x11\x00\x11\x00\x1b\x00\'\x00/\x00#\x00\x04\x00\xde\xff\xc1\xff\xb5\xff\xbc\xff\xd2\xff\xee\xff\x04\x00\r\x00\x11\x00\x12\x00\x0e\x00\n\x00\x02\x00\xf7\xff\xea\xff\xe3\xff\xde\xff\xd6\xff\xc5\xff\xba\xff\xc4\xff\xde\xff\xf5\xff\x01\x00\x0b\x00\x1a\x000\x00E\x00M\x00A\x00$\x00\x02\x00\xe4\xff\xca\xff\xb8\xff\xb1\xff\xba\xff\xd5\xff\xfa\xff\x19\x00$\x00%\x000\x00E\x00O\x00?\x00\x16\x00\xf3\xff\xe5\xff\xea\xff\xed\xff\xf0\xff\xf9\xff\x05\x00\n\x00\x06\x00\x0b\x00 \x00<\x00I\x00=\x00!\x00\xfc\xff\xd4\xff\xb0\xff\x9b\xff\x9e\xff\xba\xff\xe3\xff\x0f\x00,\x006\x006\x00@\x00R\x00T\x007\x00\x06\x00\xd9\xff\xba\xff\xac\xff\xae\xff\xbb\xff\xcd\xff\xe0\xff\xf0\xff\xf8\xff\xf8\xff\xf7\xff\xfd\xff\x0c\x00\x16\x00\r\x00\xf4\xff\xd5\xff\xbf\xff\xbd\xff\xd1\xff\xf7\xff\x15\x00\x1e\x00\x1c\x00$\x00=\x00Y\x00_\x00K\x00\'\x00\x04\x00\xf1\xff\xe6\xff\xd6\xff\xc5\xff\xc4\xff\xe0\xff\n\x00%\x00%\x00\x1b\x00\x1f\x009\x00V\x00W\x007\x00\x0e\x00\xf5\xff\xf4\xff\xfa\xff\x00\x00\x04\x00\r\x00\x1d\x00*\x00(\x00\x1e\x00\x1b\x00(\x00=\x00G\x00@\x00 
\x00\xee\xff\xbd\xff\xa7\xff\xb7\xff\xde\xff\x00\x00\n\x00\xfd\xff\xf3\xff\xfe\xff\x1b\x003\x00+\x00\x07\x00\xdf\xff\xc8\xff\xbc\xff\xaf\xff\x9e\xff\x97\xff\xa5\xff\xc8\xff\xf0\xff\x0b\x00\x12\x00\x15\x00\x1d\x00%\x00\x17\x00\xfb\xff\xde\xff\xc8\xff\xbf\xff\xc7\xff\xe4\xff\x06\x00\x0f\x00\xfd\xff\xeb\xff\xed\xff\x0b\x007\x00S\x00K\x00)\x00\x02\x00\xdd\xff\xbf\xff\xad\xff\xb0\xff\xcb\xff\xf6\xff\x1d\x00*\x00\x1d\x00\x0e\x00\x18\x00:\x00Y\x00Z\x009\x00\x13\x00\xfa\xff\xef\xff\xea\xff\xf1\xff\x08\x00(\x005\x00\'\x00\x0f\x00\x08\x00\x18\x00,\x002\x00+\x00\x1c\x00\x05\x00\xe5\xff\xc9\xff\xbd\xff\xc9\xff\xe5\xff\xff\xff\x0c\x00\r\x00\x07\x00\x07\x00\x10\x00\x1c\x00\x1e\x00\x13\x00\xfc\xff\xda\xff\xb7\xff\xa4\xff\xab\xff\xbf\xff\xd2\xff\xdd\xff\xe0\xff\xdf\xff\xe6\xff\xfa\xff\x0c\x00\x06\x00\xea\xff\xcf\xff\xc9\xff\xd2\xff\xda\xff\xdb\xff\xdf\xff\xe7\xff\xee\xff\xed\xff\xef\xff\xf8\xff\x0e\x00,\x00C\x00F\x00.\x00\x06\x00\xdb\xff\xbb\xff\xb1\xff\xc1\xff\xe8\xff\x0c\x00\x1a\x00\x13\x00\n\x00\x16\x00<\x00f\x00w\x00^\x00/\x00\x06\x00\xf0\xff\xe1\xff\xda\xff\xe6\xff\x08\x00/\x00:\x00)\x00\x13\x00\r\x00\x15\x00!\x00(\x00(\x00\x1b\x00\xfe\xff\xdd\xff\xcc\xff\xd7\xff\xf7\xff\x13\x00\x1e\x00\x19\x00\x15\x00\x1d\x004\x00?\x001\x00\n\x00\xe3\xff\xcc\xff\xc2\xff\xb9\xff\xb4\xff\xbb\xff\xce\xff\xe8\xff\xfd\xff\x07\x00\x01\x00\xf2\xff\xea\xff\xed\xff\xf1\xff\xf3\xff\xf0\xff\xea\xff\xe7\xff\xed\xff\xfb\xff\t\x00\n\x00\xf8\xff\xdf\xff\xca\xff\xcb\xff\xec\xff\x1b\x008\x00.\x00\x08\x00\xe0\xff\xc7\xff\xc1\xff\xc6\xff\xcf\xff\xde\xff\xf3\xff\x0c\x00#\x004\x00@\x00I\x00N\x00N\x00E\x000\x00\x14\x00\xf7\xff\xe6\xff\xee\xff\x0b\x00%\x00,\x00 \x00\x19\x00!\x001\x009\x00.\x00\x1d\x00\x14\x00\x0f\x00\xff\xff\xe1\xff\xcd\xff\xd4\xff\xf6\xff\x16\x00"\x00\x1c\x00\x19\x00!\x00*\x00(\x00\x0f\x00\xee\xff\xd5\xff\xce\xff\xcf\xff\xd4\xff\xde\xff\xee\xff\t\x00\x1e\x00#\x00\x17\x00\x0c\x00\x0f\x00\x1a\x00\x13\x00\xf6\xff\xd2\xff\xbf\xff\xc7\xff\xdb\xff\xec\xff\xfa\xff\x05\x00\x05\x00\xf9\xff\xe7\xff\xdc\xff\xd7\xff\xd9\xff\xe2\xff\xed\xff\xf3\xff\xf4\xff\xf1\xff\xef\xff\xf2\xff\xf7\xff\xf9\xff\xf9\xff\x00\x00\x0c\x00\x12\x00\t\x00\x03\x00\x0e\x00$\x00+\x00\x1f\x00\x0b\x00\xfc\xff\xf2\xff\xe9\xff\xe0\xff\xd6\xff\xda\xff\xf0\xff\x0f\x00-\x00;\x002\x00\x1c\x00\x0b\x00\x08\x00\x0c\x00\x08\x00\xf5\xff\xe2\xff\xe8\xff\t\x00\'\x00,\x00\x1c\x00\x0f\x00\x12\x00"\x00/\x00#\x00\x04\x00\xe5\xff\xd8\xff\xd4\xff\xcc\xff\xc1\xff\xc5\xff\xd9\xff\xf3\xff\x07\x00\x12\x00\x19\x00!\x00"\x00\x15\x00\xfb\xff\xe0\xff\xd3\xff\xd3\xff\xdc\xff\xf0\xff\x0f\x00+\x008\x001\x00\x1c\x00\t\x00\xfb\xff\xf1\xff\xed\xff\xeb\xff\xe6\xff\xe3\xff\xe0\xff\xe3\xff\xe7\xff\xeb\xff\xea\xff\xea\xff\xf6\xff\x0e\x00\x1c\x00\x16\x00\x03\x00\xf6\xff\xf9\xff\x08\x00\x18\x00!\x00#\x00\x1f\x00\x11\x00\xfa\xff\xe2\xff\xd6\xff\xdd\xff\xf3\xff\t\x00\x13\x00\x17\x00\x19\x00\x17\x00\x0e\x00\xfd\xff\xeb\xff\xdc\xff\xd6\xff\xd7\xff\xdf\xff\xe8\xff\xeb\xff\xf1\xff\x05\x00&\x00>\x00?\x00*\x00\x0e\x00\xf7\xff\xe5\xff\xd0\xff\xb9\xff\xae\xff\xc1\xff\xef\xff\x1e\x005\x005\x002\x006\x00>\x009\x00#\x00\x04\x00\xeb\xff\xe0\xff\xe4\xff\xf1\xff\xfc\xff\x06\x00\x12\x00\x1a\x00\x18\x00\x0b\x00\xfe\xff\xf5\xff\xef\xff\xe4\xff\xd5\xff\xcd\xff\xd7\xff\xef\xff\t\x00\x14\x00\x14\x00\x14\x00\x18\x00!\x00)\x00.\x002\x002\x001\x000\x00)\x00\x1c\x00\x0b\x00\xfa\xff\xec\xff\xe3\xff\xe2\xff\xe5\xff\xe9\xff\xeb\xff\xf3\xff\xfb\xff\x03\x00\x07\x00\x02\x00\xfc\xff\xfb\xff\x01\x00\x06\x00\x00\x00\xf2\xff\xe7\xff\xe6\xff\xef\xff\xfd\xff\x0c\x00\x16\x00\x19\x00\x16\x00\x16\x00\x14\x00\x00\x00\xd9\xff\xb1\xff\x98\xff\
x98\xff\xad\xff\xcf\xff\xf0\xff\x02\x00\n\x00\x15\x00 \x00$\x00\x1d\x00\x11\x00\x05\x00\xfd\xff\xf5\xff\xeb\xff\xe3\xff\xe6\xff\xfb\xff\x16\x00)\x002\x00,\x00\x1b\x00\n\x00\x02\x00\xfe\xff\xf7\xff\xec\xff\xe3\xff\xe6\xff\xf1\xff\x00\x00\t\x00\x06\x00\xff\xff\x05\x00\x13\x00\x1f\x00!\x00\x1c\x00\x1a\x00\x1b\x00\x17\x00\x06\x00\xf5\xff\xef\xff\xf5\xff\x01\x00\n\x00\x0e\x00\n\x00\xfb\xff\xea\xff\xeb\xff\x04\x00%\x006\x001\x00!\x00\x0f\x00\xfb\xff\xe4\xff\xd5\xff\xdb\xff\xf4\xff\n\x00\r\x00\x07\x00\t\x00\x15\x00!\x00\x1c\x00\n\x00\xf0\xff\xdd\xff\xd7\xff\xd7\xff\xd4\xff\xcb\xff\xc9\xff\xd4\xff\xec\xff\x03\x00\x0c\x00\x0e\x00\r\x00\r\x00\x0e\x00\r\x00\t\x00\x00\x00\xf1\xff\xde\xff\xd4\xff\xde\xff\xf6\xff\x0c\x00\x10\x00\x07\x00\xff\xff\x02\x00\r\x00\x12\x00\x08\x00\xf2\xff\xe0\xff\xde\xff\xe6\xff\xee\xff\xf7\xff\x01\x00\x0f\x00\x1f\x00/\x00B\x00I\x00>\x00%\x00\x16\x00\x1b\x00\'\x00(\x00\x11\x00\xf9\xff\xef\xff\xf6\xff\xf9\xff\xee\xff\xdc\xff\xd3\xff\xda\xff\xee\xff\x07\x00\x17\x00\x18\x00\x05\x00\xf2\xff\xe7\xff\xe6\xff\xe7\xff\xee\xff\x02\x00\x1b\x00$\x00\x1c\x00\x0f\x00\x10\x00$\x009\x00>\x000\x00\x12\x00\xf2\xff\xd6\xff\xbe\xff\xaf\xff\xb7\xff\xd8\xff\x00\x00\x18\x00\x12\x00\xfd\xff\xf3\xff\xfb\xff\x06\x00\x08\x00\xff\xff\xf5\xff\xf2\xff\xf3\xff\xf3\xff\xef\xff\xea\xff\xe6\xff\xe8\xff\xf1\xff\xfd\xff\x03\x00\x04\x00\x05\x00\xff\xff\xf1\xff\xe4\xff\xde\xff\xdf\xff\xdd\xff\xdb\xff\xe0\xff\xf0\xff\x03\x00\x0f\x00\x15\x00\x18\x00\x1c\x00\x1f\x00$\x00*\x00*\x00\x18\x00\xfa\xff\xe0\xff\xdb\xff\xe3\xff\xea\xff\xea\xff\xe9\xff\xef\xff\xfc\xff\x0c\x00\x1c\x00\'\x00"\x00\x11\x00\x01\x00\xfe\xff\x03\x00\x04\x00\xff\xff\x00\x00\n\x00\x12\x00\x0f\x00\x0b\x00\x12\x00!\x00,\x00,\x00$\x00\x13\x00\xfb\xff\xe0\xff\xc8\xff\xba\xff\xc2\xff\xe1\xff\x0b\x00!\x00\x18\x00\x05\x00\x07\x00"\x00>\x00@\x00,\x00\x17\x00\x12\x00\x0e\x00\xff\xff\xe8\xff\xd9\xff\xe2\xff\xfb\xff\x15\x00\x1d\x00\x12\x00\xfe\xff\xed\xff\xe8\xff\xea\xff\xe9\xff\xe2\xff\xd7\xff\xd5\xff\xde\xff\xed\xff\xf8\xff\xfb\xff\xf8\xff\xf9\xff\x06\x00\x16\x00\x1d\x00\x1d\x00 \x00#\x00\x19\x00\x05\x00\xee\xff\xe1\xff\xdd\xff\xdc\xff\xda\xff\xdc\xff\xe0\xff\xe6\xff\xe7\xff\xf1\xff\t\x00 
[Binary payload omitted: several kilobytes of escaped byte values (beginning \x00!\x00\r\x00\xf2\xff...), which appear to be raw little-endian 16-bit PCM audio samples belonging to a binary test fixture carried in this diff. The byte dump is not human-readable and is left out of the edited text; refer to the original patch for the full data.]
f\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x02\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x
01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\
x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\x
ff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xf
f\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x
00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x0
0\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\
x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x
00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\
x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01
\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xf
f\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x
00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xf
f\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00
\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xf
f\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x
01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00' # --- -# name: test_pre_recorded_message - 
b'\xfe\xff\x04\x00\x05\x00\x03\x00\x04\x00\x03\x00\x02\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xfc\xff\xfc\xff\xfc\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x03\x00\x02\x00\x03\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\xfe\xff\xfc\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xfd\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xff\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfd\xff\xfe\xff\xfc\xff\xfc\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\xfe\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xfe\xff\xfe\xff\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x02\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\
x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xfe\xff\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfd\xff\xff\xff\xff\xff\xfd\xff\xfc\xff\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\xfc\xff\xfa\xff\xfb\xff\xfb\xff\xfb\xff\xff\xff\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfa\xff\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\xff\xff\x00\x00\xfd\xff\xfa\xff\xfc\xff\xfc\xff\xfa\xff\xfe\xff\xfd\xff\xf8\xff\xf7\xff\xfa\xff\xfe\xff\xfa\xff\xf8\xff\xf9\xff\xfa\xff\xfd\xff\x00\x00\x00\x00\x00\x00\xfb\xff\xfb\xff\xfa\xff\xfd\xff\xff\xff\xff\xff\x01\x00\xfc\xff\xff\xff\xf8\xff\xff\xff\x00\x00\xf3\xff\xfd\xff\xf3\xff\xfb\xff\x01\x00\xff\xff\xfa\xff\x02\x00\xf4\xff\xeb\xff\xfc\xff\xf7\xff\xe8\xff\xfb\xff\xf8\xff\xf7\xff\r\x00\xfe\xff\x02\x00\xfe\xff\xf9\xff\xfa\xff\xf8\xff\x00\x00\xf6\xff\xfe\xff\x02\x00\x05\x00\x04\x00\xfa\xff\xf4\xff\xe8\xff\xf3\xff\x06\x00\xf9\xff\x06\x00\n\x00\xf8\xff\xfa\xff\x01\x00\xf4\xff\xfd\xff\xf7\xff\xf4\xff\x01\x00\x05\x00\x02\x00\x04\x00\xfc\xff\xef\xff\x03\x00\xf3\xff\xfc\xff\x08\x00\x04\x00\xfd\xff\x08\x00\x04\x00\x00\x00\x00\x00\x06\x00\x03\x00\xfd\xff\x04\x00\x15\x00\x06\x00\x12\x00\x15\x00\x05\x00\x04\x00\x05\x00\x05\x00\x02\x00\x07\x00\x05\x00\xfc\xff\xfd\xff\x06\x00\xff\xff\xf8\xff\x01\x00\xf2\xff\xe6\xff\xf4\xff\xef\xff\xfb\xff\xfc\xff\xf2\xff\xec\xff\xe4\xff\xe6\xff\xf9\xff\xfa\xff\xee\xff\xea\xff\xe9\xff\xf8\xff\x06\x00\x0b\x00\xe9\xff\x03\x00\xea\xff\xfc\xff\x0f\x00\x00\x00\x13\x00\xe6\xff\xfe\xff\x10\x00\x12\x00\xfd\xff\x03\x00\xf1\xff\xfb\xff\x18\x00\x1f\x00\x08\x00\xfa\xff\xf9\xff\xf6\xff\r\x00\x17\x00\x03\x00\xfb\xff\xfc\xff\xf3\xff,\x00\x1c\x00\xf8\xff\xed\xff\x05\x00\x10\x00$\x00@\x00\x19\x00\x00\x00\x19\x004\x00G\x00]\x001\x00\x07\x005\x00J\x00X\x00\\\x00\x03\x00\xf6\xff\x13\x007\x00]\x008\x00\xef\xff\xeb\xff\x00\x00#\x00\x85\x00S\x00\xb6\xff\xcf\xff\x1a\x00\xc3\xff\xb6\x00\x8a\x00^\xff\xe0\xff\xfc\xff\xba\xff4\x00n\x00\xc5\xff5\xff\xf4\xffR\x00\xe8\xff-\x00\x11\x00z\xff\xb0\xff\x92\x00\xeb\xff\xca\xff\t\x00\xa0\xff\xcb\xff6\x00L\x00\x02\x00\x91\xff\xdb\xff\xd3\xff\xed\xff\xc0\xff\x8b\xff\x97\x00\xe2\xff\x16\x00B\x00\xbc\xff\xfb\xff1\x00\xe4\xff\xed\xff\x95\x00\xcc\x00H\x00>\x00\x03\x00g\xff\x18\x01\x8c\x01\xa8\xff?\xff\xc6\xfeO\xff\xaa\x00\x00\x01Q\xff\xaf\xfe\xce\xfe\xd8\xfe\x7f\xff\xce\xfe\x93\xfd\xb6\xfc\x9c\xfd\xb1\xff\xf7\x00H\x00D\xfe\x8d\xfc\xc2\xfco\xffG\x01r\x00\x94\xffG\x007\x01,\x02\xc0\x02\x18\x01\xaa\xff\xf0\xffS\x00\xbf\x029\x03\xa0\x01p\x00/\x00\xc4\xff\xb3\xff\xd4\xffU\xfdB\xfd\x8b\xfe\xfb\xfe\x86\xfe\x0e\xfd\xba\xfd\xb7\xfd\x8e\xfc\xf0\xfc\x88\xfd"\xfe\'\xfe]\xfe\xfb\xfe\x13\x00\x08\x01\xe1\x00&\xff\xf0\xfe\x05\x015\x01E\x02:\x02G\x02*\x02E\x02\xcf\x02\x1f\x03\xcc\x03\x15\x03N\x03\xdf\x03\x82\x04X\x05P\x05f\x04}
\x04Q\x06\xe3\x06\x9a\x06\x8e\x06\xc7\x05a\x05\xe6\x05-\x06g\x066\x06\x9e\x05\xf4\x03\x9b\x03\x14\x03e\x02\x99\x01\xdf\xff\xa1\xfe{\xfe%\xfe2\xfd/\xfc\xc3\xfa-\xf9\xe2\xf8\xa2\xf8\x8d\xf8\xa0\xf9B\xf9\x15\xf9\xf3\xf8<\xf9y\xfa\xe1\xfa\xce\xfa#\xfb\xa1\xfc\xf3\xfd\xec\xfeE\xff\xc5\xfe\x9f\xfe8\xff\x19\xff\xff\xfe5\xff\xd8\xfe\x90\xfe\x87\xfd\xb5\xfcR\xfc\x18\xfc\xae\xfaI\xf9/\xf9\x14\xf9>\xf9\xb6\xf8d\xf8o\xf8E\xf8\x18\xf8c\xf8g\xfaA\xfb\xe2\xfak\xfb\xda\xfbM\xfd\xa0\xfe\x1c\xfft\xfe\xee\xfe\xf9\xff\x0e\x00y\x00*\x00P\xff\xfa\xfe\x84\xfe\xef\xfd\xd4\xfe\xb3\xfdf\xfd\xfa\xfbq\xfb\xfa\xfb \xfd{\xfd\xe4\xfc\xb3\xfc\xe5\xfa\x97\xfd\xee\xffP\x00o\x01o\x00\xfc\x01\x13\x04S\x05R\x08\x13\x07\xda\x08\xa6\t`\x0cX\x11\x1c\x0f\x88\x0b\xb5\x04\x17\x08\x8f\x17\x8f)\x9f4G+\xa0\x1c\x9f\x12\xe9\x13\x88#\xac+++\x94"\x8f\x1bM\x1f\xa0\x1e\x05\x17\xf1\x04\x17\xf4V\xec\x13\xf0\x0e\xfaJ\xfe&\xf9\xcb\xe7\x96\xd7\xa6\xcf\xab\xd2\xd2\xd9\x95\xdbT\xd9J\xdb\x84\xe2\x98\xe8\x06\xeb8\xe8J\xe5\x93\xe5\xfa\xea\xa2\xf8:\t\xe9\x11\xd3\x10c\n\x97\x05\x1a\x08\xbb\r\x94\r\x15\x0e\xef\rz\x0eU\x10\xc1\r+\x08\xbd\xfd(\xf24\xeaj\xec\x1f\xf3H\xf7\x0e\xf5\x07\xed\xcb\xe6\x9f\xe2\xe1\xe2\xdc\xe5&\xe87\xed\xcb\xf1\x13\xf8\xf9\xfe*\x039\x04\xda\x00h\xff\xe3\x03\xdf\x0eY\x18\xf4\x1d\xa7\x1d8\x1a=\x16\xad\x12\xa8\x118\x11\xa7\x10\xaa\x0e\xfb\r`\x0c}\n\xd2\x06|\xfe@\xf5\x11\xf1U\xf2K\xf6\x9c\xf9\xf3\xf8\xf8\xf5\xc6\xf2\xb5\xf0\x1a\xf2\xb6\xf4\xa5\xf6\x87\xf7~\xf9\xa3\xfd1\x01{\x03\xff\x01\xfc\xfc\x1c\xfb\xa7\xfb\xf7\xfd\x85\x00\xb1\x00\xaf\xfe\x01\xfc\xf1\xf9x\xf7\xab\xf6c\xf4M\xf2f\xf2?\xf2\x1f\xf7 \xf8\xf7\xf7!\xf6W\xf1@\xf34\xf5\xce\xf8\xdb\xfdA\x00\x9f\x02[\x03\x18\x04\x0b\x01\xb0\x02\x0c\x06>\x08 \x0b\xfa\n\xc6\x0e\xaa\x12K\x13>\x12y\x11o\x11C\x17\xe6\':7w9 /\x0e$\'$\xcb)\x04,\xc8+\xc3+\x9a)t"`\x18\xf0\x0f\x88\x07s\xfc\xe7\xef\xf7\xe7\x9a\xe9\x0c\xed3\xec\xcf\xe4*\xda\x11\xd1\xab\xcc\xf6\xcc\x00\xd1^\xd8\x93\xdf\xb2\xe4\x08\xe75\xea\xad\xefZ\xf3`\xf4\x0c\xf6\x05\xfd/\tU\x13\xaa\x17\x06\x16,\x13-\x10\xdb\r\x18\x0c"\n\xe7\t\x8b\x08\xef\x04\x19\x00P\xfb\xd8\xf5\xbe\xed{\xe4C\xe0\x04\xe1\xff\xe2\xe4\xe2\xaf\xe2\x89\xe2N\xe2\x9e\xe2 \xe4{\xe8\x1e\xef\xf5\xf5\x1a\xfc\xf2\x01\x9e\x08\xba\x0fh\x12\xf5\x14\xcc\x17\xb1\x1cb \xb8 9!\xa6 
|\x1f\xbe\x1bu\x17\xef\x13~\x0fo\nt\x05\xef\x00;\xfd\xe3\xf9L\xf6\xab\xf1&\xef\xc7\xed\xf0\xec\x1a\xed\xf4\xec;\xee/\xf0\xa9\xf22\xf68\xf9\xf3\xfax\xfb\xd2\xfd(\xff\x00\x01\x9d\x02\xad\x02T\x03R\x02\xf6\x00$\xff\\\xfd\x9e\xfa\xef\xf6\x91\xf4\x1d\xf3K\xf3\x80\xf2\x88\xf0X\xed#\xec\x8f\xebb\xeaK\xeb\xdc\xec;\xf0\xf2\xf3\x15\xf5\xfe\xf7\xb2\xfa\xf3\xfb(\xff\xc9\x00`\x03]\t\x0b\x0cW\x10O\x14\xbf\x14\x9c\x14\xb6\x11\x81\x11X\x14y\x17f\x1b\xce&c9vB\x96:,&\xd5\x1b\xbc%\xf20H4.3\x983\xaa0P%B\x15\xa8\n/\x03i\xf8#\xf0\xcf\xf03\xf9"\xfc\x8a\xf1\xb0\xde{\xd1\xf9\xcc\xa9\xcc>\xcfw\xd67\xe1\xb8\xe7F\xe7\x06\xe6\xf1\xe7\x0c\xe9C\xe8\x0c\xea\xa2\xf2\x83\x00k\r\x91\x13\xb1\x13X\x0f\xa4\n\x13\x07(\x05\xe9\x06L\n\x00\x0b\xbb\x08\xe9\x04\x01\xffN\xf7\x97\xee\'\xe6\x9c\xe0\x95\xdeu\xdfX\xe3\x14\xe5\xba\xe4\xee\xe1\xad\xddE\xdc+\xe0\x10\xe8\xab\xf0\xbc\xf7\x06\xfet\x05\xdf\n\xc3\x0fH\x13)\x16W\x18\x7f\x1c\xc3"\x14)\xaf+<)k$0\x1e8\x19V\x15\r\x13\xd7\x0f3\x0c\x0e\x08\xb8\x01l\xfa\xe3\xf3\x05\xef\x17\xec_\xeah\xea\xa6\xeb\xfd\xec`\xeex\xef\x82\xf0\xcf\xf0U\xf2\x9d\xf5S\xfa\x89\xff\x1f\x02[\x03>\x039\x02L\x01\x0b\x00\x1e\x00\x11\x00\xb5\xfe~\xfcv\xf9!\xf7\xb5\xf3\xde\xf0Z\xed\xac\xeat\xe9\xc3\xe8\xdd\xe9\xf5\xe9\xe5\xe9\xce\xe8+\xe9_\xeb\x0f\xee\xa8\xf3x\xf6\xc7\xf9\x8d\xfc`\xfe\xd3\x03R\tA\x0f\x15\x12C\x12\xdb\x12\xfd\x14%\x1bK\x1eN\x1f\xd3\x1f\x08#\xa5.\xe7<\xb7D\x99?\x101*&\xc8&O.N3\xac2w/\x03(\xf0\x1c\xbe\x0e\xd9\x03\x90\xfd\x9f\xf5*\xefz\xeb\x04\xed\x81\xee\xac\xe9\x04\xe0\xcd\xd4`\xcdL\xcc7\xd2x\xdb0\xe4?\xea\x03\xeb\xe6\xe9\x1d\xeaH\xed\xd2\xf2\xfe\xf6\xc3\xfc\x11\x04\xa6\x0cc\x12H\x14\'\x12\x06\r!\x08h\x04\x1c\x03\x0c\x03\x10\x03{\x02\x18\xff?\xf8\xe8\xef\x1a\xe7\x00\xe1\x1e\xddb\xdb\xcf\xdby\xdd)\xe0=\xe2T\xe2\xf5\xe1\xac\xe2\xcf\xe4\xd4\xe8\xbc\xef?\xfa\xf3\x03k\x0cF\x12A\x16R\x18F\x19U\x1c\xca w%\x9a(\x99)])S&(!\xa5\x1aD\x14\x12\x0fP\x0b\x12\x08g\x04q\x00\xa2\xfb\xf8\xf5x\xef1\xea^\xe7\x82\xe7\x04\xe9p\xeb\xc0\xed\xf6\xef\x94\xf1\x94\xf2#\xf3v\xf4k\xf7j\xfb\xbf\xff\xcc\x03N\x07\x1f\x08\xbc\x06\x03\x04\xe8\x01\xe7\x00\r\x00\xf5\xfeo\xfdE\xfbl\xf8\x8e\xf5|\xf1;\xed\'\xea\n\xe8,\xe7\xe3\xe7,\xe9d\xea`\xea#\xe9\xf5\xe8\x88\xea\x11\xede\xf2\xcb\xf7`\xfc\xda\xff7\x00l\x01H\x04g\tX\x0f\x93\x13\x1c\x162\x18S\x1c\xa5!\xa5,&=\xe5J\x93L\xaf>[1\x0e1\x819x?j>\x0b;\x8c6\xc3,\\\x1de\x0e\xb3\x04X\xfc8\xf35\xeb_\xe87\xebR\xeb\xf2\xe2\x8e\xd4\x8c\xc9\xad\xc6\xb1\xc9\xe4\xce\xc2\xd5c\xde\x8d\xe5x\xe8\xc8\xe8\xd7\xe9\xc7\xed\x9a\xf2\x15\xf7\xa0\xfc\x87\x04\xc1\x0fd\x19V\x1d\x97\x19a\x12\x93\x0b\xee\x06\xf7\x04E\x05\xe8\x06\x05\x07\xbb\x02\xb6\xf9\xbd\xee|\xe5P\xe0\xa5\xdc\x98\xd9*\xd8\x86\xd9;\xdc\x05\xde>\xde]\xdeg\xdf\xd7\xe1\xea\xe6\r\xeeE\xf7`\x01 \nM\x0fI\x12C\x15T\x1a\x1a \x91$q\'\xa6)\x89*H)\xb4%\xf8 6\x1d\x80\x19\xe1\x14d\x0f|\n\x8f\x06\x83\x02[\xfc\xd1\xf5\'\xf0T\xec\x99\xea4\xea\xad\xeaP\xeb1\xec\xfa\xec\x9a\xed\xb2\xeeo\xf0|\xf3]\xf7!\xfb8\xff\xe3\x02\xed\x05\xd4\x07\xc2\x07b\x06\x9a\x04\xc7\x03\x97\x03\xcc\x03\xad\x02\xe1\xff\xe6\xfb0\xf7X\xf3G\xf0t\xed\xcb\xea\x80\xe8\x08\xe7b\xe6\xab\xe6J\xe7\xef\xe75\xe9\xc1\xea\xde\xec\xcd\xef\xd2\xf2\x93\xf5\xdc\xf7#\xfa\xce\xfd\xc7\x02:\x07Y\x0b\xc3\x0e\xc8\x10\x82\x11]\x11\xb9\x12\x98\x16 
\x1c"%b3+C\xd8JVD\x845\xd2*\x89)I-\xca3X;\xc0?\xc3;\xfd,\r\x19\x8f\x08r\xfd\t\xf8c\xf5^\xf2\x06\xf0W\xedR\xea\x0c\xe5\xdb\xdc\xe9\xd2\xee\xca\x87\xc6\xf4\xc5\xe7\xcaP\xd5\xb0\xe1\x8c\xeaM\xec\xdb\xe8\xc5\xe5\x99\xe5U\xe9G\xf1\xd1\xfc\x8b\t\xff\x12+\x16\x90\x15\xc9\x13\xe8\x10J\x0cQ\x07\x99\x04U\x05\x85\x07\x06\t\xb2\x08\xd9\x04\xa1\xfck\xf1)\xe6\xe3\xddA\xda:\xdc\xe1\xe1\xb4\xe7\x93\xe9\x90\xe7\x0f\xe5K\xe3b\xe3\x14\xe6\xc6\xeb=\xf4"\xfdI\x05\xaa\x0c\xd7\x12\xaf\x17}\x1a\xe9\x1a\x8e\x1a\x07\x1b\x97\x1e\xc6#\xc3(M*\'(\xe4"I\x1b\xd4\x13\x9c\r\\\n\xdc\x08\xcd\x07\xaa\x04b\xffH\xf9\xba\xf3f\xef\x0b\xec\xb7\xe9\x89\xe8=\xe8\x8a\xe8\xc4\xe9\x18\xec\xf7\xeeK\xf1\x00\xf3\x02\xf4\x11\xf5\x87\xf6\xd9\xf9\x0b\xfe\xdf\x01b\x04\xee\x04\xd1\x04o\x03\xfc\x00\xd8\xfe\x9f\xfd\x06\xfdd\xfc,\xfa\xa9\xf7A\xf5M\xf3\x85\xf1\xab\xef\xac\xed\xa8\xec\xd0\xecp\xed\xae\xee\xda\xefn\xf1\x8a\xf3"\xf5\xc6\xf6\x19\xf9\x0f\xfc_\xff$\x02\x0e\x05d\x08_\x0c\x01\x10&\x12\x17\x13Q\x13\x14\x142\x15\x0b\x18b\x1fK,\xd89\x86@\x8e<\x991\xf4\'t$\xb5&\xcf+\xfa1\xa16\x036\x7f-s\x1e.\x0e\x1b\x02\x97\xfb\x81\xf93\xf8\x1d\xf55\xf0k\xeat\xe5\x7f\xe0\xaa\xda\x00\xd4\xfb\xcd\xa8\xc9r\xc8i\xcb\x1d\xd2T\xda\xf2\xe0g\xe4\'\xe5]\xe4\xb4\xe3e\xe5\x07\xeb\xf4\xf4\xcd\xffL\x08\x12\rD\x0f\xfc\x0f\xa4\x0f\xea\r\x13\x0cE\x0b\xca\x0b.\rj\x0e\xa3\x0e\xe6\x0c\xf7\x08\xf9\x02\xd0\xfb\x84\xf4\xd9\xee]\xec\xcd\xec\xca\xee \xf0\x16\xf0:\xef\xb4\xed\xfe\xeb\xe5\xea\xdc\xeb\x94\xefW\xf5\xf7\xfb\xe6\x01\x90\x06\x11\n\xf5\x0c?\x0f\xd3\x10\xb1\x11\x0c\x13c\x15\x8a\x18\xef\x1a\xfb\x1bK\x1b+\x19\xa0\x15\xed\x10\xec\x0b/\x07\xe3\x03\xdd\x01\x94\x00\xfc\xfe\xba\xfc\x8d\xf9\xe5\xf5\xef\xf1g\xee\xe3\xeb\xf6\xea\xb1\xebx\xed\xac\xef\xa0\xf1\xea\xf2k\xf3y\xf3q\xf3\xf6\xf3\x0e\xf5\xd4\xf6\xfe\xf8\x7f\xfb\xd0\xfd\xa0\xffX\x00\x1c\x00G\xff&\xfeV\xfd\xbe\xfc\xcb\xfc2\xfd\xe5\xfd\x94\xfe\xce\xfe^\xfep\xfdf\xfc\xb9\xfb\xa3\xfb\x05\xfc\x18\xfdo\xfe\xce\xff\xf0\x00\x88\x01\x8a\x01\x0b\x01B\x00\xea\xff\xa1\x00A\x02|\x04y\x06\xc5\x07V\x08\\\x08\x08\x089\x08\xff\x08\xe5\nj\x0ej\x14\x14\x1ds&\xde,\xc6-\xd4)\x19$\x17 \x8c\x1f\xa9"\xf1\'?-0/\xba+\x88"\xa1\x15\x97\x08\xc7\xfe\x01\xfa\xed\xf8x\xf8\xf3\xf5\x84\xf0$\xe9c\xe1\x19\xdaC\xd4Z\xd0\x97\xce:\xcel\xce\xf4\xceF\xd08\xd3\xe0\xd7\x9d\xdd5\xe3e\xe7\xe5\xe9\xfc\xebr\xef^\xf5\x06\xfd\x90\x05\x86\r\xf0\x13l\x17\x9a\x17\x95\x15P\x13\x88\x12\x99\x13\xdc\x15\xc4\x17$\x18\xed\x15\x94\x11\xd0\x0b\xd4\x05\xb0\x00\xf9\xfc\xcb\xfa^\xf9\xfc\xf7\x05\xf6\xca\xf3\xbd\xf1t\xf0\xab\xef\xfb\xeeA\xee\xcb\xed\'\xeeW\xef\xa4\xf1\x13\xf5Q\xf9\xa3\xfd\xe0\x00\xa2\x02\xd4\x02F\x02\x90\x02[\x04\xb5\x07\xa1\x0b\xd9\x0e\x0f\x11\xc5\x11\xd9\x10\xc1\x0e\x02\x0c\xdf\t\x95\x08E\x08N\x08\x08\x08\x05\x07&\x05\xe8\x02\x0c\x00\x81\xfc\xa2\xf8o\xf5\xc7\xf3\xaf\xf3\x81\xf4\xb5\xf5\xd4\xf6_\xf7\x1a\xf7\xdb\xf5\x07\xf4c\xf2\xc9\xf1\xff\xf2\xd9\xf5\x93\xf9\xcf\xfc\xf4\xfe\x06\x00d\x00M\x00\xd8\xff\xd5\xff\xb2\x00v\x02O\x04\xaa\x05Q\x06;\x06|\x05,\x04\xb0\x02n\x01\xbf\x00n\x00z\x00`\x00\xe1\xff\xdc\xfe\x7f\xfd?\xfca\xfb\xea\xfa\xdf\xfa\xf8\xfa\x13\xfb,\xfbH\xfb\x96\xfb\xce\xfb\x18\xfc\xd4\xfcx\xfe\xc5\x00z\x03\xea\x06$\x0cG\x13\xd5\x1ao \xec"\xea"\xb2!\x7f \xe7\x1f\xfc 
\x00$=(\xba+\xfd+q\'\xb1\x1e\xe7\x13\xd3\t\\\x02<\xfe\x9c\xfc\xde\xfbO\xfa\\\xf6\x96\xef\xb6\xe6\xa9\xddf\xd6\x93\xd2&\xd2:\xd4:\xd7\xfa\xd9\xed\xdb4\xdd<\xde\x85\xdf\xe4\xe1\x8a\xe5\x88\xea/\xf0.\xf63\xfcv\x02o\x08\xaa\rX\x11:\x13\x80\x13\xc9\x12\xee\x11\x85\x117\x12\xbe\x13\x87\x15\xe7\x15\xdb\x13\xf0\x0e0\x08\x1a\x01\x08\xfb\x16\xf7>\xf5#\xf5;\xf5`\xf4\xcd\xf1\xed\xed\x15\xea\x85\xe7\xa2\xe6;\xe7\xc7\xe8+\xeb"\xeeS\xf1\xb5\xf4\xff\xf7B\xfbA\xfe\xb6\x00\xaf\x02Z\x04\x84\x06\xb6\t\xff\r\x8f\x12R\x16C\x188\x18\xc6\x16\xa0\x14\xb2\x12O\x11\xd0\x10\xdc\x10\x8d\x10\xfb\x0e\xe7\x0b\xd2\x07\x98\x03\xe3\xff\xa1\xfc\xb8\xf9\x18\xf7\x02\xf5\xb3\xf3\x16\xf3\xf5\xf2\x10\xf3#\xf3\xf9\xf2X\xf2h\xf1\xba\xf0A\xf1Q\xf3\xa8\xf62\xfa\xf9\xfcV\xfe~\xfe<\xfe8\xfe\xf7\xfe;\x00\xf0\x01\x80\x03\x81\x04\x84\x04\x8e\x03\x06\x02\x91\x00\xb7\xffK\xff\x0e\xffe\xfe\x8b\xfdg\xfcX\xfb`\xfa\x8a\xf9\x1e\xf9\x11\xf9;\xf9!\xf9o\xf8\x8e\xf7\xf6\xf6f\xf7\x07\xf9m\xfb\xea\xfd6\xff[\xff\xe7\xfe\xf5\xfeS\x00\x83\x03+\t\xe2\x11:\x1c\t%\xfd(\xb5\'\xf1#0!\x82!\xdf$K*\xe3/\xb93\xdc3m/|&\xe7\x1a\xec\x0fb\x08\xd5\x04\xa0\x035\x02.\xff\x08\xfa\x96\xf2Q\xe9o\xdf\x1c\xd70\xd2\'\xd1\xbe\xd2f\xd5\n\xd8H\xda\x0e\xdcd\xdd:\xde.\xdf\x93\xe1(\xe6\n\xed\xd4\xf4V\xfc\xb6\x02\xfc\x07\xdd\x0b9\x0e\xf0\x0e\xb1\x0eI\x0e[\x0e\xfe\x0e\xd1\x0f\xaf\x10\x00\x11\x18\x10\xc5\x0c\xda\x067\xff\xce\xf7b\xf2p\xef\xe7\xee\xa8\xef\x80\xf0\x07\xf0\xbc\xed7\xea\xe2\xe6a\xe5\x92\xe68\xea\x00\xef\xce\xf3\x03\xf8\xab\xfb\xc6\xfe~\x01i\x04\x89\x07Z\x0b\x19\x0f\x9e\x12x\x15\x94\x17\x8d\x19\xea\x1a\x8e\x1b\xe7\x1at\x19\xaf\x17\x00\x16]\x14\x87\x12p\x10\xe7\r\xfc\n\xb4\x076\x04v\x00\xc4\xfcF\xf9\xf0\xf6z\xf5w\xf4J\xf3\xdf\xf1\xce\xf0\xf4\xefj\xef\'\xef\xa8\xef\r\xf1\xd7\xf2\\\xf4U\xf5=\xf6O\xf7\xb8\xf8\x18\xfa \xfb\xa2\xfb\xee\xfbl\xfc\x08\xfd\xb0\xfd\xdb\xfd\xc1\xfd[\xfd\xd5\xfc\xf4\xfb\xd8\xfa\x00\xfa\xcb\xf9/\xfaa\xfa\x0c\xfa\x02\xf9\xdf\xf7\xf5\xf6\xb3\xf6\xe2\xf6_\xf7.\xf8\xf0\xf8m\xf9U\xf9\xc9\xf8~\xf8\xef\xf8c\xfa\xa9\xfcp\xff\x81\x02\xfe\x04\x80\x06E\x07T\x08\xa3\n\xe8\x0e\xec\x15\xf8 E.\x9e8\x88:Z4\x0f,\x91(5,=4\x87<\xe9@v?j7u*z\x1br\x0ey\x06\xef\x03\x94\x03\xd2\x00s\xf9\t\xef\xe5\xe4%\xdcE\xd4\xc1\xcc\xc5\xc6\xa8\xc3I\xc4\xe9\xc7I\xcd\xac\xd2.\xd6\x82\xd7\x9f\xd7\n\xd85\xda\x07\xe0\x92\xea\x99\xf8L\x05\xf8\x0c|\x0f\xb0\x0fK\x0f\x9e\x0e\xd6\r}\x0e\x8c\x11\x9b\x15G\x18\xdc\x17D\x14\xaf\r\xe0\x04J\xfb \xf3\xec\xed\\\xec<\xeeN\xf1\x84\xf2\xdd\xef$\xea\x9a\xe4\xcb\xe1\\\xe2b\xe5\x8d\xeaZ\xf1\x9b\xf8\xe3\xfe\xfb\x02n\x05:\x07\xcb\t\xa2\x0cD\x10\x9e\x14\x11\x1a\xc1\x1f\xfa#\x18%\x8a"\x90\x1e\xee\x1a\x98\x18k\x16\xb7\x14\x80\x14\xea\x14\x8c\x13\x1f\x0e\xbd\x05j\xfd\xa3\xf7\xee\xf4Y\xf4\xd5\xf43\xf5W\xf4E\xf2,\xefg\xeb\x19\xe8K\xe7\xb0\xe9\xde\xed\xe8\xf1 \xf5\x95\xf7\x88\xf8\xa0\xf7\x9b\xf5N\xf4\xe8\xf4)\xf7#\xfa\xfc\xfc\xe9\xfen\xff\x9a\xfe\x8e\xfc\x92\xf9\xd3\xf6y\xf5A\xf6z\xf8\xc1\xfa\xa2\xfb\xdb\xfa\x97\xf9K\xf8\x84\xf7\xe6\xf6\xdc\xf6\x0c\xf8\xaf\xf9\x99\xfb\xe1\xfc\'\xfd\x81\xfdc\xfe\xe5\xff-\x017\x01\xcf\x00W\x01\xf3\x02\x1e\x05\xbd\x06\x7f\x07O\x08g\t\x1d\x0b6\r\xd3\x0f\xfd\x12\xef\x17\xe3\x1e\x16\'\x11/ 
414\x84/\xb7)\x1e(\x1e-\x125\xad9\xfc6\xef.\t%\xd0\x1ab\x10?\x07\x8b\x01\xa7\xfe\xba\xfb\xd3\xf5S\xed\xb1\xe3\x96\xda\xd4\xd2\x04\xcd8\xc9\x0e\xc7\xe3\xc6\xc5\xc9\xb4\xce\x9b\xd2I\xd3M\xd2\xe7\xd2\x0e\xd7u\xde\x11\xe8\x8a\xf29\xfc\xf5\x03\xcd\x07j\x086\x08\xba\nO\x105\x16&\x193\x19#\x18\x92\x16\x8d\x13\xe3\x0e\x90\t\xb3\x04\xc0\x00|\xfd\x88\xfa\xad\xf7\xbe\xf4\x03\xf2\xd0\xee\xba\xea\xec\xe6\xaa\xe5\x0f\xe8\x01\xec\xfa\xeeV\xf0D\xf2\xfc\xf5W\xfaN\xfe\xa1\x01\xee\x057\nV\x0ex\x11\xe6\x14\x11\x18\x02\x1b<\x1c\xc0\x1b\x8d\x1a\xee\x18\x80\x18n\x17\x9b\x152\x12\n\x0f\x1b\r\x92\x0bd\x08\xba\x02\x8f\xfc\x14\xf8\t\xf6^\xf5\xe0\xf4\xfb\xf3\xa3\xf2\xb6\xf0\xd1\xee\xb5\xec,\xeb\x96\xea\n\xecr\xefM\xf2\xf7\xf3\xbe\xf46\xf5\xff\xf4\xac\xf3\xc7\xf2/\xf4\x84\xf7\xbd\xfa\xbc\xfc4\xfd\xbc\xfc\x85\xfb\xcf\xf9\x85\xf8P\xf8\x9c\xf9i\xfcP\xffu\x006\xff\xea\xfc\x1e\xfba\xfa\xd4\xf9\xde\xf9\xa2\xfb\x1f\xfey\xff#\x00\x8f\x00T\x00\x10\xfd~\xf8\xe3\xf8\xcf\xfe\xc9\x05B\tW\x07\xfc\x03V\x01P\x01}\x03\xb6\x05=\x08J\n\x04\r\xd1\x0f;\x17S$j/a.\x99!V\x1a<"p0\xb47\xab7N7S6\xe4/\x80%\x86\x1d\xcb\x183\x13z\r\x14\n\xde\x08\xcf\x04\x9d\xfb\xeb\xee^\xe0E\xd5\x90\xd0\x1c\xd2Y\xd5\xde\xd4\xf7\xd1\x14\xcf\xa0\xcc\xdb\xc9\x07\xc8\x8e\xca\xd8\xd1m\xdb\xa0\xe4\x8a\xec\xb9\xf3\xd1\xf7K\xf8\x9b\xf8\'\xfd\x9f\x05\xfd\rr\x13\xa7\x17\\\x1a\t\x1bi\x18\xf2\x12\x86\r\xb0\n\xa8\x0b/\r\xbb\x0bY\x07\xbc\x01\x88\xfb\xb8\xf4r\xee\xf5\xea\x9e\xea\x04\xeb\xf7\xeb-\xedk\xee\xdd\xeda\xec\x1e\xec\x0f\xeeL\xf2\xf7\xf8x\x00\x11\x07\xcf\t\xc3\n\xc7\x0ba\x0e,\x110\x13\x14\x15\xfb\x18\xdc\x1di \xe9\x1e\x1c\x19S\x131\x0f\x11\x0f\xbd\x10\xd7\x10W\r8\t7\x05d\x00\xc4\xfa\xfe\xf5\xda\xf3\x8a\xf2\x0e\xf2\n\xf3Q\xf3r\xf1\x8c\xed\x03\xea\xbb\xe8\x1e\xea\n\xed\xa8\xf0!\xf3\xbb\xf3\xad\xf2O\xf2\x96\xf2\xec\xf2\x98\xf3\xf1\xf5!\xf9\xaf\xfb\x16\xfd\xa3\xfd\xaf\xfc\x95\xfbs\xfb\xfb\xfc\xd8\xff\xfa\x01\xa4\x02b\x01\xe7\xffQ\x00\xa7\x00\xc9\xff\x9d\xfeD\xfdS\xfe7\x00\xd9\xff?\xff\xb6\xfd&\xfd\xdd\xfd\x99\xfe\xdc\x00\x80\x02n\x02\xe9\x02\xc3\x03\xea\x03F\x04\xe7\x04\xe1\x062\t\x01\x0b\x83\r\x8f\x0fH\x15t\x1fJ);\'&\x1e\xc6\x1cU& 0\x0f0p.\x8e1\xc22\xe1,\x06%Z \x9f\x1b\xd7\x13\xe7\rk\x0e\x89\x0e\xe4\x06\xeb\xf9f\xeef\xe7\x0b\xe2A\xdc\xd5\xd8e\xd7\t\xd6\x82\xd3\xbb\xd1\xee\xd0d\xcf?\xcd\xf8\xcc\x14\xd1\xec\xd7\x1b\xdfS\xe5\x06\xea:\xed\x08\xf0\x1b\xf4"\xf9@\xfeO\x02\xb3\x06(\x0c\x8d\x10\xc7\x12\xf6\x12\xa1\x11K\x0f\xa9\x0c\xef\x0be\x0c\x0e\x0c\x06\n\x98\x07\xbb\x04\x08\x00w\xfa\xe9\xf6\xa1\xf4q\xf26\xf1m\xf2\xcc\xf5\xd2\xf5c\xf3.\xf1L\xf1)\xf2m\xf4\x95\xf8~\xfe\x0b\x03/\x060\t\xf0\t\xa6\x08\xe1\x07\x1a\n\xe3\x0e[\x15\xf7\x1a\xfd\x1b\x96\x17\xef\x11\xb7\x0f\x05\x10\x08\x0eM\x0c\xd0\x0be\x0c 
\x0cK\t\xdc\x03\xa2\xfc\x90\xf5\xc5\xf1\xfa\xf2\xec\xf4r\xf5\xe8\xf3\xdf\xef\xe3\xeb\xf7\xe8p\xe7\xdb\xe6\xd3\xe6\xbc\xe7a\xeb`\xef\xde\xf1\xc6\xf1\x86\xf0M\xef\xf1\xf0D\xf4\xb0\xf7Y\xfb\n\xfe\xa0\x00#\x03\\\x03z\x02\x94\x01P\x01`\x02\xf2\x03/\x08\x92\x0bh\t\xa8\x06\xf5\x03\xcc\x03$\x05\x16\x03\x01\x03\xaf\x05\xea\x06\x85\tW\n\x89\x07}\x01%\xfe\xd2\x01o\x04\xee\x05\x11\tN\n\xb9\x07H\x06v\x06\xd5\x06\xf8\x05\x8d\x05]\x07g\x0b\xee\x12b\x1a\xb1\x1b\x05\x17\\\x14?\x16\n\x19H\x1b\xe8\x1d\x8e!\xf7"l!\xf4\x1f\xbd\x1e\xe0\x19\xfb\x112\r\xa6\r\xf4\r\xec\n%\x07c\x03\x94\xfc\xff\xf3Z\xeeV\xeb{\xe7\xa2\xe2\xd3\xe0Y\xe1w\xe0Q\xdd8\xda\xf4\xd7O\xd6;\xd5\x8b\xd7\xb7\xdc{\xe1+\xe4\xef\xe5-\xe91\xec\xa8\xedc\xf0\xfb\xf4\xb9\xf8-\xfd]\x02\xac\x06\x0f\t\x97\t>\tf\t4\nW\x0b9\r\x02\x0eF\x0e\xb7\rM\x0c\x90\n\x9d\x07\xcc\x04L\x03\xc0\x024\x03$\x04]\x03\xfa\x01\x8e\xff\xe1\xfd\xc4\xfd\xdd\xfd\x8d\xfd\xdc\xfe,\x01J\x02B\x03\xdd\x041\x05J\x03\xfb\x01\x87\x03K\x06R\x07\x15\x08\xd1\x07V\x08m\x06\x84\x03\xdd\x02\x87\x008\xffT\x00\r\x002\xff\xe9\xfd\xcf\xfbJ\xf8e\xf6\xe0\xf60\xf5\xbc\xf2\xf5\xf4\x14\xf8\x12\xf6\xd9\xf0%\xf5\x06\xf8\x96\xefi\xf0\xe3\xf7 \xf9\xd0\xf5\xf1\xf6x\xfe\xed\xf9\xae\xf6z\xfc\xf1\xfb\xf1\xf7\x86\xfbc\x02\xae\xfd\x85\xfb+\x06\xdf\x01\x10\xf7V\x00r\x08\xcb\xfe`\xfa\x85\t\x97\x0f\xce\xfcy\x01\x17\x16\x84\x07\xa1\xfc\xe7\ny\x11\x02\t\xa3\x08\x1e\x12\x93\x0e?\x05\x8b\rQ\x11\x87\n(\x08\xe4\x0b[\x0c\xae\t)\x0c\x8c\x0c\x07\tZ\x03a\x05\x01\x07\x9c\x04\xeb\x03\xf9\x02\x8a\x04\xe4\x04\xdf\x00\xc9\x01e\x05\xf9\xfe\x03\xfe!\x06@\x06S\x01\xac\x05V\x08\xba\x05]\x03\xfd\x04\x8d\x07\xb2\x05\xc0\x01\xba\x04\xd8\x07F\x05\xcb\x02\xd8\x02i\x00\xcd\xfb$\xfb>\xfc\x15\xfb\x02\xf9\xe4\xf6\x04\xf5\x9e\xf4\xdc\xf3\xbd\xf1\xa0\xef\x8c\xefD\xee\xe4\xed\xd3\xf0c\xf2\x82\xef\x1b\xf0q\xf2&\xf4\xf6\xf2F\xf7B\xf9\xe0\xf5j\xfb\xae\xff\xe3\xfe\xa3\xfd\xbd\xff#\x03\xa9\x02<\x02\xab\x04F\x07\x1b\x04\xcb\x02\xb5\x07\xc2\x06\xa6\x03\xc2\x05\xa5\x04\xc9\x04\x96\x05\x13\x04\x10\x05\xcc\x04\xa3\x02r\x03\xb7\x04\xcf\x04\xd3\x01\x83\x040\x02\xb2\x01j\x04:\x01\x1b\xff#\x02\xb1\x01\xd3\xfb\x08\xfc\xc7\x00%\xfd\xec\xf4\xa3\xfb"\xfc\x08\xf7\xd4\xf7?\xfb\xb1\xf4|\xf2\x16\xf9\xd9\xf8Z\xf2\xf4\xf4V\x01\'\xf3\x15\xf4)\x00\x1a\xfdY\xef\xaf\xfb\x9c\xffd\x01\xc5\xf8\xcb\xf8\x95\x0c\xbc\xfea\xf6\xa7\t\xd0\x05%\xfaC\x047\x06C\x04\x06\xff5\t\xe7\x06\x07\xf8\xec\x07}\x0c\xa3\xfa\xbf\xf8"\x08\x81\x08\x03\xf7\xeb\xfb\xcb\x0e\xd2\xff\xf3\xf8\xb8\x05\x88\x03U\xfd\xad\x019\t~\x00\xc4\xff\x98\t\xbb\x04*\xfe\xa1\x0bk\x0c\x97\xfdI\x08M\n\xb9\x04\xe7\x04\xb6\n\xc3\tV\x05\xc3\x08\xed\x05\xf3\x03\xe7\x0cY\x07\xdd\xfbk\x05D\x10\xf5\xfa\x1a\xfd\xee\x10E\x01C\xf7\xf7\x04\x88\x05\xf4\xf4G\x04b\x00\xc3\xf8\xea\x00b\xfb\xc9\xfb\x8d\xfcG\xff\x18\xf7j\xf3\xda\x03,\x03\xd3\xf3\xb8\xf8\xc7\x06\x1a\xfa\x8d\xfb\x08\xfe\xdc\x03\x80\x00\xdc\xfa\xbe\x08\xa8\x02:\x00\xd2\x05\x9d\x05\x1f\x00[\x03\xbd\x06\x91\x03\xf4\x03\xf8\x03P\x04\x18\x02f\x01\xc6\x01\xfd\x01\x8a\x01\xa3\xfd@\xfd\x11\x05#\xfdG\xfa\xd2\x01g\xfdg\xf7\xe8\xfd\xb7\x02\x94\xf7\xf8\xf5q\x02\xf7\xfd\xda\xf3\xa8\xfd:\x01\x86\xf7D\xf5\xc7\x03\xa8\xfd\xc8\xfbt\xfc@\x00 
\xfe\x04\xfee\x060\xfa\xf7\xf8\xcc\x06\xdb\x00\\\xfa\x11\x00\x07\x03\xa3\xfb|\xfc\xc7\x04\xef\xf9\x17\xf8\xe5\xfc\xd7\x05\xb0\xef\x81\xfa\x1f\x08\xdd\xf8Q\xf1\xe7\xfd\x88\x01\x89\xf5Q\xf3\x00\x00\x08\x04\xd6\xed\xf1\x01\x8f\x00?\xf4O\x01\xcf\xfa\x89\xff\xfe\x01\xa0\xf8y\x06T\x01\x1c\xfbD\t[\x05\x85\xfa}\t\xf5\x07n\xfd\x0f\n\t\x04\xa1\x02\xa4\t\xff\x01c\x01V\x0bv\x04\x7f\x00\xf9\x08R\xff&\x04]\x07\xfa\xfc\n\x01P\n\xcb\xfc*\xff\xff\x05,\xfdb\xff\xe1\xffH\x06\x1e\xfbS\xf1\xbd\x07@\x10n\xeb\x1c\x01\xcf\x0b0\xf5\x9e\xf2(\x0c\xb1\x06\xb4\xef"\x01\xaf\t\xd3\xfeF\xf7`\x05Y\xffy\xff\x10\xfae\x05\x83\x04\x80\xfc\x97\xfd\x0f\x07I\x02\xfb\xf4\x1e\xfd\xd9\x0b\xa2\xf8\x98\xf6\xd7\x06p\x01\xd6\xfc\x11\xfdB\xff\xc1\xf3\xb6\x04\x98\xfa\xbd\xfb\x95\xffe\x04\xc0\xfe\xc4\xf6\x13\xff9\x06\x18\xff\xc4\xf4e\x0b\xee\x069\xf7g\x01R\x11\xd4\xfd \xfd\x89\t\xc2\nE\xf9D\x06\x07\x0fb\xff\x94\xfd\x9e\x08l\x0b\xc7\xfa\xd5\x03a\x0c\xaa\xf8\x8b\xfb\x9b\x0b%\x02v\xfc,\x04\xf6\x00\'\xff\xfd\x01\xc3\xfd\x94\x03\xfa\xfa\x14\xff,\x03\xe6\x01\xfb\xfc*\xfe+\x01[\xfa^\xff\xbe\xfdQ\x00\xf8\xfd&\xfd\x1e\xfb\x1c\x06\xbf\xfe\x00\xf2\x8e\x0b\xd0\xf8\x96\xf8\xe9\x02\xcc\xfd\xe2\xfeH\xf8\xf8\x04\xc0\xf9\xc0\xfd\x1b\xffh\xfdm\xfc\x0c\x01]\xf5\x90\x00y\x03\xbb\xf5C\x08{\xf6\xe4\xfc\xa2\x00\xca\x00\x8f\xf9\x95\x02\xb6\xf6\xa2\x04&\x00\x85\xf9\xde\x02\'\xffn\x00\x13\xff\xaf\x02\xf8\xf5~\x0b+\xfa6\x02f\x05f\xfd\xe5\xfe6\x07\xff\x02B\xfc\x1e\x00\xc6\x07\xf4\x01\xa9\xfbp\x0bk\xfb\xce\x03\x0b\x05\x96\x01\xb9\xfa8\x05Z\x00\xf7\x02\xcf\x00;\xfc\xf6\x00\xec\x02\xed\xff\xbb\xfe\x85\xfc\xa5\x02\x11\x02\xbd\xf4\xac\x08\x9f\x06\x00\xf1\x17\x01n\x10u\xec\x88\x05x\r\x96\xf3\xdb\xffJ\x0eV\xfa\xa8\xfc\x0c\x07\xe4\xfd\x1b\x06l\xfb\xff\x05\x95\x01[\xfd\xfc\xfeJ\x055\x02\x17\xf6_\x07\xb9\xf7\x15\x04\xa6\x019\xf4\x0e\x06\xb7\xfc\xb7\xf9\xd5\xffV\xfd&\xfbz\x02\xbf\xee\x82\x0f\\\x02x\xea\x17\x08E\x00\xff\xfdo\xf5\x9d\x0f\xc5\xff\xcc\xf9\x8c\xff~\x00\x03\x073\xf49\r\x19\x01\xb7\xfd\xa8\x01!\x02\xf4\x01\xe1\xffh\xfeo\xfcX\n\xa5\xfdi\x03r\x04\x0e\xfd\xed\xffb\x06\xdb\xf9\x02\x00\xba\rL\xf4\xc0\x02I\x08\x92\x038\xf8\xc8\x03T\x03\xd6\xf8\x9d\x08p\x05\x1f\xff\xd8\xf5\x96\x14\xae\xfdH\xf0/\x10\xcf\x07\xfa\xed\xa7\xfd\xff\x15\xa7\xf6\x9f\xf9\xcb\x08\x80\x02\x9f\xf7\t\xf2|\x0c\xca\x00\xd5\xf1I\x00\xe7\x06\xae\xf9s\xef\xe9\x0b@\xf7\x8f\xf2I\x06\xe6\xf4\xe4\xfc&\x05\xaf\xf3`\x02]\xf9\xb7\xfc\xc1\x003\xf9\x1f\x03O\xf9\xeb\x04\xa7\xfa\x1c\x07\x1b\xfb\xd0\xfe5\x05\xbc\xfe\xda\x02\xda\xfdE\x05\x1a\xffH\x08\xfb\xfd\x1e\x05\xbb\x08\x0e\xf4\xa5\x04E\x08(\x02\xbc\xfd\x89\xff\xf9\x0fg\xf3\t\xfe\x90\x12\xa9\xfb\xd6\xef\x18\x06\xd9\x11\x95\xf0B\xfb:\x14\x9a\xfe\xda\xf1\xff\xfc\xb5\x0e\xde\xfd\xe0\xf5]\x0cK\xfd\xc6\xf2\xaa\x14r\x01h\xe9X\x04t\x0b\xaa\xf8\x9f\xf5\xb3\x0b5\x07\x86\xf2\xe0\xfa\x0c\x0f\xce\xf2H\x01\x9d\x00i\xfb\x1c\x05\x10\x00\x7f\xfcs\xfb\x93\x05\x0c\xfa\x90\x00\x1f\xf8O\x05\x17\xff\xb1\xff\xbb\xf2j\x05\xec\x04\'\xec\x99\x10x\xff\xf8\xec\xc3\x06v\x0b\xcc\xe5\x82\x04\xe7\x19`\xe6\x8f\xfa\xe6\x1a\x8a\xf2\x1b\xf7\x88\x05G\x08{\xfbd\xf7\xc7\x16\xc7\xfa\xe0\xf4\xfb\x0c\xd2\x04\xdc\xf9[\x07\xe6\xff"\xfa$\x0f\'\x01P\xfa9\x08\x9e\x04\xe8\xff\x83\xffq\xff\xac\x06R\xff\x80\x01W\x03\x89\x02}\x00\xb0\xfcX\x07x\x00\xed\xf7p\n\x08\xfa\xd0\x00%\x08\xb4\xf6\\\x05o\x02[\xfd&\xf9\x8b\x08\xee\xf9\x89\xfcq\tN\xf8\x80\xf9#\n\x04\xff\x9c\xf2\x9e\r\xcf\xf5\xc1\x005\x05\xe1\xf7\xda\xf9\x91\t\x1b\xfb(\xf4\x1d\x13h\xed\xe2\xfb\xe1\r3\xf5}\xf8\x06\xfe\xf4\x07\x88\xf4\x87\xfa\x02\x05,\xffu\xfa\xd2\xf7\xf4\x04!\x00\x17\xfcQ\xf6\xb9\x0b\xf6\xfc\xb3\xf5\xbc\x06D\x04\xc2\xfbj\xff\\\x06\x8f\xfa\x9c\x01X\
x03}\x02K\x00w\x10r\xf5\xeb\xffK\r`\xfbc\xfd\x82\x0c\xb3\x02\x88\xfb*\x00-\ns\xfc\x86\xf9\x01\t*\xfal\x018\xf5\x82\x11#\xfc\xe2\xef\x9b\n^\x06\xae\xf2\x88\x02@\x0c\x1d\xfa\xae\xf6\xb4\x13\x99\x01\x84\xeer\r\x00\x0c\xf1\xf7\x8e\xf1\x0e\x18\x97\xfc\x89\xef#\x0c\xde\x08\xa1\xefk\xff^\x0c\xde\xf2~\xfa\xfd\x08\x84\xf9\x81\xf3\x89\x06\xd0\x06\xf3\xec\x17\xfci\x0e\xe6\xf2\x99\xf3\x8b\x05_\x06\xb1\xf6\x85\xf3\xf5\x14\xa8\xfeH\xe2I\x12\xa8\x0f\xda\xe80\x01\xbd\x14\x8b\xf5\xd6\xf8#\r\x93\x046\xf3\x0e\x06~\x0b\x0b\xfd\x17\xfe\x08\x08\xfc\x06)\xf5p\x07\x0b\x08\xee\xfc\x8b\xf9\xd4\tf\x03\x0e\xfb\xc7\x02\x8c\x02F\x000\xfcQ\x00\x05\x01\x10\xfec\xfb\x03\x08$\x00\x84\xf9\x87\td\xff\xa6\xeeD\x03\xe4\x07L\xfd\x83\xfe\xb7\x02\xff\xfb]\xfd}\x05\x91\xf9\x97\xfer\xff\xad\xfa\x0e\x00\x14\x02d\x01_\xfcJ\xfb\xa8\xfc\x85\xfd\xd6\xfe\xb7\x02\x9a\xf6\x1a\xfd\xe8\x03\xd3\xfb\xb0\xfb\x9b\xfc6\x02\x94\xf5x\xfdn\x07\x81\xf9\x97\xff\x97\x03\xf2\xf6\xa1\xf6\xfa\x10\xe3\x01y\xf1q\x04\xd3\x04\xc7\xfe\xe9\xfb\xb8\x0c\xc5\xffi\xf7E\x03P\nD\tG\xf4\x96\x08\xe5\x08\xb0\xf6(\x07\x06\x0b\xad\x02W\x03\xec\x01\xa4\x012\x052\x02!\x05#\x01\n\x00\xcd\x03r\x02\xb8\x02E\xff\x19\xffL\xfe\xf1\xfd\xeb\x01A\xff\xec\xffz\xf8\n\xfal\x06\xa0\xf7\x90\xfd\xa2\xfe\xcd\xf9\xfb\xfe\x86\x001\xf8\xfa\xfb\xc2\x08\xac\xf3i\xf6\xa7\x07A\xff\xae\xf7\x9f\x00\x85\x01\xf7\xfa\xc9\xfb\xda\x02\xc6\xfb\xa3\xf8m\x03\xce\xfe\x87\xff\xb7\x00n\xff[\xfcd\xfe\x10\xff\xd1\xfdh\xfe \xfc7\xff\x90\x03\xae\x013\xfe\x1e\x01o\x01\xf4\xfdn\x03Y\x07\x17\x03\xe3\x01K\t;\x0b\xf8\x04\x80\nM\x0e\x8a\x04t\x05\xcd\x0f\xe8\x0c\xdf\x08<\x0cc\x08e\x08v\n\x05\n\xaa\x05\x8f\x01\x07\x05V\x02G\xfe\x8f\x02\xad\x01\x9e\xfa\x9b\xfa\xc0\xfa\xcf\xf8\xd9\xfa\xde\xf8I\xf5\x0e\xf8\x05\xf9]\xf7\x13\xfa\xa1\xfa\xaf\xf8e\xf8\x90\xfa\xee\xfd\x1e\xfe\x0f\xff\t\xfe\xa5\xfd\xc9\x00t\x01I\xff&\x00e\x03\xca\xfc\x95\xfa\x13\x02\xd8\x02\xcb\xfb\xa6\xfa\xa9\xfb\x0e\xf9\x0c\xf9\x04\xfat\xf9\xe3\xf4/\xf6\xb0\xf5\xf3\xf5\x80\xf8\xbe\xf5\x14\xf8\xca\xf2\x07\xf3\x0b\xfa\x84\xfc\xb1\xf6V\xf8\xe1\xfc[\xf7\xfb\xfb;\x00\x03\xff\xad\xf9r\xfb)\xffN\xfc\xbb\x01T\x01\xd9\xf9\x9f\xfd\xb9\x00d\xfd\x11\xfd\x0e\x00\xcc\xff\x8a\xf9,\xfc\xb1\x03\x87\x05\n\x02\x9c\xfdx\xffj\x03\x16\x04E\x05v\x05%\x07\x8d\t\'\x0bu\r\xa2\x0e\x90\rB\x0fM\x11=\x13\xcd\x19\xef\x1dH 
\xa7"k#\xd2"\xcc\x1e$\x1e\xd8\x1f\xef\x1f\xe6\x1b`\x18|\x18>\x17}\x11\xfc\n\x92\x03\'\xfdV\xf7u\xf2\x93\xf1\xbc\xef~\xeb\xb2\xe7\xd2\xe6\xc0\xe5[\xe3E\xe2#\xe0\xca\xde\x93\xdf\x0f\xe3\x14\xe8\xb7\xec\x14\xf0\x1f\xf2\x99\xf3=\xf7\x9c\xfb\xac\xfd\xc5\xfeb\x01\xbb\x02u\x05j\n\xba\x0b\n\x0c\xa5\x0b+\x07\xec\x035\x03=\x02*\xff\xb7\xfa\x08\xf8E\xf7\xb9\xf6\xcf\xf5\x8f\xf5&\xf1\xbc\xec\xa2\xebV\xecq\xee\x9f\xf0\xfd\xf1\xfa\xf2\xdb\xf6\xa1\xfc\x07\x01\x04\x03\x08\x05G\x04q\x06\x05\n\xd5\rJ\x11\x9f\x12R\x12\xc3\x11\xa1\x12\xef\x12\x1c\x10s\x0b\xb0\x07\r\x05u\x03\xb4\x02\x84\x01\x93\xfe\xbf\xfa"\xf7^\xf5\xfc\xf3\x10\xf22\xefG\xed\xa8\xed(\xf0\xd8\xf2\xf4\xf2\x19\xf3\xd4\xf3\xbd\xf3\x10\xf4\xa0\xf5\xca\xf6\xd1\xf7n\xf9\x18\xfbt\xfc\xce\xfdO\xfd\xf3\xfb\xe9\xfa#\xfa\xb0\xfa\x9d\xf9\x9d\xf8\xcc\xf8\xf4\xf9`\xfc\'\xfc\x9a\xfb\xa9\xfb\x87\xfc\x03\xff~\xffs\xfd\xce\x03U\x16O%?*\xc9\'\x0b*\xb52r5\xff2\x872F5T5e2\x9d2\xf14\xc10\x15#\x99\x13\xa4\t\x03\x04\x94\xfco\xf3\xc7\xeb5\xe7\xb2\xe3\xf5\xdf\xfa\xdc\xd7\xd9\xf3\xd5\x89\xd0\x89\xcb#\xcd\x84\xd5\xdf\xdc\x1b\xe1\x94\xe6\x86\xee#\xf6\x80\xfc|\x01D\x06+\x08\xe6\t\xa9\x0e\xb4\x13b\x19\xd5\x1d[\x1cL\x19\xb7\x16\x9f\x13\xe2\x0eR\t)\x03\xb1\xfbl\xf4\xac\xef\x8d\xef\x86\xed^\xe9\x06\xe4;\xde\xc0\xdaU\xd9P\xda\xb9\xdc\x17\xdes\xe1\x1d\xe8\xa2\xee\xa9\xf5\xe0\xfa\xab\xfe\x93\x01y\x05\xfe\t\xbf\x0f\x85\x15\x06\x1a\xc1\x1d\xcb\x1e\xd2\x1d\x16\x1e=\x1d\xfb\x19\xcd\x15v\x11[\x0e\x0c\x0b\xe8\x07\xe4\x04T\x01\xd7\xfc\xa4\xf8?\xf5\xcf\xf2%\xf2~\xf1\x99\xf0\xcc\xef{\xf0\xa0\xf2`\xf4A\xf5m\xf6R\xf7\xf8\xf8H\xfa\xc6\xfa\xe9\xfb4\xfc\xd6\xfb\x12\xfa\xa0\xf8A\xf8\xa8\xf8N\xf6\xc6\xf3\x8b\xf2l\xf1\xd4\xf1q\xf1\xa6\xf0\xd2\xf0\xd3\xf0f\xf2\xae\xf3\xcf\xf3\x9c\xf5\x8c\xf9\x9c\xfcX\x00\n\x05\xcb\x07\x8e\t[\x0b3\x0f\xad\x16\xff n+\xfa2_6\xd86\xc87\xd48\xf96\\2^,\x98(k\'\xa8$\xd1\x1f\xaf\x19\x9d\x11Z\x076\xfc\xa7\xf2\x90\xeb\xc4\xe5\x1d\xdf\xf5\xd8\x97\xd6\x8a\xd7D\xda\x89\xdc\xf8\xdc\x11\xdc\xa2\xdc&\xdf\x86\xe2X\xe7k\xed\xf6\xf3\xe9\xf8N\xfe\x99\x05f\ry\x12T\x14\xcc\x13w\x13\xbb\x13\xf8\x13E\x13\x88\x11\x03\x0f\xd7\x0bg\x08y\x04\x1f\x01\xf0\xfc\xf5\xf6\x9c\xefM\xe9\x85\xe5\xab\xe3\xf8\xe1o\xe0\xd3\xdf6\xe0\x83\xe1\x94\xe3\xb9\xe5~\xe8-\xeb\x9a\xedO\xf18\xf6x\xfcr\x02V\x07"\x0b\xf2\x0e\x18\x12m\x14\xc7\x15\x85\x16\xb2\x16+\x16\xa4\x14\xb0\x13\x12\x13\xcc\x11i\x0f\xaf\x0b\xdd\x07i\x05\xa6\x02T\xff\x05\xfd\xee\xfa\x18\xf9\xd7\xf7\xfd\xf6\x89\xf6\xed\xf6\x86\xf7_\xf7p\xf6\xd7\xf5\xf7\xf6G\xf8\x7f\xf8\xd1\xf8\x0e\xfa\'\xfb\x9a\xfbo\xfc\xa8\xfc\xcb\xfc\xce\xfd\x0b\xfe\x8a\xfdh\xfd:\xfe\xf3\xfe\x88\xffp\x00\xd4\x01\xd0\x02\x8a\x03d\x04\xdc\x04\xde\x05\x8b\x06\xdc\x06t\x07\xe2\x07\x97\x08s\tN\tz\x08z\x083\x08\x9c\x06\xf7\x04\xa1\x044\x05.\x05,\x04\r\x02\x8b\x00\x80\x00\xac\x00\xd5\x00o\x00\xf6\xff\x03\x00D\xff\x99\xfeE\xffr\x00\xc8\x01\xd8\x02\x19\x03.\x031\x04\x8b\x05k\x06i\x07\xf1\x08E\n\xa3\n\x8e\n\xd4\ns\x0b\xb3\x0b\xfa\n\xad\t>\x08\xdf\x06j\x05\xad\x03R\x02(\x01\xa4\xff\xc2\xfd\xb7\xfbT\xfa\x94\xf9g\xf8\xa9\xf6$\xf5c\xf4+\xf4\xda\xf3H\xf3/\xf3@\xf3D\xf3\xbf\xf3\x86\xf4\xbd\xf5\x1f\xf7\x9f\xf7z\xf8\xac\xf9{\xfa\x89\xfb\'\xfc\xbf\xfc\x85\xfd\xd2\xfdR\xfeq\xfey\xfe\xe5\xfe\xdb\xfe\xb3\xfe\x7f\xfe\x1c\xfe\xbf\xfd\xc9\xfd\xd7\xfd\xd7\xfd\x9a\xfd\xc1\xfdp\xfe\x8a\xfe\xf6\xfe\xcf\xff^\x00\x07\x01\x85\x01\xdf\x01\x99\x02i\x03\xf5\x03f\x04\x8a\x04\xa1\x04\xf5\x04]\x05]\x05\x1c\x05\xcb\x04_\x04\xc5\x03\x17\x03\x15\x03\xc9\x02\x1d\x02\xaf\x01\xde\x00j\x006\x00\xc9\xffW\xff;\xff=\xff\xcf\xfe\x95\xfe\xc9\xfe\xd6\xfe\x9f\xfe\xa2\xfe\xf0\xfe\xa3\xfe\xbb\xfe\x08\xff\xbd\xfe\xb4\xfe\xd5\xfe\x97\xfe4\x
fe\t\xfe\xcf\xfd:\xfdy\xfc\x0e\xfc\xbf\xfb\x04\xfbl\xfa5\xfa\xca\xf9R\xf92\xf9*\xf9\x19\xf9\\\xf9\x02\xfa\xbc\xfa3\xfb\xfd\xfbA\xfdU\xfee\xff|\x00\xc3\x01\xda\x02?\x04h\x05e\x064\x07h\x08\x01\t\x18\t\x88\t\xb2\t\xc5\t\xb9\t\x9d\t"\t\xc7\x08W\x08\xda\x07\x1c\x07\x8f\x06\x11\x06\x82\x05\xa1\x04\xdc\x03s\x03[\x03\r\x03\x86\x02\x85\x02\x93\x02l\x02:\x02u\x02z\x02w\x02C\x02(\x02\xed\x01\xd7\x01\xac\x01<\x01\xcf\x00N\x00\xd4\xff?\xff\x86\xfe\xac\xfd\x1d\xfd\xb5\xfc\x13\xfc0\xfb3\xfa\xa3\xf9(\xf9v\xf8\xdb\xf7g\xf7\x0c\xf7\x01\xf7B\xf7a\xf7\xa4\xf7\x12\xf8u\xf8\xa6\xf8\x02\xf9\x0b\xfa\xb8\xfa\x0b\xfb\xdb\xfb\xf4\xfc\x01\xfe\xe4\xfe\xdc\xffL\x00\xa5\x00M\x01\xd3\x01\x02\x02v\x02\xb4\x02\xbf\x020\x031\x03\x0c\x035\x03\x1b\x03\xa5\x02\x9c\x02u\x02,\x02\xf8\x01\x00\x02\xdb\x01~\x01~\x01\x98\x01I\x01\x0f\x01/\x01#\x01\xde\x00\xb9\x00\xdf\x00\xe1\x00\xb5\x00\xa3\x00\xad\x00_\x00\xfd\xff\xcf\xffz\xff\x02\xff\xa1\xfe\x0e\xfel\xfd\xe5\xfc\xa0\xfc8\xfc\xbc\xfb,\xfb\xbe\xfa\xb0\xfa\xab\xfa\xa0\xfa\xd5\xfaW\xfb\xbd\xfb)\xfc\xf9\xfc\t\xfe\xa2\xfe0\xff\x04\x00E\x01\x0c\x02\xc1\x02\x8f\x03J\x04\xd5\x04b\x05\xef\x05\x01\x06\xee\x05\xf0\x05\xbd\x05I\x05\x1c\x05\xa8\x04\x15\x04\x97\x03;\x03}\x02\xe9\x01\xb8\x01-\x01\xb0\x00q\x00o\x00;\x00a\x00\x93\x00\x94\x00\xc2\x00_\x01\xc8\x01\xff\x01]\x02\xd4\x02$\x03t\x03\xcb\x03\xe0\x03\x0c\x04\x19\x04\x10\x04\xda\x03\x9e\x03M\x03\x97\x02\xfa\x01[\x01=\x00X\xff\x97\xfe\x88\xfd\x99\xfc\xc2\xfb\xe9\xfa0\xfa\xbf\xf9Y\xf9\xf6\xf8\xae\xf8\xcd\xf8\xbb\xf8\xfe\xf8\x85\xf9\x15\xfa\xf3\xfa\xa9\xfb\xa0\xfc\x99\xfd\x7f\xfe;\xff\x0f\x00\xdb\x00\x82\x01\xf7\x01r\x02\xf8\x02B\x03f\x03\x81\x03f\x03\x14\x03\x1a\x03\xe4\x029\x02\xb2\x01\x8c\x01&\x01\x98\x00\x0f\x00\xf4\xff\xb3\xff\n\xff\xe3\xfe\x01\xff\xef\xfe\xe5\xfe\xf1\xfe\xfa\xfe6\xffv\xff\xa5\xff\xd8\xff\x00\x00=\x00Z\x00\x80\x00\xca\x00\xe8\x00\xed\x00\xb5\x00\xa2\x00\x9e\x00\x7f\x004\x00\xe4\xff\x7f\xff\x1f\xff\xba\xfel\xfe]\xfe\xfc\xfd\xbc\xfd\x87\xfdN\xfd$\xfd2\xfd\'\xfd1\xfdu\xfd\xbc\xfd0\xfe\x90\xfe\xe7\xfeG\xff\xb0\xff/\x00\x80\x00\xe3\x00+\x01&\x01:\x01\x84\x01\xa4\x01w\x01m\x01n\x013\x01\xd9\x00\x97\x00W\x00\x07\x00\xdf\xff\x9c\xff\x8e\xff\x95\xffy\xff`\xff\x98\xff\xb4\xff\xba\xff\xf1\xff9\x00\x96\x00\x0f\x01k\x01\xc4\x015\x02\x96\x02\xc9\x02\t\x03+\x034\x039\x039\x03+\x03\xef\x02\xc5\x02{\x02\x1e\x02\xb5\x014\x01\xb1\x008\x00\xaf\xff\x08\xffX\xfe\xc8\xfd\x9a\xfd\x1d\xfd\x94\xfcB\xfc\x13\xfc\x06\xfc\xfc\xfb\x10\xfc?\xfcb\xfc\xa9\xfc\x06\xfd\\\xfd\xd1\xfdA\xfe\x8f\xfe\xec\xfeU\xff\xa2\xff\xf7\xff2\x00k\x00\xa3\x00\xd4\x00\xd3\x00\xc9\x00\xe1\x00\xdd\x00\xc8\x00\xa8\x00\xab\x00\x9b\x00a\x00S\x00[\x008\x00\x16\x00\x00\x00\xfa\xff\xfa\xff\x1b\x007\x00:\x00K\x00o\x00\x80\x00\xac\x00\xf2\x00\x06\x01\x13\x01?\x01|\x01\xa1\x01\xdb\x01\xd9\x01\xcb\x01\xdd\x01\xbe\x01\xbd\x01\xb8\x01\x95\x01e\x01]\x01"\x01\xd0\x00\xa9\x00\x8e\x00G\x00\xeb\xff\xb1\xfff\xff8\xff)\xff\x04\xff\xd6\xfe\xc8\xfe\xcb\xfe\xbc\xfe\xbc\xfe\xc3\xfe\xdc\xfe\xd1\xfe\xba\xfe\xcb\xfe\xe2\xfe\xf4\xfe\x06\xff\x07\xff\x0b\xff\x1c\xff\x03\xff\xfd\xfe%\xff\x14\xff\x0c\xff\x04\xff\r\xff7\xffa\xffj\xff\x80\xff\x9f\xff\xcf\xff\t\x00<\x00r\x00\xa7\x00\xba\x00\xeb\x00/\x01n\x01\xb0\x01\xde\x01\x11\x02L\x02z\x02\xa4\x02\xe0\x02\xf6\x02\xf8\x02\xea\x02\xd5\x02\xb4\x02\x97\x02\x85\x02;\x02\xeb\x01\x92\x01;\x01\xc7\x00W\x00\xf1\xff\x8d\xff7\xff\xea\xfe\xbb\xfe\xa0\xfe{\xfeZ\xfe8\xfe*\xfe1\xfe0\xfe>\xfeg\xfe\xa1\xfe\xd2\xfe\xf8\xfe 
\xff=\xffm\xff\x83\xffx\xfff\xffb\xff^\xffZ\xffK\xff1\xff\x0c\xff\xd7\xfe\x9e\xfen\xfe7\xfe\xfc\xfd\xd4\xfd\xbb\xfd\xad\xfd\xae\xfd\xbc\xfd\xe4\xfd\x13\xfeQ\xfe\x9c\xfe\xee\xfeI\xff\xb5\xff\x12\x00f\x00\xbf\x00-\x01\x92\x01\xe0\x01/\x02w\x02\xad\x02\xd9\x02\x04\x03\x18\x03\x19\x03\x18\x03\x07\x03\xed\x02\xca\x02\xa0\x02d\x02\x1b\x02\xcd\x01}\x01+\x01\xde\x00\x85\x001\x00\xe4\xff\x8f\xff>\xff\xf5\xfe\xb0\xfev\xfe;\xfe\x06\xfe\xd7\xfd\xba\xfd\xa7\xfd\x93\xfd\x89\xfd\x80\xfdt\xfdg\xfda\xfdd\xfdm\xfdz\xfd\x97\xfd\xc7\xfd\xe5\xfd\xfb\xfd*\xfeg\xfe\x93\xfe\xc1\xfe\xfc\xfe9\xffo\xff\xb9\xff\x03\x00G\x00\x87\x00\xcf\x00\r\x01E\x01\x8b\x01\xc0\x01\xe2\x01\x05\x02\x18\x02&\x02+\x02%\x02\x1b\x02\xfd\x01\xd9\x01\xbb\x01\xa8\x01\x96\x01u\x01H\x01\x12\x01\xec\x00\xc1\x00\x9c\x00|\x00`\x00C\x007\x006\x00<\x007\x00\x1a\x00\x0b\x00\x03\x00\xf8\xff\xed\xff\xe8\xff\xf3\xff\xef\xff\xed\xff\xe9\xff\xe2\xff\xde\xff\xd3\xff\xb3\xff\x89\xfff\xffD\xff\x1d\xff\xfc\xfe\xd7\xfe\xb2\xfe\x8b\xfeh\xfeP\xfe;\xfe\x1f\xfe\x03\xfe\xf4\xfd\xf2\xfd\xf0\xfd\xfb\xfd\t\xfe\x12\xfe9\xfeo\xfe\x9c\xfe\xd1\xfe\r\xffP\xff\x85\xff\xbc\xff\xfb\xffA\x00\x82\x00\xc0\x00\x08\x01E\x01\x81\x01\xb4\x01\xd7\x01\xf4\x01\x10\x025\x02J\x02S\x02V\x02Y\x02]\x02]\x02W\x02E\x02%\x02\x07\x02\xe5\x01\xc0\x01\x92\x01^\x01 \x01\xde\x00\xa6\x00w\x00>\x00\xfc\xff\xbd\xff\x7f\xffG\xff\x11\xff\xd4\xfe\x96\xfe[\xfe4\xfe\x19\xfe\x05\xfe\xf9\xfd\xf5\xfd\xf4\xfd\xf1\xfd\xfd\xfd\r\xfe\x16\xfe\'\xfe>\xfe^\xfe\x82\xfe\xb5\xfe\xe6\xfe\x14\xffE\xffj\xff\x87\xff\xa9\xff\xcd\xff\xf6\xff\x17\x00:\x00]\x00\x80\x00\xa3\x00\xbf\x00\xdb\x00\xe8\x00\xef\x00\xff\x00\x13\x01 \x01$\x01\'\x01!\x01&\x01\'\x01\x16\x01\x04\x01\xf2\x00\xdb\x00\xd5\x00\xc6\x00\xbd\x00\xad\x00\xa1\x00\x9e\x00\xa2\x00\xaa\x00\xa8\x00\xa5\x00\xa4\x00\xac\x00\xbc\x00\xca\x00\xc8\x00\xc9\x00\xc4\x00\xc3\x00\xc8\x00\xb5\x00\x9c\x00\x81\x00f\x00Z\x00J\x00.\x00\x03\x00\xe4\xff\xd0\xff\xb1\xff\x8d\xffd\xff9\xff\x11\xff\x05\xff\xf6\xfe\xe6\xfe\xc6\xfe\xa4\xfe\x8d\xfe\x91\xfe\x90\xfeh\xfeX\xfeQ\xfeM\xfe^\xfez\xfe\x8d\xfe\x9b\xfe\xbb\xfe\xd7\xfe\xff\xfe\'\xffT\xffy\xff\xa5\xff\xd1\xff\t\x007\x00]\x00\x8c\x00\xa7\x00\xbe\x00\xd5\x00\xeb\x00\xfd\x00\x02\x01\xfe\x00\x00\x01\x05\x01\x04\x01\xfb\x00\xed\x00\xe5\x00\xd4\x00\xbc\x00\xa8\x00\x93\x00~\x00k\x00U\x00?\x00%\x00"\x00\x15\x00\x05\x00\xfc\xff\xf2\xff\xee\xff\xe6\xff\xde\xff\xd8\xff\xcf\xff\xbe\xff\xb8\xff\xad\xff\xab\xff\xac\xff\xa7\xff\x9f\xff\x97\xff\x94\xff\x84\xffu\xffc\xff\\\xffS\xff@\xff2\xff$\xff&\xff#\xff\x1d\xff+\xff3\xff>\xffG\xffT\xffb\xffv\xff\x8c\xff\x9c\xff\xbb\xff\xdc\xff\xf4\xff\x07\x00\x1c\x00,\x000\x00F\x00c\x00a\x00k\x00{\x00\x82\x00\x8b\x00\x90\x00\xa2\x00\x9b\x00\x84\x00\x83\x00}\x00s\x00k\x00j\x00`\x00S\x00M\x00N\x00^\x00U\x00P\x00Q\x00`\x00v\x00z\x00y\x00{\x00\x87\x00\x98\x00\xa3\x00\x9b\x00\x91\x00\x89\x00q\x00_\x00T\x008\x00(\x00\n\x00\xf4\xff\xd8\xff\xc3\xff\xaa\xff\x8d\xff\xa3\xffg\xffj\xff\x7f\xffH\xffe\xffH\xff\n\xffk\xff\x0e\xffU\xff\x1d\xff\x19\xff)\xff\xf5\xfe{\xff\x0b\xff_\xff^\xff,\xffi\xffj\xffp\xff\xad\xff\x94\xff\xcb\xff\xdb\xff\xba\xff\xe1\xff\x1f\x00\x00\x00\xf4\xff\xf6\xff\xfb\xff0\x00)\x00~\x007\x00\xc2\x00l\x00t\x00\xcb\x00\xa3\x00 \x01\xda\x00\xf2\x00\xf7\x00\x12\x01\xe9\x00\x9d\x01\x9c\x00\x1a\x02~\xfe\x1a\xffM\x0e\x90\tk\x02o\xfe\x02\xfd\xc5\xff\xff\xfc\xf2\xfc\x93\xfcQ\xfd\x93\xfc\xda\x00\x82\x03N\x00 
\xfd\x00\xfe\x07\x03\xf8\x02x\xfc[\xfc\xae\x01\xc7\n:\x08;\xfe\xa1\xfa\x88\xf8\xfb\xfa\xb6\xff\xef\xfda\xfd#\xfe\x88\x03\x11\nk\xfc\xbc\x02\xb8\x07\xad\x03\n\x06z\x05b\x04[\x03\xb0\x07\xce\xfe\xd9\xf8x\xfc+\x01$\x03C\x03M\xfd\x18\xf5\xb1\xef-\xec\xa1\xeb\x8e\xf2\xc1\xf9\xf0\xfb\n\xff\xf6\xfaH\xf7B\xfb \xff\xc9\x00\xf2\x02\xba\x05\xc2\x07b\x01(\x03\xfc\x04\xd3\x08_\x0e`\x08\xc0\x05\xd5\x00-\x04\x99\x07\x88\x07\xee\x03!\x02\xf0\xffn\xfe\xe2\xfd\xa6\xf9D\xfb|\xfd=\xffX\xfe2\xfc\xa0\xfc\xec\xfbQ\xf8\\\xfba\xfb4\xfc\x14\xff\x90\xfd\x15\x00\x06\x01\x1e\x01\x11\x06\x11\x08!\x08"\x04\xf3\x01}\x02\xc5\xff\xc0\x01\x0c\x02\x10\x05\x1a\x02\xb6\x04\x91\x02\xb3\xffi\x01\x90\xfd \xfb\xbb\xf8\x8e\xfd\xdd\xfcw\xff_\x01\xcc\xfd{\xfa\x07\xf7\x04\xfa]\xfe\x07\x01\xa9\x01\xa5\x02\xd5\x00\x98\x01\xfd\x00\xf7\x00k\x01\xca\x01\x98\x01.\x07\xf0\x12Q\x0f\xcb\x08\xce\xffA\xfep\xfb\xdb\xf6\xa5\xf9\xe1\xf7F\xf8\xec\xfd\x8b\xff\x80\xff\t\x01\xc6\xfe\xee\xff8\xfcu\xfa\x01\xfc5\xfa\xab\xfcE\x003\x02]\x02Q\x02\x90\x03\xba\x03\\\x02\xf5\x02\xab\x02\x08\x01~\xffQ\xfe\x86\xfc{\xfa\x1d\xfe\xc4\xfd\xe8\x00\xe1\x01\xbc\x01`\x011\xfd=\xfa@\xf6D\xf94\xfe\xd1\x00|\x02\xc7\xfe\xd8\xfd\x86\xff\xf5\xff\xc9\x01\xcf\x05Z\x08\xab\x07\xba\x048\x01 \xfdJ\xfb\x03\xfb\xe8\xfa\xe0\x01\xde\x06\x11\x06\xb7\x05t\x02\x14\xfd\xa7\xfd\xf9\xf9Y\xfaW\xff\x7f\xfe\xb6\xff\xd2\x007\x00\x8a\xfeE\x00\xc2\x01p\x04\xe9\x04\xe0\x05\xf1\x05&\x01\x13\xfe)\xfb\xce\xfc@\x03_\x04\xe7\x04C\x043\x00\x97\xfc\x82\xfd\xf6\x00\xec\x01\x90\x00\x01\xff6\xfd\xf6\xfa\x1d\xfa\xf2\xf9#\xfd\xcd\x01\xca\x04\xa6\x06\xf2\x03\x95\xff\x9d\xfd:\xfa\xab\xf8\xe7\xfa\xec\xfd\xb5\x01\xf5\x034\x03L\x01\xac\xfc\xd2\xfc=\x00\x12\x01\x84\x01u\x01"\x03\x0b\x02\xc8\xfeL\xff\xbd\xff\xe8\xff\xf5\x00\xe4\x00\x81\xff\x90\xfco\xfd9\xff\xf7\xfeQ\x04\xb2\x05\x11\x03\xf2\xfe/\xfcA\xffG\xff\xdc\x01\x97\x03\xe8\x00\x9e\x00\x1b\x01\xb5\x00e\x01\xca\xfe\xd8\xfe&\x00\'\x01\xde\x00y\xff\xe6\xfc\xe6\xfc\x8a\x00\n\x01\xcf\x02h\x03\x0c\x04-\x03\xfe\x00<\xff?\xfdb\xfa\n\xfa\xd2\xfc?\x01\xc1\x04\xd9\x04\x9e\x04Y\x02\xd9\xfe\xde\xfb\xe0\xfa\xe7\xfa\xa3\xfe\x08\x03\xf7\x04D\x03#\xfe\xd5\xf8B\xfc\x1b\xff=\xff|\x03\x82\x04\x1d\x01\xd0\xfdN\xfeg\xfdN\xfc\xfc\xfb\xe8\xfcy\x00\xdc\x02V\x00\xa6\xff\xc4\xff\xb8\xfd\xb3\xfcr\xfb;\xfe$\x02\xd3\x05\xaa\x06^\x01\x08\xfe\xe7\xfd\xfe\xfdu\xffb\x00\xbc\x01\xf8\x03\xf6\x04\xed\x03[\x01y\xfeZ\xfe\xe1\xffw\xff>\x03\x03\x04(\x01\x08\xfe\xd5\xfb\x1e\xfd\xce\xff\x9d\x02X\x015\x01\x87\x00\xfc\xfe\xf0\xfd\'\xf8\x9e\xf5\xc8\xf9\xac\xfei\x03%\x05\x05\x03\x8e\x00\\\xfeG\xff[\x00\xec\xfe\xd3\xffI\x03 
[binary blob omitted: a single long Python-style byte string, apparently little-endian signed 16-bit PCM audio samples from a test fixture; the surrounding unified-diff markers and hunk headers were lost in extraction and the escape sequences were split across the wrapped lines, so the data is not reproduced verbatim here]
\xfd\xe8\x02\xc0\x02\xb6\x01\xfa\x04\xed\x02\xb9\xfe\x17\xfeg\x06\x12\x08\xda\x05>\x05J\x03~\xff\xa1\xff\xba\x03=\x01\xed\xfd\xa9\xfe2\x02\x0f\x00s\xfa\x13\xfb\xd1\xfci\xff\x8b\xfb\xea\xfb<\xff2\x00\t\x08<\x00l\xfb\x14\xfeJ\x00\xd2\x02\xb5\x05\x9f\nh\x04%\xfeD\xfa\x9c\xf8\x84\xfc;\x04\x8e\x07\x9c\x06&\x05\xe1\xfe\xfe\xf6\x0b\xf6\xed\xfcY\x01\xd4\x00\x12\x01\xcf\x03}\xff;\xf9\x11\xfa\xfa\xfb\x0e\xfc[\xfag\xfc\xc3\x01\xc3\x05&\x04[\xfeO\xfb\x12\xfd\xdb\x01\xc6\x04\xab\x06\n\x049\x01+\x02\xb3\x02\r\x05\x05\x07\x19\x05\x94\xfd\xa5\xf8a\xfb:\xfe\xd9\xfd\xa9\xfdi\xfe\x84\xfd\x08\xfa\x7f\xfc\xdd\xfe\t\xfe\xdb\xfd\xfd\xfdx\x01:\x03m\x05\xf9\x01#\xff\x8d\x00x\x02\x18\x05\xd2\x04\x89\x06\xe6\x04\x86\x00W\xfd\xf5\xfc\xaa\x01\x13\x05\xea\x07^\x06H\x01\xdf\xfb\x97\xfb\x87\xfd\xff\xf9\xca\xfay\xfeE\x02\xa7\x00\x17\xfd.\xfd\x05\xfai\xf6\x07\xf6\xba\xfc\\\x042\x05\x87\x02\xe2\xfd\xb6\xf9\xb3\xfbH\xff\xe2\x03\xe0\x04\xaa\x03\xe2\x01\xb2\x00\xd4\x02U\x01\xd9\x00\n\x00"\xff-\x01\xe0\x03\xab\x02\xc7\xfe\xbd\xfd\xc5\xfb\xa7\xfa\x91\xfd\x16\x00\x93\x00+\x00m\x00u\xff\x80\xfd\xa2\xfb\x9b\xf9\xc2\xfc?\x00q\x03\xdf\x04m\x04\x06\x03\x82\xfe\xe4\xf9L\xfb;\x01\x8d\x03\xb3\x02C\x02\xd9\x02O\x02E\xff\xf1\xfd\xc0\xfe\xe8\xff\x7f\x00f\xfe\xe8\xfff\x02\xfe\x00m\xff\x00\xff\xf0\xfd(\xfd>\xfa"\xfa\x02\xfc\x05\xfc\xf8\xff\xc0\x01"\x02\x83\x01\x19\xff;\xfd\x0f\xfbQ\xfb\x1a\x02\xda\x06\xdc\x08\xa4\x08\xdb\x05*\x02\x86\xfc\xe1\xfaX\xff\xfd\x04?\x07\x9a\t\x0c\x06\x1e\x01\xb9\xff|\xfeL\xff;\x00\xf8\x04T\x08\x19\t\xf4\x08&\x06:\x03\xa8\x00\x17\x00^\x04\xd7\x08\xf0\n\xa9\tI\x08\xfa\x04R\x00\xee\xfe9\x00\x9d\x04\x8a\x04\xa6\x04c\x05`\x03\x0b\x02\xc0\xff}\xfd\xf3\xfcy\xfe\xe5\xfe\xa1\xfeJ\xff`\xff\xf9\xfc\xf2\xf9\xa8\xf8O\xf6l\xf4\xda\xf6\xbc\xf9\xd5\xfaP\xfb\x80\xf9*\xf7\x14\xf6E\xf7\x0b\xf8\x95\xf7\xc3\xf8\xf2\xf9\xb1\xfa\x14\xf9\x89\xf8Y\xf9\xc3\xf8\x9d\xf9\x8e\xf9\x98\xf8\xb4\xf7\xf5\xf8\x11\xf9i\xf9\xa6\xfb\xa3\xfaZ\xf9\x1d\xf8\x81\xf6\x04\xf5\xe4\xf2\xe3\xf2\xf6\xf32\xf5\xd7\xf7\x1a\xf9U\xf8]\xf9\xf4\xf9\x94\xf78\xf8\xdc\xfc\x8b\x03,\x0b\x15\x0b\x8f\x08K\x07\xf7\x02\x11\x04\xb9\x06\xb4\x08J\r*\x0c\x92\t\xee\x07j\x08\x82\t\xac\x07\xc0\x07N\x05\'\x04\xbd\x03\xa5\x03\x8f\x07\xf9\x04J\x04\t\x07\xb5\x01\x81\xfb1\x00\x92\x15\xd7)\xd5,k$Q\x1bc\x18\x8a\x17\x81\x1a5&01\xa12k)\xe3\x1fx\x18\x91\x11=\x07{\xfe\x96\xfb\x14\xfd\x1b\x00\x06\x01`\x00y\xf4M\xed\xe3\xe3\x12\xd8\xbc\xdaI\xe6\xe2\xf0-\xf2r\xf1\x99\xee\x18\xedc\xe9\x0c\xe8\xd8\xef\x84\xf5A\xfaL\xfd_\xfdM\x04=\x07\xb3\x01Z\xff\xb0\xf9\x01\xf6\x1d\xfaL\xff)\xfe\xdf\xfcN\xfav\xf2\x1b\xe9\x95\xe4\xd9\xe8\xb9\xeb\xbc\xec\xba\xed\xcb\xea\xf0\xe7\xde\xe6k\xe7\xd9\xe8;\xec\xab\xf17\xf5\xf8\xf7\xe0\xf9\x1a\xfb!\xfa\xe4\xf8\x9f\xf9\xec\xfb\xcf\xfeN\x00\xd1\x00G\x01\x11\xff8\xf9\t\xf5\xa5\xf3\xc1\xf3\xaf\xf7\xa2\xfb\xb5\xfc\xf9\xfc\x82\xfb\xcc\xf8\xec\xf5\xa3\xf5\xfa\xfa\xf5\x02U\x05\xcd\x03\x19\x07\xae\r\x06\x13\x1e\x11R\n\xda\x07\xa3\x069\x0bs\x105\x15\xd7\x1a\x14\x16\x90\x0e\x1a\x07\xec\x01z\xfd\xd8\xfb\xeb\x0e\xb9,\x9e>\x1d=\xaa-\xa0"\xaf 
\xfc\x1bv\x1d\xa1,\xac;@?\x0c8\xb0*h\x1b\x88\x0e\x03\xfd\xf0\xf1\x16\xf3\x9f\xf8\x0b\x03{\x04\x99\xfd\x89\xf2\x10\xe2\x16\xd3\xd2\xcc\xda\xd0\xe4\xdf\xaa\xef/\xf4\xcc\xf5\xe2\xf6\xb3\xf1\\\xec\x88\xe9\xb7\xeb\xf3\xf3\xa1\xfb\x00\x03\xb9\nF\x13\x14\x11\xad\x04\xcf\xf9\x86\xf0\xec\xee\xa1\xf4-\xfb\xc0\xffI\x01\xee\xfd>\xf4\xca\xe9f\xe4\xb5\xe4\xc9\xe6\x13\xe8\xc5\xee~\xf4R\xf7\xbf\xf7\xe3\xf2\xea\xec^\xe8\x95\xe8\x9d\xec\x11\xf5s\xfea\x03j\x03\xb0\x01j\xfd\xde\xf8\t\xf7\xdc\xf8C\xfd\xd9\x02\xca\x06\x17\x04\x17\xfe\xe0\xf7\x8e\xef\xf5\xe9\xcb\xe9P\xec\xf8\xf0k\xf3\xc9\xf6\x80\xf7 \xf4Q\xf2\xa5\xf0n\xf4\x8c\xfb\xa4\x01 \x07~\x08L\x0c\xf3\x0e\xf6\x08@\x03\xec\x04\x81\x07\xd3\x0c\xe2\x10\x91\x10l\x11\xdb\x0b\xa5\x05\x02\x04p\xfe<\xfb\xa0\x0c\xf5.\xefK\x04T\x8dA-*\x94"\'!S\'\x9c9\xebH\xcfJ@;\x90%\xb4\x16\x95\x06\xda\xf8\xea\xed\xfb\xe5*\xe9p\xf0\xdb\xf4S\xf11\xecO\xe0Y\xd0L\xc8Y\xca\xd2\xd8\xb4\xedE\xfb\xda\x01\xce\x05\xc5\x01:\xfb\xb2\xf6\xe5\xf6y\xfa\xab\x00.\x07I\x0e\xb8\x11;\x10T\x0cq\xfeB\xf4\xca\xef\xc2\xe6\x9c\xe3\xdd\xe7K\xee\x95\xf4]\xf5\x8b\xf0\xb4\xe6\x80\xdeW\xdd\x0f\xe2i\xe9\xcd\xf2Z\xfd\xec\x01"\x02\x14\x02\xc3\x01g\x00\x14\xfdn\xfa\xad\xfc\xee\x014\x08o\x0b\t\t!\x05\xd6\xfe\x04\xf8\xb4\xf3\xf2\xf3,\xf9B\xfcW\xfc\x86\xf9\xa8\xf5\xef\xf4*\xf2b\xee<\xeb\xdf\xe9\x1b\xeb)\xed\xb7\xf3\xea\xfa\xfe\xfe\x83\x01\xfb\xfdp\xf8\x88\xf7\xed\xf8\x95\xfc\x94\x028\x057\x06\xe0\x05\xeb\x03\x90\x01\xf7\x00\x98\xfc\xfa\xf5\xa7\xf5\xcd\xf5d\xf2\xdc\xf1\xce\xfd\xef \xb8O\xe6f\x83^\xa6I\xd96F.\x913\x9b?!Pv[\xceR\xb1>\x0c*\x19\x12\x17\xfc\x94\xe5\x19\xcf\xfe\xc6\xbb\xcb\x13\xd4~\xdc\xbd\xe1\xd7\xdfZ\xd8\x9d\xcb\xf5\xc2A\xc8%\xd9\xc1\xee\x0c\x04\x13\x14\x95\x1e\xb4%\xd7\'\x02"\xba\x17\xcb\x0ca\x05\xfc\x04]\x07b\x0e`\x14\x0e\x10&\x07O\xf5\xde\xdeP\xcd\x18\xc0\xea\xbe\xf8\xc6\xd9\xd1C\xdej\xe6*\xe8\xe9\xe5\xa8\xe5\xff\xe6\x1b\xe8\\\xee\x8a\xfa\xef\x08s\x18\x05(\xe9.\x00*2!J\x16e\n\x9e\x04\xb4\x01"\x00^\x02\x01\x03\xf3\xfd\xa7\xf6&\xf1"\xeb0\xe6\xa7\xe2 \xe2I\xe7\xb9\xee\xf0\xf31\xf7\xec\xf6\x1e\xf5\xf8\xf4\xc0\xf14\xf3\x98\xf9\xf3\xfc\xf2\x02\x8e\x06\xc6\x05\xfa\x06U\x04\xed\x00\xc3\xfe\xe5\xf93\xfc9\xfe\x9e\xfe \x02\xda\xfe\xb2\xf9\x90\xf5\x10\xef\x86\xedA\xef\x88\xee\x1a\xf0\xf9\xf2\x84\xed\x07\xe9=\xf6\xa2\x15\xb1@e[\xa1W\x08K>B\xffB\xbaH\x0fL\xd8P\xa0Q\xf3Jk>\xea-\x0b\x1f`\r\x1d\xf3,\xd7\xa2\xc4\x95\xc0\xdc\xc7p\xd0\xfb\xd5D\xd9\x89\xd9\xb2\xd7\x97\xd6)\xd9\x12\xe2\xe1\xed\xfa\xfa\xd5\x08p\x19\x97)\x913\x8f3\x9a(\xcf\x1a/\rk\x03;\xfeH\xfc\xcf\x00f\x00\xc1\xfa!\xf2z\xe1a\xd1\x95\xc4\xa3\xbc;\xbe\xc7\xc7Q\xd5\xeb\xe2\xba\xee\xd4\xf5c\xf9l\xfa(\xfbp\xfe\xc3\x03\xf9\n\x0e\x15\xb2 \x06)M*\xfb#p\x18\xbe\nk\xfe\x1e\xf7\n\xf3\xba\xf0\x95\xf1\x1d\xf1r\xee\xc9\xee\xec\xedX\xec\xa4\xebF\xe9M\xe9\x85\xed\x0e\xf5\x9a\xfbu\x01W\x05I\x04\xfb\x02\xf7\xff@\xfc\xa1\xfb\x1d\xfbM\xfc\xe4\xfe\x0e\x01\xf1\x02\xf0\x01\xc4\xff\xc4\xf9X\xf4\xdc\xf0p\xedl\xf06\xf4\x85\xf5\xc9\xf8\xf5\xf5e\xf0\xa9\xefX\xebZ\xeb*\xf2\x89\xf0<\xec\xed\xf3l\x0cC6\xf5]\xf5f{ZKL\xa2?i<\x8e@\xccDrL!NDBu/\xb7\x1b\x0c\n?\xf7\x9e\xdc\xb3\xc3\xc7\xb7\x00\xba/\xc5\xd6\xd2\xba\xdd\xa7\xe3\xe1\xe2:\xdeh\xdb\x18\xe0W\xec\xaf\xfb\xc8\t\xf6\x16\xb4%\xe8116T/?!\xb9\x12\xe2\x06\xa3\xfdk\xfc\x10\x02\x9d\x04.\x02/\xf5\xde\xe3n\xd5D\xcaw\xc6\x00\xc6?\xc9\xb6\xd2 \xdd\x89\xea\xc6\xf6L\xfe\n\x02d\x00c\xfeY\xff.\x04\xc8\r\xb3\x18\x9f 
\xa1!Z\x1dg\x15/\nO\x00\x7f\xf8o\xf0\xef\xea\xf9\xe9,\xeb8\xee\xb4\xf2\xb1\xf4\x81\xf3k\xf1\x1b\xee\xa2\xece\xf0F\xf7a\xfe\x8d\x05\xbf\x08)\t\xdf\x07W\x05\x99\x03\x87\x00\xd7\xfdL\xfb\xb0\xfa\xc5\xfc@\xff\xd3\x01\xac\x01/\xfd\xf1\xf7\xd6\xf1\xb9\xebA\xe9Q\xe9.\xea\xc6\xec\x9a\xefH\xf1\x1d\xf2\x94\xf1\xce\xee[\xec\xa9\xeb\x93\xea\x08\xea\xd1\xec\x8f\xf8\xe4\x18\xf1C(d(n\xeccWS$I\xc8D\xdeB\x92E\xecJhM\xd1G\xe76\x11"\x96\x0e\xbd\xf5\x01\xd9\xfa\xbd\xc9\xacz\xadg\xba\xee\xca\r\xd7\x86\xdd\xda\xdd\xc6\xdbH\xd9c\xdar\xe4\xa1\xf4D\x06\x01\x17H%\xb61\xd09\xe980/?\x1f\xe8\x0f\x18\x04\xc3\xfc\xa2\xf9\xec\xf9\x17\xfd\x9a\xfb\xa5\xf4)\xe8\xe0\xd89\xcc$\xc2\xf0\xbe\x9a\xc2s\xcb\x08\xdb:\xeb#\xf8\xb2\xff\xa1\x02\xb1\x02&\x01.\x01i\x03\x0c\tM\x13\x8b\x1e\xcd$]#\x87\x1c\xc8\x12q\x07\xd0\xfd\x8e\xf5/\xefq\xedx\xef\n\xf1\xbd\xf1\xf7\xf2o\xf2\xc0\xf0\x1d\xef\x12\xec\xc0\xeb\x9f\xf0\xb8\xf6\xcd\xfeY\x05\xb7\x07\xc1\t#\x08\x12\x05G\x03\\\x01\xc1\x00d\x01P\x02\xfd\x02o\x03\xf2\x02\xbb\xff\xb3\xfa\xa6\xf6\x84\xf1\xbe\xed\x82\xec7\xebZ\xec\t\xee\xe9\xedQ\xedp\xecR\xe9\xb9\xe8\xe7\xecz\xefR\xf1p\xf0Q\xec4\xf3\x1e\x0c\xfe0\xf1V\x95iif\x9dY\xf6NxK\x9bL\x9cK\xe2H\xe8G\\Bs6l&\x0e\x12\xcf\xfd\xb3\xe9\x8b\xd2$\xbd\xa6\xb1\xcf\xb2Z\xbe\x83\xcc\x01\xd5\xdd\xd6\x92\xd6/\xd5\xe1\xd6T\xde+\xea\x1d\xf9\xec\x08\xd2\x15\x85 g*\xe40\xed2\xcf-\x9d"\xc7\x15\xcb\n\x0e\x04\x97\x01\xf1\x02\xa8\x02E\xfe\xed\xf4\xb3\xe5\x9e\xd6I\xccM\xc7\xdd\xc79\xcb\x82\xd0\x00\xd8\x98\xe1O\xebH\xf4\xb8\xfa\xf6\xfc.\xfd\x1a\xfd\x1f\xfes\x03\xf3\x0cx\x17\x1d\x1f\xce #\x1d|\x16F\x0f}\x083\x02\xba\xfc\x15\xf8p\xf4\xce\xf2\xf9\xf3\r\xf7c\xf9\'\xf9\x01\xf5(\xef\xab\xea\xf4\xe9\x1c\xee\xeb\xf4\xf7\xfa\xe2\xff\xe0\x01b\x02.\x02f\x01\x90\x01i\x01\x0c\x01p\x01*\x02\xce\x04R\x07L\x08[\x07\x92\x01\xee\xfa\x19\xf5?\xf0=\xee\x9a\xec\x8e\xeb\xb2\xec\n\xed\x0b\xecS\xeb\x9e\xea\xc5\xea\xea\xe9\xb4\xe6\xbe\xe6I\xf2\x94\x0c\xa80zP\xd1]\xf2Y\xdfN+E.B{B\x7fC\xb2G/K\x85IH>\x92)*\x13M\x00T\xf0z\xdf\r\xcd[\xbeS\xb9\xcc\xbe\xfd\xc9A\xd4\x13\xd8\xa6\xd6\x8c\xd4s\xd4`\xd8R\xe0i\xebI\xfaK\n\xc3\x178"x(\xd8+=-\x91*\xde"\x12\x18\xea\rq\x08\x90\x084\tT\x06+\xffB\xf4\x92\xe81\xddd\xd2{\xcab\xc7\xd4\xc8\x02\xcez\xd4S\xda\xf7\xe0\x17\xe9`\xf0\x89\xf5S\xf7\xd8\xf7N\xfb[\x02\xa9\x0bD\x15I\x1d\xa7"\x81$\xaa!\xfd\x1a\xca\x12\xa3\x0b\xf0\x06\xa5\x03\x85\x00,\xfd\x04\xfa\xf9\xf7\xef\xf5\x16\xf3\x03\xf0\xa4\xec\xa1\xea\x06\xea\xcd\xe9\x7f\xeb)\xefS\xf3Q\xf9\x92\xfe\xba\x01q\x04Z\x05z\x05p\x07\xc3\x07\x01\t"\x0b\xa3\x0b`\r\x9f\x0c\x1d\t\x9e\x03l\xfc\x1b\xf6\xa3\xf1\xb5\xefB\xee\xe7\xecO\xec\xab\xe9q\xe7X\xe6\xf4\xe3\xd3\xe3~\xe4<\xe5\xed\xecO\xfe\xf0\x16\x9c1^E\xaaM\xe5M\xdbH\xa9A`=4=9A\xc2G\x1fK\x10EU7o%\xa9\x12\xa6\x02\x82\xf1\xd4\xe0\x82\xd4\x97\xcd\xa9\xcc\x87\xce\xf9\xcf$\xd1\xae\xd2\xb4\xd3j\xd4\xb4\xd4\xdc\xd5%\xdbc\xe5\xbc\xf2\xb6\x00\xa2\x0c\x0f\x17\xf3\x1f\xac%!\'\t$\x0b\x1f\xc5\x1a\x9d\x17/\x15\xb6\x12\x95\x0f\xbb\x0b\xf3\x05T\xfd\xe1\xf2k\xe8\xb4\xdf}\xd9\xad\xd4\xeb\xd0\xae\xce3\xcf\xa7\xd2\xf7\xd77\xde,\xe4\x99\xe9d\xee\xe6\xf1\x17\xf5\xa3\xf9b\xffE\x07\xee\x0f\x9b\x16\xf1\x1bL\x1f\xf6\x1f)\x1f\xbd\x1b4\x16\xad\x10\r\x0b\x86\x06\xfd\x03\xaa\x01\t\xff\x10\xfc\x99\xf70\xf24\xedY\xe9\xc3\xe7:\xe8*\xea\xef\xecz\xf0=\xf4q\xf7\x8a\xfa\xde\xfd{\x01\x82\x05b\t|\x0b\\\x0c\x0b\x0c\x96\n"\t\xdd\x06Q\x04x\x01\xe4\xfd\x17\xfa\xe2\xf5a\xf2T\xef\x1f\xed\xf8\xeb\xb5\xea\xe8\xeaR\xeb\x90\xeb\'\xec\xed\xeb\x1c\xee\x0b\xf5\x93\x02\xe3\x16\xed+2<[D\xdbD\xe5A\x16=\x9f8\xb16e7\x9d;\xa9?\xb0>\xff6K)\x87\x19\xed\nO\xfd\x00\xefg\xe1\xe3\xd6\xe7\xd0J\xd0X\xd2\xcb\xd4X\xd7\xa8\xd8M\xd8w\xd6w\xd3F\xd2\xdd\
xd5]\xdf\xc7\xed\x87\xfdZ\x0bh\x15=\x1b\xb1\x1d\xf3\x1c\xdf\x1a\xfc\x18\x96\x17c\x17\x10\x17P\x15G\x12\xc9\r\xa2\x08h\x034\xfc\xf7\xf2\xf8\xe8\xc0\xdf\x0b\xd9u\xd5\xd2\xd3\x80\xd4\x89\xd7\x12\xdcg\xe1\xd1\xe5L\xe8$\xe9\x0f\xea\xf3\xeb\xff\xef\xae\xf66\xff.\tE\x13,\x1b}\x1f\xb8\x1f\xb4\x1c\xfb\x18\xbe\x15_\x13\xd2\x11A\x10\xa7\x0e\x9a\x0c}\t\x8a\x05\xe4\x00m\xfb\x1b\xf6@\xf1\x10\xedB\xea&\xe9\r\xea\xdd\xec\xca\xf0\x83\xf44\xf7n\xf8\xf6\xf8o\xf9\x8e\xfa\xa9\xfc\xfa\xfe`\x010\x03\x81\x03}\x03\x92\x02g\x01\x8c\x00\x0c\xff\xd8\xfdZ\xfc\x82\xfa\x1b\xf9\xbf\xf7\xdd\xf6\xcc\xf6\xdd\xf6s\xf6@\xf5#\xf3\xd4\xf2p\xf7c\x02\xc9\x11\x99!Q-F3\xad4t2\x8c.a+\xd1*L.\xfe4;;\xcd\xf63\xf5\x01\xf4\x88\xf2[\xf1\x00\xf1T\xf1\xaf\xf2\xf6\xf44\xf7>\xf9\x91\xfa\xf0\xfa\x00\xfb\xc6\xfa_\xfa\x89\xfa\xc1\xfaZ\xfb\xab\xfc\xe7\xfdR\xff\xee\xff\xa3\xff\xa7\xfe\xd8\xfc\xf9\xfa\t\xf9/\xf8\xc3\xf9\x07\xffo\x08Z\x14\xba\x1f$(\xe3+R+\x9a(}%\xfa#\xe9$\x03(\xdd,11\xc92\x130M(d\x1d\xd0\x11\xca\x07\xac\x00\xde\xfb\x19\xf9d\xf7\xee\xf5\x9b\xf3\x11\xf0\x8b\xeb\xb5\xe6\xf7\xe2\x93\xe0<\xdf\x8e\xde\xfc\xdd\x1f\xde\xb3\xdf\xf2\xe2\xd7\xe7\x9c\xedx\xf3\xc2\xf8p\xfc&\xfe\xf4\xfd\xdc\xfc1\xfc\t\xfd\xff\xffw\x04H\t\xf5\x0cK\x0e\x8b\x0c\xda\x07.\x01r\xfa\x84\xf5<\xf3S\xf3\x89\xf4\xdd\xf5\xef\xf5\xb3\xf4@\xf2\xf4\xee\x0c\xec\x1b\xea\x8c\xe9\xcb\xea<\xed\x90\xf0/\xf4\xc8\xf7\xb3\xfb\xd0\xff\xe3\x03[\x07\n\n\xad\x0br\x0c\x9a\x0c~\x0c\xc1\x0c\xad\rq\x0f\xaf\x11~\x13\xb5\x13\xd6\x11\x19\x0e<\t\x04\x04O\xff\xd1\xfb9\xfa8\xfa\x18\xfb\xeb\xfb\xc3\xfbc\xfa\x05\xf8\xed\xf4\x18\xf2\xea\xef\x0e\xef\xea\xef\x0f\xf2\xde\xf45\xf7v\xf8\x9d\xf8\xc7\xf7\xb7\xf6\xbd\xf5u\xf5\x06\xf6H\xf7P\xf9p\xfbB\xfdX\xfeQ\xfep\xfd\x19\xfc\x94\xfa\xa0\xf9/\xfa\xa8\xfcl\x02\x16\x0b>\x15\xe8\x1ex%\x11(\x9b\'X%{#0#\x7f$\xf3\'\x0e,\xc5/c1\xff.\x01)\r Y\x16\xf7\r/\x076\x02\x89\xfei\xfb\xfe\xf8\n\xf6T\xf2\xd6\xed\xb5\xe8\x87\xe4O\xe1\x19\xdf\xbb\xdd\x80\xdci\xdc\xb2\xdd\xad\xe0,\xe5\x11\xea\xf5\xeee\xf3\xb6\xf6\xe9\xf8\xe3\xf9q\xfak\xfb\x8a\xfd\x1e\x01\x84\x05\xee\t@\r\xac\x0e\xbd\r\x87\n\xb5\x05\xa6\x00\x99\xfcd\xfa\xed\xf95\xfa\xab\xfa-\xfa\xbc\xf8g\xf6`\xf3\x90\xf0M\xee*\xeds\xed\xc9\xee5\xf1\xef\xf3\xe9\xf6:\xfa\x8f\xfd\xfc\x00\xae\x03\xc0\x05\xfa\x06{\x07\xca\x07\xdd\x07r\x08\xc1\t\xa5\x0b\x14\x0e%\x10\xd2\x10\xed\x0fW\r\xa9\t\x99\x05\xca\x01\xe1\xfe\x80\xfdd\xfd\x17\xfe\xf7\xfe\x19\xff\x17\xfe!\xfcC\xf9+\xf6\x94\xf3\xc8\xf13\xf1\xd7\xf1\xf7\xf2S\xf4I\xf5\x88\xf50\xf5~\xf4\x8e\xf3\x17\xf3\xc8\xf2\xe7\xf2\x99\xf3v\xf4,\xf6\x01\xf8\xa0\xf9\x0c\xfb\x90\xfb\xd5\xfb\xcd\xfb\x7f\xfb,\xfc\x93\xfe5\x04\x81\rM\x18\xaa"\xdc)\xe1,\xfe,\x0f+\xff(\x02(\xb1(\t,z0+4\xf34\xbd0\xa9(j\x1e?\x14\x01\x0c\x07\x05R\xff\xcb\xfa"\xf7\xd4\xf3\xef\xefm\xeb\xc2\xe6\xb8\xe2\x97\xdf\xe1\xdc\x88\xdar\xd8/\xd7\xae\xd7\xfd\xd9\x0f\xde\x84\xe3\xe2\xe9\x94\xf0F\xf6X\xfas\xfc^\xfdM\xfe\xfc\xff\xf7\x02\xfd\x06o\x0b\x93\x0fm\x12\x02\x13\xfd\x10\x8d\x0c$\x07^\x02\x08\xff\x06\xfdz\xfb\x13\xfa@\xf8$\xf6\xd0\xf3:\xf1\xd0\xee\xb8\xec,\xeb\xae\xea\x18\xebn\xecj\xee\r\xf1\x91\xf4\xe1\xf8~\xfdx\x01u\x045\x06\x0e\x07U\x07k\x07\xfb\x07U\t\x93\x0bO\x0e\xcc\x10\x1f\x12\xcf\x11\xe8\x0f\xcb\x0c\xd9\x08\xe3\x04{\x01W\xff_\xfe5\xfe`\xfe;\xfe\x8f\xfd=\xfc\x0c\xfaP\xf7b\xf4\xf8\xf1\xb4\xf0\x9d\xf0S\xf1t\xf2f\xf3\x00\xf4\xe9\xf3A\xf3"\xf2*\xf1\xd7\xf0]\xf1\xd5\xf2\x7f\xf4\x9d\xf5.\xf6U\xf6\xb8\xf6d\xf8\x99\xfa0\xfdu\xffK\x00}\x00n\x00j\x01W\x05\xc9\x0c\xa3\x17i$T/"6\x1a7K3\xc3.\xc3+\xc8,\x900\x9a4c7T6=1\xd7(\xa7\x1d\x89\x12\x93\x08d\x006\xfaE\xf4Z\xee\x0e\xe8G\xe2B\xde\xbb\xdb\\\xda-\xd9;\xd7 
\xd5\xe2\xd2\xc8\xd1\x04\xd3M\xd7I\xdf[\xe9\\\xf3 \xfb\xd8\xff\xb1\x02\xdd\x04G\x07\xf8\t\xb7\x0c\xe9\x0fb\x13u\x16\xf1\x17\x1b\x17\x88\x14\x12\x11C\r\x07\t\xed\x03\x0c\xfe-\xf8\x05\xf3T\xef\x1a\xed\xd2\xeb;\xeb\xb6\xea\xfe\xe9\xd3\xe8,\xe7\xdf\xe5}\xe5\xf4\xe6\x85\xea\x85\xefg\xf5\x06\xfb\x0b\x00o\x04\xa2\x07\xe2\t\xe6\n\x82\x0b\xac\x0c5\x0ey\x10\x90\x12\xd9\x137\x14>\x13\x89\x11\x0f\x0f\xef\x0b\xd3\x08r\x05b\x02\x92\xff\x04\xfd]\xfb3\xfa\xba\xf9\x81\xf9\xb1\xf8{\xf7\xba\xf5\xc5\xf3H\xf2)\xf1.\xf1\x18\xf2\x97\xf3 \xf5\xd7\xf5\x82\xf5\x9f\xf4\xa3\xf3|\xf3M\xf44\xf5F\xf6\xcb\xf6\xb0\xf6\x9c\xf6i\xf6\x9f\xf6\xc2\xf7\x7f\xf9x\xfc.\xff\xc0\x00e\x01\xb0\x00\x1e\x00\x06\x00\x14\x01\x01\x06D\x0fo\x1cn*6429%9\xa26m4\xff1<1\xdc0\xd30\xb00F.u*\xdd#\xf0\x1a\xf4\x0f\xce\x02\xfd\xf5\x96\xeao\xe2\xce\xdd\x9a\xdbk\xdb[\xdbW\xda\xc5\xd7o\xd4\xd9\xd1b\xd1{\xd3\xcf\xd7\x81\xddI\xe4\xf3\xebI\xf4\xaa\xfd\x9f\x06a\x0e\xea\x13\x94\x16\xe4\x16\xb0\x15\xb8\x13\xe7\x12\xab\x13\xaf\x15\xe0\x17\x12\x18W\x15/\x0f\x9b\x06/\xfd?\xf4\xb6\xecC\xe7\xd4\xe3$\xe2z\xe1\x81\xe1\xc1\xe1\xbf\xe1.\xe1Q\xe0\xba\xdf*\xe0\x8e\xe2Z\xe7m\xee\t\xf7\xaf\xff\x1e\x07\xa5\x0c%\x10&\x12.\x13\xbe\x13+\x14\xa8\x14\xfe\x14?\x15\x1e\x15\xa6\x14w\x13K\x11\xcb\r\x02\tO\x03\xe4\xfd[\xf9\x90\xf6\xcc\xf5\n\xf6\xf3\xf6\x1a\xf7\x81\xf6\xb9\xf5\xc4\xf4<\xf4\xdc\xf3\xfe\xf3\xd5\xf4\xe2\xf5w\xf7\xc5\xf8\xe0\xf9\xcf\xfa\xb3\xfb\xb6\xfc7\xfds\xfc\x9c\xfa\x80\xf8O\xf6\xff\xf4\x03\xf4\xc5\xf3[\xf5\xf3\xf6g\xf8T\xf8\xb3\xf6\xa1\xf6\xd5\xf6\xba\xf8\xcc\xfa\xc0\xfbs\xfd\x96\xfd\x9b\xff\x9a\x05\xa2\x10\xce R0p;\xd4?\xe7=m9N4b1\xcd0\xe81c4\xab3\xf1/p(\x00\x1e\x14\x13\xf1\x05 \xf8\xca\xeb\xf3\xe1$\xdc\x8d\xd9\x1e\xd8\x94\xd8G\xd9\xf5\xd8\xc4\xd7f\xd5\xcd\xd3\xf5\xd4F\xd8;\xde1\xe6q\xef\x8a\xfaR\x05\xe4\x0e\\\x15\xa3\x18\xc5\x19\xa8\x19\x9a\x19\x86\x19\xa5\x19\x0f\x1a\x05\x1a\xe1\x18\xef\x15\x00\x11\xb8\n,\x03\x8e\xfa+\xf1:\xe8\x02\xe1\xb9\xdc\xf9\xda\x10\xdb\xb4\xdb\x1d\xdc\xdc\xdb\xde\xda\xd5\xd9\x04\xdaO\xdcO\xe1T\xe8t\xf0\xa2\xf8S\x00G\x08\xad\x0f\xba\x15\x04\x19\\\x19G\x184\x17\x0b\x17S\x18\xa7\x19r\x1aM\x19\x0f\x16\xb3\x10h\n\xbc\x04\xfb\xff\xd4\xfc\xa0\xf9\xe3\xf6\xfd\xf4p\xf4S\xf5\x92\xf6$\xf7\x9e\xf6\xa8\xf5\xd6\xf4\x02\xf5\xe8\xf5\x90\xf7\xe1\xf9\x8a\xfc\x1d\xff\xfd\x00e\x01\xd2\x00\xa6\xff%\xfe\x93\xfc\x1c\xfa\xe0\xf76\xf6\x1e\xf6\xe6\xf6}\xf7\xde\xf6&\xf5?\xf3[\xf1\xce\xf0R\xf0\xb5\xf18\xf4\xfd\xf6\xa9\xfa\xfa\xfc\xef\xff[\x02\x7f\x039\x04\xdf\x02x\x04n\x0c\xc5\x1a\x85-%:\xdf>\xe8;\x0c6&3\xf91;2\xbe1\x9f03.k)c!\xd3\x17\xdf\x0e<\x06e\xfcQ\xef\xf8\xe1\xfd\xd8$\xd7+\xdaf\xdd\xbc\xdd\xbd\xdb\xbb\xd9\x02\xd9\xac\xd9\xe1\xdbc\xe0c\xe7\x86\xf0\x7f\xf9\x85\x01/\tk\x10\xff\x16\x99\x1a\xf9\x19\xc0\x16\x9e\x13F\x13\x94\x15\xbc\x17\x93\x17N\x14\xa5\x0eY\x07]\xfe\x82\xf4\x13\xeb\xd6\xe33\xdf\xc9\xdbo\xd91\xd8\xfb\xd8\x05\xdb;\xdc\x1b\xdbw\xd8\x02\xd7[\xd9\x02\xe0L\xe9:\xf3\x90\xfc\xe0\x04\x9d\x0b^\x10;\x13,\x15|\x16\x14\x18\xfc\x18(\x1a\x8c\x1b0\x1d0\x1e:\x1c8\x17\x05\x10\xbb\x08\xe2\x02W\xfex\xfb\xf2\xf9\x94\xf9\xb0\xf9\x85\xf8x\xf6\xce\xf3\x88\xf1\'\xf0Y\xef4\xf0\xc7\xf2&\xf7\xce\xfbK\xff\x9c\x00\xd0\x00\x1a\x005\xff\xc0\xfeC\xfe\x86\xff\x8b\x00\x00\x01D\x00\xd5\xfd\\\xfb 
\xf8\xeb\xf4\xe2\xf1\x9e\xee\x88\xedZ\xed\xc5\xee\xc0\xf0\x83\xf0,\xf1\xaf\xf18\xf3\x99\xf5\xfe\xf6\x84\xf9I\xfc\x8b\x00\xbf\x04\xed\x07!\x0b\x93\r?\x14\xe3\x1fo-5:F>3:\xa93c.\xab.U1\xd01\xba0>-\xe8\'\xc9!]\x18\x82\r\xdc\x01\xe9\xf5\xc7\xebK\xe3<\xdf\x89\xdfb\xe2\t\xe5-\xe4a\xe0&\xdc\xa9\xd9>\xdb\x06\xe0\xc2\xe6\x8d\xee\xe3\xf6\x84\xffG\x07\xa5\r\n\x11<\x12g\x11\xed\x0f_\x0f\xeb\x0f\xc1\x115\x14G\x15\x16\x134\r\x89\x04N\xfb\xe7\xf2\xfa\xeaQ\xe4\xb4\xdf\xb8\xdd\xbb\xde\x9a\xdf\x83\xdf\xf4\xdd\xe2\xdb\xaa\xda1\xda\xf1\xda=\xde5\xe4S\xed\x06\xf7\\\xffW\x06A\x0b\xe5\x0f}\x12z\x13P\x14\xa0\x15\x81\x18\x89\x1b\x9a\x1d\x80\x1e\x82\x1c\x01\x19E\x13\xa0\x0cT\x06\xdf\x00\xc6\xfd\xba\xfb\xc8\xfa?\xfa\x92\xf9\x07\xf8\xd4\xf5N\xf3\t\xf1\xd4\xf0\x10\xf2\xd3\xf4w\xf8\xe7\xfb+\xff9\x01C\x02\xa1\x02(\x02\x99\x01\x9f\x00\x94\xff\xee\xfe\xd5\xfe\x87\xfeo\xfd$\xfb\xa7\xf7U\xf4\xa2\xf0\xfa\xed[\xec\x0b\xeb\xb3\xeb\x13\xec\xa4\xedb\xef+\xf0\x83\xf2\xcf\xf3\xbe\xf6:\xfa\xca\xfd\xa0\x02l\x06\x11\t\xfc\x08\x12\x08\x80\n\x1c\x12\xff\x1f\xa7-\xb25\xbf6\xb12\xd6.\x08,\xeb*8+\xb5+\xcc+V*\xb2&\x06!\x1a\x19\xc6\x0e\x82\x02\x8a\xf62\xed\xce\xe8\xf6\xe8\xb1\xea\xbd\xec\x05\xecm\xe8\x8d\xe3E\xdf\xfb\xdda\xdf\xc7\xe2\n\xe8,\xef\xd7\xf7h\x00d\x06\xa1\x08f\x07\xd5\x05\xc4\x044\x05\x96\x07,\x0b\xe1\x0fx\x12\xe8\x10/\x0c\x19\x05\xd6\xfd\xce\xf6#\xf0\xa5\xeb\xc3\xe9Q\xea\x08\xeb\x0c\xeb\x90\xe9L\xe7%\xe4\xeb\xe0N\xde\x8d\xde\xb5\xe2\xff\xe8\xab\xef\xbb\xf5<\xfb\x97\x00\xf5\x03\xff\x04&\x05\xd6\x05\xc9\x08\xe5\x0b)\x10\xcf\x13\xa4\x17N\x19\n\x17\x98\x12\xe8\x0c\x1c\t\x17\x06"\x03i\x01,\x00\xec\x00\x19\x01\x0e\xff\x1e\xfc"\xf8L\xf5\x9c\xf3\xd7\xf3\xba\xf5\x84\xf9M\xfd\x94\xff\x10\x01A\x01L\x01\xa2\x00\x9b\xffz\xfe0\xfe{\xff\xec\x00\x10\x02\x17\x01f\xfdB\xf9A\xf5\xd9\xf2\xde\xf1\xed\xf0\xb8\xef\x08\xeeq\xed:\xee\x0f\xf0\x1c\xf2\xc3\xf1z\xf1X\xf2h\xf4\x1f\xf9+\xfd\xf7\xff\x98\x02\x0f\x04\xdc\x05\x9d\x08\xf0\x07\x8d\x06\xff\x07\xcb\x0fp\x1f=->2P.\x85\'\x0e&\xb2(0+\xc4*\xd4(^)\x1b)\x89&\xad!\xe9\x19\x1b\x10S\x03\xa9\xf6\xf9\xef\x13\xf1.\xf5\xad\xf6\x15\xf3\x80\xed\x19\xeaB\xe7\x03\xe4\xbb\xe0b\xdfs\xe2\xc0\xe8u\xf0\xdd\xf7\x93\xfd\x98\x00\xfd\xfe\x0c\xfb\x17\xf8v\xfa\x8c\x00\xf7\x05\xcf\x08\xb1\t\xb9\x0b\x9a\x0c!\t\x13\x03\x91\xfc\xfb\xf7\xa5\xf5\xba\xf3I\xf3\xcf\xf4\xe7\xf6>\xf6\xe7\xf1O\xecH\xe9\x98\xe8O\xe8\xf7\xe7\x9e\xe9<\xee\xbb\xf4\xc6\xf9\x99\xfb\xe3\xfc.\xfd\xc0\xfd,\xfe~\xff\xd1\x03\xc9\x08\x84\x0c\x1b\x0e\x8c\x0e\xeb\x0fB\x0f~\x0c\xdf\x07\xe9\x04\xbe\x04\xe5\x05\x7f\x06\x19\x05\xf0\x03\x16\x02\xe5\xff\x0c\xfd[\xfb\xe3\xfb\xb3\xfc;\xfd\x7f\xfd\xa2\xfe\xd2\x00g\x025\x02\x0c\x00\x9e\xfef\xfe$\xff\xe3\xff\'\xff\x7f\xfe\xb6\xfcI\xfb\x82\xf9\xfa\xf6\x9e\xf5\x02\xf4n\xf3\x93\xf1\xb6\xef\xa1\xef\x1e\xf0\xa5\xf1\xce\xf0\xfd\xef\xc1\xf0G\xf2\xae\xf5\xc6\xf7\xe3\xf9-\xfc\x15\xfd\x97\xfe\x17\x00\x99\x02\xb5\x05\x1a\x06\x88\x04\xe2\x02\xde\x07u\x14k"\x80*\xf4(\x0b$o"\x86%\xe3**,*+\xdb*\xf8*\xd4+%)\xda"\x94\x19Q\rR\x02\xb1\xfcX\xfc\xae\xfd\xda\xfb{\xf5\xb1\xee\x00\xea\x1a\xe7-\xe5\x95\xe1 \xde\xed\xdd*\xe1\xb9\xe7\x8b\xef\xea\xf4\x01\xf6\x06\xf3a\xf0\x81\xf2\xff\xf8\x0c\x002\x05\xf5\x07\xf7\x08\x9e\n 
\x0c\x96\x0c\xa3\n8\x04g\xfe\xca\xfb.\xfdR\x01\xeb\x01\xd8\xfe5\xf9A\xf3-\xef^\xec\x00\xea\xc4\xe8U\xe8\xad\xe8\x86\xeb\xba\xefE\xf3\x07\xf5\xfd\xf2/\xf0\xe1\xefE\xf3f\xfa\xc1\xff\x1f\x04\xa3\x06n\t\x83\x0b\n\x0cR\x0b;\t\x8f\x07\x0e\x07\xb9\x08\xc1\x0b\xf3\r\xb2\rj\n\xfc\x04\x8f\x01\xe1\xfft\xffq\xfe\xc1\xfdQ\xfe\xdb\xff}\x01\xc1\x00\xb7\xfe;\xfcn\xfa\xee\xf9F\xfaS\xfb\x06\xfdx\xfd\xd2\xfck\xfb\xe1\xf9\xc8\xf8\x14\xf7Z\xf5[\xf3\xe2\xf2\xd4\xf3\xac\xf4\x8a\xf4i\xf3T\xf2\xcd\xf2\xde\xf2\x7f\xf3\xdf\xf4\x03\xf6\x1a\xf9\xf8\xf9\xe1\xfa\x18\xfd\x99\xfe\x9c\x01\x00\x02\x18\x02\xe2\x03\x94\x03\xbb\x02\x10\x03\x86\t[\x19\x96%\xe9\'\xc6#j!\x00&u*\xcd)\x9f(\x8e+80a2W/%)\x12!Q\x16\xeb\t3\x01\xbf\xfe\xec\xffH\xfe\xc9\xf7?\xf1\x13\xed\x8b\xe9\x01\xe3g\xdbi\xd7\xb2\xd8\xfd\xdd#\xe4\xdf\xe9B\xef\xa6\xf1\xe3\xf0\'\xeea\xeeC\xf4\x80\xfbE\x02\x1b\x06/\nK\x0f\xda\x11\x02\x11\x1b\r\xa1\x08\x0c\x06H\x04N\x04[\x05\x86\x06\xe2\x05\xc3\x00=\xfa\n\xf4|\xf0\x84\xedH\xea\xac\xe7O\xe7I\xe9-\xec(\xeec\xee\x17\xee\x1b\xed6\xedh\xee\xad\xf2$\xf9Q\xffA\x03[\x06\x14\t\x92\x0b\xf9\x0bM\n\x90\t\x88\n\xd2\x0c[\x0f\x02\x11\xe2\x10.\x0fj\x0bz\x07\x00\x05\xa0\x03\xd9\x02&\x02\xcf\x00\xbb\x00\xe5\x00k\x00\xe2\xfe\x16\xfb2\xf8I\xf7(\xf8T\xfa\x90\xfb\xed\xfb%\xfb"\xf9 \xf7\xbc\xf5\xec\xf4\r\xf5\x80\xf5\xd1\xf6\x9b\xf7m\xf7\x90\xf69\xf5\x0f\xf4\x1f\xf3N\xf3\x82\xf5\xd7\xf7s\xf8\x95\xf8\xa9\xf8\x1c\xfbM\xfd\xcf\xfd\x8d\xfd\xa4\xfc\x02\xff\xcb\x01\x05\x04i\x05\xca\x04\xf7\x05\xad\ni\x13\x12\x1f\xa9%v$\x9d\x1f\x93\x1e\xb9${+\xed,\x1b+O+M-\xcc,E&\x0b\x1d\xb1\x14;\rR\x064\x01D\x00\x0c\x00\x9b\xfb4\xf3\x9b\xeaD\xe5\x82\xe2o\xdf\x00\xdd^\xdc+\xdfU\xe4l\xe9\xd5\xec\xe4\xed\xb7\xed\x0c\xed\xc3\xee{\xf3\x0e\xfb\xbe\x02\x1d\x08\xf6\tk\n\x88\x0b\x11\x0cu\x0b\xd7\x08\xd2\x06\xad\x07#\tI\t)\x07$\x03\x98\xfe\xca\xf8R\xf3;\xefE\xed\x0b\xed:\xec\x00\xebW\xea\xa3\xea}\xeb\xd7\xeaS\xe9\xa3\xe9E\xec\xb7\xf0+\xf5p\xf9\xb3\xfd\xc8\x00\xe6\x01\xe5\x02G\x057\x08\xa8\t\x17\nm\n\xb0\x0c\xf0\x0et\x0f\xa1\x0e\xac\x0b\xbd\tK\x083\x07\xd0\x06\xf4\x05{\x05V\x04~\x02\x1d\x01\xa1\xff\xc5\xfed\xfda\xfb@\xfa*\xfa\x06\xfb\xfe\xfa\x9f\xf9C\xf8\xac\xf6I\xf6\x9b\xf69\xf6K\xf6s\xf5\xeb\xf4\x92\xf5\xd4\xf4:\xf4\xc4\xf39\xf3\xe3\xf4\xeb\xf5/\xf7n\xf8\xf1\xf7\x9b\xf8~\xf9\xd8\xfaB\xfd\xea\xfe>\x01\x11\x03\t\x02\x15\x00r\xff\xed\x02~\t\x95\x10m\x15%\x19\x07\x1ds\x1fg \x11\x1f_\x1e1!\xb8&\x83++,\xd4(&$\xe9\x1f;\x1b\xa5\x14\xd7\r\xc6\x08\xe4\x066\x06\xac\x03\xa7\xff\xfc\xf9i\xf3\xb0\xec$\xe7\xd5\xe4\t\xe55\xe6E\xe7^\xe8M\xea\xe2\xeb_\xec\xdb\xeb\xbf\xeb\xb3\xedJ\xf1\x9d\xf6\xf8\xfc\xa6\x02\xc3\x05Y\x05\xac\x03\xad\x030\x05?\x06\xd2\x05#\x05\xc5\x05(\x07)\x07\xe5\x04\xbd\x00\x82\xfcp\xf9\xa5\xf7\xb6\xf6w\xf5P\xf4\xe1\xf3\xe1\xf3\x8c\xf3G\xf2|\xf0c\xef\xfd\xee\xd2\xefQ\xf2\xca\xf5\xe5\xf9\x17\xfc\xa2\xfc0\xfcG\xfc\xc3\xfd\xcb\xff\xb3\x01\x88\x03\r\x05\xcf\x06\x0b\x08 \x08X\x077\x06\x95\x05\xa9\x05\xc0\x06\xfc\x07\xf5\x08\xb4\x08\x8c\x07;\x06\x95\x05\x10\x05G\x04\x93\x03\x9a\x03x\x04\xff\x04\x82\x04\x84\x02[\x00\xe2\xfd\xac\xfby\xfaa\xf9\xd3\xf8M\xf8p\xf7#\xf7?\xf6\x87\xf4\xc4\xf2x\xf1\xdd\xf1v\xf3\r\xf5a\xf6[\xf7\x19\xf8\x1f\xf9\xd4\xfaf\xfcu\xfdV\xfes\xff\xd9\x01\xa5\x047\x06\xf2\x06\x1e\x06\xb6\x04\xe0\x03\x8f\x04\xf3\x07\xdf\x0b\xdf\rK\r(\x0cn\r\x84\x0f\xbb\x10\x87\x10\xc7\x0f\x00\x110\x13\x1a\x15\x00\x161\x15\xd2\x13\x0f\x12\x0f\x10\xc8\x0e\xa3\rn\x0c\xb7\n\xb8\x08\xf8\x07 
\x07\x11\x05*\x01\xe0\xfc\x96\xfa\xbf\xf9>\xf9\t\xf9\xc7\xf8\xa1\xf8r\xf7\xcd\xf54\xf5\xfc\xf4\x81\xf4.\xf3\x80\xf2\x84\xf3I\xf5\x80\xf6\xa8\xf6\x92\xf6w\xf6\x0b\xf6\x92\xf5\r\xf6\x9b\xf7\x1e\xf9\xb0\xf9\xb3\xf9?\xfa\x1e\xfb\x8e\xfb\x7f\xfb\xac\xfb\x1a\xfcx\xfc\x01\xfd\xcb\xfd\x8d\xfe[\xfe[\xfd\xd7\xfc\x01\xfd0\xfd\xe8\xfca\xfc\x9f\xfc\xca\xfcf\xfc\x06\xfcc\xfcp\xfd#\xfe1\xfeC\xfe\x93\xfe\x05\xff?\xff\xd1\xff\xe3\x00\xb4\x01@\x02b\x02\xf4\x02\x12\x04\xc1\x04\xf9\x04:\x05\xb0\x05\x18\x06J\x06J\x06n\x06a\x06\xde\x05(\x05\x98\x04\xf7\x03\xed\x02\x97\x01N\x00!\xff\xd9\xfdu\xfc\x1f\xfb\xf0\xf9\xce\xf8\x92\xf7M\xf6e\xf5\x9e\xf4\xe8\xf3q\xf3R\xf3\xaa\xf3\x17\xf4s\xf4\xfb\xf4\xda\xf5\xe3\xf6\xf3\xf7\xdd\xf8\xcf\xf9\xba\xfa\xa4\xfb\x9b\xfc\xb5\xfd\xc7\xfe\x9b\xff\xd1\xff\x96\xff=\xffO\xff$\x00L\x01\xa0\x02\xe3\x039\x05\xe2\x06\x96\x08[\n~\x0c\x0f\x0f\xc8\x11\x89\x14\x1b\x17\xba\x19!\x1c\x82\x1d#\x1e\xc9\x1eC\x1f\xcc\x1e)\x1d^\x1b\x05\x1au\x18\xe1\x15\xa7\x12\xf8\x0f|\rb\n\xeb\x06(\x045\x02\xcf\xff\xec\xfc:\xfar\xf8\xae\xf6E\xf4<\xf2\xf9\xf0\x1b\xf0\xcc\xeet\xed\x19\xed_\xedk\xed\x19\xed.\xed\x08\xee\xe8\xeea\xef\xea\xef\xd1\xf0\x07\xf2\x17\xf3\xc6\xf3\xa4\xf4\xba\xf5\xdf\xf6\xff\xf7\x05\xf9\xef\xf9z\xfa\xa8\xfa\xab\xfa\xf6\xfa\x98\xfb\x14\xfcz\xfc\x97\xfc\x81\xfc\x8d\xfc\xaf\xfc1\xfd\xfe\xfd\x9e\xfe\x11\xffz\xff0\x00p\x01\x89\x02\r\x03*\x03p\x03\xff\x03\x9c\x04\x18\x05{\x05\xaf\x05\x89\x05$\x05\xdd\x04\xd0\x04\xa0\x04;\x04\xe8\x03\xbd\x03\x91\x03%\x03\x9e\x02d\x02V\x02\xfb\x01m\x01\x10\x01\xd9\x00\x91\x00\xf9\xffm\xff\x19\xff\x95\xfe\xcf\xfd&\xfd\x9e\xfc\xfd\xfb\x1e\xfbL\xfa\xdb\xf9x\xf9\xdd\xf8E\xf8\x16\xf8\x13\xf8\xf8\xf7\xa9\xf7c\xf7\x95\xf7\xee\xf7\x0f\xf8\x1e\xf8D\xf8r\xf8\xac\xf8\x01\xf9R\xf9\x96\xf9\xbd\xf9\xcf\xf9\xa0\xfaL\xfc\x0b\xfe\xe5\xff\xa3\x01\x87\x03#\x06\x15\tm\x0c\x1a\x10d\x13\x1a\x16K\x18l\x1a\xc2\x1c!\x1f\x92 \xc1 . 
^\x1f\x84\x1e8\x1d\x1a\x1b\x9a\x18\xdc\x15\xe4\x12\xd9\x0f\xd4\x0c\xf8\t\xc4\x06/\x03\x03\x00\xc5\xfd\x11\xfc\xfc\xf9\xcc\xf7\x08\xf6\xc3\xf4\x8c\xf3\x19\xf2\xfc\xf0H\xf0\xa3\xef"\xefH\xef\xbf\xef\xfb\xef\xd1\xef\x98\xef\x08\xf0\xa6\xf0$\xf1\xa9\xf1\x18\xf2\x89\xf2\x02\xf3\xc5\xf3\xd8\xf4\xc0\xf59\xf6u\xf6\xda\xf6[\xf7\xe2\xf7T\xf8\xac\xf8\x0c\xf9x\xf9\x02\xfa\x9d\xfa\x1c\xfb\x81\xfb\xd5\xfbK\xfc\xf4\xfc\xe8\xfd\xf9\xfe\xe4\xff\x9a\x00:\x01\xfb\x01\xcd\x02\x97\x03I\x04\xc8\x042\x05\x82\x05\xb0\x05\xdc\x05\xd8\x05\xb2\x05p\x05/\x05\x00\x05\xa1\x04\x18\x04\xa2\x03N\x03\r\x03\xc5\x02\x88\x02P\x02\x02\x02\x95\x011\x01\xdb\x00[\x00\xc1\xff\'\xff\xab\xfe \xfek\xfd\xa4\xfc\xfd\xfb^\xfb\xac\xfa\xfc\xf9l\xf9\xf8\xf8\x87\xf8U\xf8D\xf8D\xf8X\xf8^\xf8s\xf8\xba\xf8\xd9\xf8\x03\xf9U\xf9x\xf9\xb5\xf9\x11\xfal\xfa\xe9\xfaU\xfb\x94\xfb\x08\xfc\x95\xfcT\xfdz\xfe\xf5\xff\x05\x02c\x04\xc6\x06`\t\xd7\x0b\x1c\x0eq\x10\xf4\x12\xce\x15\x95\x18\x84\x1a\xdd\x1b\xe6\x1c\xa5\x1d\n\x1e\xde\x1d^\x1dt\x1c\xb6\x1a\x99\x18\x89\x16\xbc\x14\xaa\x12\xdd\x0f\xd2\x0c\xce\t\x1a\x07\x9d\x046\x02?\x00;\xfe2\xfcB\xfa\xa4\xf8u\xf7\x1f\xf6\xcf\xf4z\xf3p\xf2\xa0\xf1\xba\xf0#\xf0\xb4\xefU\xef\x1d\xef\xe4\xee\xe7\xee\xf3\xee\xfb\xee0\xef\x9b\xef\x1b\xf0\xa4\xf0_\xf1S\xf2)\xf3\x02\xf4\xe3\xf4\xbb\xf5q\xf6\xe5\xf6\x96\xf7x\xf84\xf9\xc6\xf9k\xfa5\xfb\xcc\xfb3\xfc\xc2\xfcr\xfd\x0e\xfe\xa8\xfe\x82\xff\xbc\x00\xac\x01>\x02\xc5\x02T\x03\xf5\x03U\x04\xc7\x04d\x05\xbd\x05\xd6\x05\xca\x05\xf5\x05\x10\x06\xd1\x05r\x05D\x050\x05\xd8\x04X\x04\x1a\x04\xf7\x03\xac\x03=\x03\xea\x02\xac\x025\x02\xad\x018\x01\xe9\x00y\x00\xde\xff@\xff\xaa\xfe/\xfe\x93\xfd\xf9\xfcg\xfc\xd9\xfbY\xfb\xea\xfa\x8d\xfa\x1d\xfa\xbf\xf9t\xf99\xf9"\xf9.\xf9R\xf9i\xf9O\xf9=\xf9f\xf9\x91\xf9\x88\xf9\x9c\xf9\xe7\xf96\xfaM\xfaB\xfa\x7f\xfa)\xfb\xcb\xfb\x04\xfc\xa5\xfc\xe1\xfd(\xff\x8e\x00/\x02A\x04\xc2\x06\xf6\x080\x0b\xe5\r\\\x10\\\x12!\x14\x1c\x16\x0c\x18\x99\x19\xa0\x1aS\x1b\xd0\x1b\x95\x1b\xd6\x1a\x13\x1a\x01\x19\xae\x17\xc6\x15\xb8\x13\xe6\x11\xbf\x0f|\r\x05\x0b\xa7\x08w\x06\xfd\x03\x9c\x01j\xffp\xfd\x82\xfbz\xf9\xbd\xf7A\xf6\xc9\xf4W\xf3\xf5\xf1\xde\xf0\n\xf03\xef\x88\xee\x08\xee\x8f\xedC\xed,\xed]\xed\xbb\xed\xd4\xed\xf6\xedb\xee\x0c\xef\xe1\xef\xa6\xf0o\xf1F\xf2#\xf3\r\xf4\x18\xf5&\xf6\t\xf7\xcb\xf7\x89\xf8u\xf9\xa3\xfa\xa2\xfb_\xfc\xfc\xfc\xa5\xfdj\xfe\x1c\xff\xd8\xff\x94\x008\x01\xb4\x01&\x02\xc7\x02h\x03\xd3\x03\x19\x04X\x04\xb3\x04\x00\x05(\x057\x05C\x05M\x05]\x05`\x05R\x058\x05\r\x05\xd6\x04\xd0\x04\xde\x04\xcc\x04\x93\x04I\x04\x1f\x04\xdf\x03z\x03?\x03 
\x03\xd2\x02H\x02\xea\x01\xb6\x012\x01X\x00s\xff\xe4\xfe_\xfe\xb0\xfd\xf4\xfca\xfc\xc5\xfb\x0c\xfbz\xfa\x11\xfa\xbd\xf96\xf9\x92\xf81\xf8+\xf8=\xf8G\xf8\\\xf8\x88\xf8\xc7\xf8\x0b\xf9C\xf9\xb4\xf9S\xfa\xdb\xfae\xfb\xf6\xfb\xb8\xfc\x8c\xfdA\xfe\x11\xff\x0e\x00"\x01\x16\x02\xfd\x02\x1a\x04H\x05r\x06\xd8\x07\x80\t\x17\x0bs\x0c\xb3\r\x1d\x0f\x85\x10\xac\x11\xc1\x12\xe1\x13\xbd\x14:\x15\x88\x15\xe1\x15!\x16\xe0\x15#\x15M\x14o\x13Z\x12\xe6\x106\x0f\x8d\r\xd3\x0b\xf6\t\x17\x08?\x06G\x04\x1d\x02\t\x00:\xfe\x9d\xfc\xe1\xfa"\xf9\xac\xf7j\xf6$\xf5\xf5\xf3\xf1\xf2\x1d\xf2L\xf1o\xf0\xe7\xef\x9e\xeff\xef2\xef*\xefw\xef\xca\xef\t\xf0_\xf0\xe4\xf0\x91\xf1"\xf2\xc1\xf2\x8e\xf3^\xf4)\xf5\xe1\xf5\xba\xf6\x9c\xf7R\xf8\xf7\xf8\xa8\xf9p\xfa\'\xfb\xc8\xfbk\xfc\x0f\xfd\xa0\xfd&\xfe\xb5\xfea\xff\xf9\xff\x80\x00\x0e\x01\xc2\x01\x8b\x02.\x03\xb9\x03@\x04\xc0\x041\x05\xa0\x05\'\x06\x96\x06\xca\x06\xe7\x06\x1a\x07Z\x07^\x07%\x07\xe5\x06\xa4\x06T\x06\xf2\x05y\x05\xff\x04h\x04\xcc\x03:\x03\xab\x02\n\x02>\x01]\x00\x96\xff\xf0\xfeU\xfe\xba\xfd\x1d\xfd\x82\xfc\xf7\xfb\x8c\xfb\x1f\xfb\x95\xfa$\xfa\xbb\xf9S\xf9\x10\xf9\r\xf99\xf9?\xf9\x14\xf9\x04\xf9H\xf9\xb8\xf9\r\xfaW\xfa\xc7\xfa[\xfb\xc6\xfb/\xfc\xea\xfc\xd9\xfd\xa8\xfe\x16\xff\xae\xff\xa3\x00\x96\x01g\x02,\x03\xfc\x03\xe0\x04\xa0\x05J\x06,\x07\xf0\x07k\x08\xd9\x08L\t\xc8\tM\n\xb7\n\x15\x0b[\x0b\x88\x0b\xdb\x0b@\x0ch\x0cX\x0cC\x0c=\x0c\x1b\x0c\xc9\x0b\\\x0b\xf2\n\x87\n\xeb\t5\t|\x08\xcd\x07\xf6\x06\xf6\x05\xe8\x04\xe8\x03\xf4\x02\xd8\x01\xaf\x00\x9b\xff\x99\xfe\x9b\xfd\x84\xfc\x85\xfb\x91\xfam\xf9M\xf8U\xf7}\xf6\xac\xf5\xb6\xf4\xf7\xf3o\xf3\xeb\xf2z\xf2#\xf2\xea\xf1\xc7\xf1\xa8\xf1\xb9\xf1\xec\xf18\xf2\xa5\xf21\xf3\xef\xf3\xaa\xf4d\xf5<\xf6K\xf7e\xf8j\xf9r\xfaz\xfb~\xfcn\xfdS\xfeL\xff=\x00\x16\x01\xe2\x01\xb7\x02\x87\x031\x04\xb2\x043\x05\xb1\x05\x19\x06h\x06\x9a\x06\xbc\x06\xde\x06\xe0\x06\xce\x06\xbd\x06\xb7\x06\x9d\x06T\x06\x0e\x06\xc8\x05m\x05\xf2\x04x\x04\x00\x04u\x03\xd9\x02P\x02\xe4\x01b\x01\xb1\x00\xef\xffE\xff\xaf\xfe\x15\xfeO\xfd\x90\xfc\xee\xfbg\xfb\xe2\xfa\x7f\xfaM\xfa(\xfa\xd4\xf9x\xf9l\xf9\x91\xf9\xaa\xf9\xbb\xf9\r\xfa{\xfa\xc3\xfa\xf0\xfaa\xfb\x15\xfc\x9b\xfc\x11\xfd\x98\xfd3\xfe\xc8\xfec\xff\x19\x00\xce\x00^\x01\xca\x01R\x02\x0b\x03\x9a\x03\xef\x03\\\x04\xfc\x04|\x05\xcd\x05\x13\x06t\x06\xc1\x06\xdf\x06\t\x07R\x07\xa8\x07\xec\x07\x14\x08;\x08W\x08:\x08\xfa\x07\xc4\x07\x9e\x07\x85\x07g\x07\x18\x07\xc7\x06\x87\x06\x1f\x06\xad\x05A\x05\xf1\x04\x90\x04\x13\x04\x8b\x03\xf9\x02y\x02\xfa\x01\x8b\x01A\x01\xf9\x00\x93\x00\x1c\x00\x95\xff\x0c\xff\x81\xfe\xf4\xfdq\xfd\x01\xfd\x9a\xfc\x10\xfc{\xfb\xe2\xfaJ\xfa\xc1\xf9^\xf90\xf9\x14\xf9\xf5\xf8\xbd\xf8\x7f\xf8]\xf8C\xf8Y\xf8\x8c\xf8\xc8\xf8\x08\xf9K\xf9\xb8\xf9/\xfa\xa7\xfa\x1d\xfb\xa1\xfb>\xfc\xdf\xfcs\xfd\n\xfe\xa4\xfe\'\xff\xaf\xff:\x00\xc0\x00P\x01\xc9\x014\x02\x98\x02\xf6\x02b\x03\xb1\x03\xe6\x03\xf6\x03\xe4\x03\xe6\x03\xf1\x03\xf7\x03\xfa\x03\xe4\x03\xb7\x03U\x03\xea\x02\x89\x02\x1c\x02\xc1\x01`\x01\xf4\x00\x8e\x00\x1a\x00\xa6\xff>\xff\xc9\xfed\xfe\xfb\xfd\xa1\xfd/\xfd\xb7\xfci\xfc\x1b\xfc\xdc\xfb\xbb\xfbd\xfb-\xfb\x00\xfb\xc3\xfa\xcd\xfa\xa4\xfa\x98\xfa\xbc\xfa\xd9\xfa\xfe\xfaD\xfb\x8e\xfb\xdf\xfbN\xfc\xa1\xfc 
\xfd\x94\xfd3\xfe\xaa\xfe"\xff\xb5\xffn\x00\x13\x01\xdd\x01U\x02\xf8\x02\x95\x03\xec\x03`\x04\x87\x04\xd3\x04\xed\x04\xef\x04\xfd\x04"\x05$\x05~\x05\x7f\x05\xca\x05\xb4\x05`\x05q\x05\x11\x053\x05\x86\x04\xed\x04|\x04\xe1\x03u\x04\xb3\x03o\x030\x03\x19\x03\xb7\x02Y\x02c\x02\x8f\x02\x9d\x02\xf4\x01\xe9\x01\xd1\x01\'\x01\xcd\x00\r\x00\xe7\xffj\xff\xf3\xfe\xc5\xfe"\xfey\xfbH\xfa\x8d\x01\x9c\x0fV\x15\x8a\x03\x89\xee\x1c\xe8\xd7\xee\\\xf7\x91\x03|\x05\x86\xfd\n\xf3\xe9\xe7\xc7\xeee\xf3=\xf8\xde\xf8\x1e\xf6\x00\xf6\x80\xf6\xe9\xf8\xbd\xfe\n\x04\x1f\x02W\xfb\x17\xf8B\xfcI\x028\r+\r\xcf\x04u\xff\x7f\x00\x93\x06\xa5\x0c\xaf\n\xa1\x02\xbb\x005\x05\n\x0b5\x0b\xe4\x06"\x02\x90\xff\xbf\x04\xb3\x06\x0c\xfff\x04\xde\x02\xa8\xff\x01\x01 \x03\x17\x07\xd3\xff\xed\xfd\xc0\xfe\x97\xfc\xd3\xfe\xbf\x03a\x04\xed\x01\x92\xfc\x02\xfbt\xfe\xfd\x00\xd3\xfe\x80\xfdO\xffj\xfeE\xfc^\xfe\xab\xfcH\xfe\xf4\xfd\x13\xf8\x97\xf7\xa8\xf9\xea\x034\xfcN\xfa\x88\xfbQ\xf6c\xf8S\xfc+\x02\x1d\xf9\xac\xfc3\xfeZ\xfau\xfb\xe8\xfeP\x02\xe3\x00\xd3\xff\xa1\xfc(\xfdf\xfd\xe7\x01t\x07\xe8\x04?\x02\xda\x01\xc2\x00\x8e\x01\xac\x05-\x05\x00\x03\x1c\x04\xae\x07\xb2\x04 \x03\x87\x04f\x04\xce\x06\xa1\x06F\x03\xc3\x01\xb5\x02\x93\x03\xba\x05\xbc\x04\xeb\x04\xb9\x01\xd1\xfe`\x00\xa4\x04\x00\x03\xc0\xffx\xff\x9a\x01\x00\x00w\x00\xa4\x02u\x00\xcd\xfd"\xfd\xda\xfdZ\xfeA\x02\\\x001\xfes\xfb\xfa\xfa\xe1\xff0\x01\x9b\xfd\xdb\xfc_\xfdB\xfc<\xfc\x9e\xfd\xad\xfeG\xfd\xf5\xfb\xb4\xfa\xcb\xfb\x0e\xfdd\xfc\x0c\xfd\xdd\xfd\x8f\xfco\xfb-\xfb\xae\xfc\xc3\xfe\xe5\xfeg\xff\xdc\xfe\x0e\xfe\x80\xfc\x11\x00\xf1\x03\xce\x01\x0f\x03=\x02\xc1\xfe\xbc\xff\xb6\x04\x00\x05\xbd\x025\x03a\x01\xfe\x00\xd1\x05+\x05\xca\x01s\x01\x06\x05\xb6\x03$\x02\xad\x019\x02\x9e\x02\xf0\x01\x84\x01C\xff\xab\xfe\xa2\xfe,\x02z\xff\x0c\xfd\x99\xfb\x85\xfc\xc5\xfe\xb4\xfd\x81\xfc\xd9\xfcn\xfe_\xfd\x88\xfb\x8f\xfc2\xfe\xfc\xfd\xd2\xfb\xbd\xfa\x9c\xfe\xc0\xfb\xa0\xfcC\xfe\xa0\xfc\xfc\xfb?\xfc\xc8\xfe?\xfdw\xfc|\xfd\x8d\xfd!\xff\xe9\xff8\x00V\xfe\xce\xfdO\xff\x0e\xff\x15\x00\xb3\xff\'\x02\xd6\x01\xc4\x00\xc5\x01_\x03F\x01\x94\x00h\x02\xbc\x02\xe1\x04W\x04\x03\x04\x87\x02R\x04v\x04\x0e\x04U\x07n\x05q\x02\x18\x03\xe4\x03\x14\x03\xeb\x03\x8b\x06^\x04\xa4\x02\t\x02F\x02\x92\x00|\x02\'\x03\xc7\xff\x16\x017\x01m\x01\xeb\xff\xc2\xffr\xfd\xf6\xfd\x0c\x00h\xfe$\xff\x92\xffw\xfc\xe0\xfa\x16\xfe+\xfc2\xfbC\xfd\x7f\xfbw\xfb\xa0\xfc?\xfc\xd0\xfb/\xfbY\xfb\xdc\xf9\x05\xfc>\xfdk\xfd\\\xfd\x8a\xfc\x18\xfe\xa8\xfd"\xfe\x06\xff\x19\x00!\xfd\xfa\xfe\xce\x00\xae\x00\x82\x01S\xff\xfa\xff,\x01;\x02X\x03\xf2\x03\xf0\x01\xc2\x01\xf9\x01E\x02\xdf\x03e\x05\xb2\x04\x81\x02\xc6\x03\xdf\x02\xab\x03^\x03\xc2\x03\x19\x02\xa6\x02"\x04\x1c\x02\x84\x02\x95\x01n\x02\x96\x00X\x00:\x00\xf3\x00\xb7\xff\xbe\xfe\xac\xff\xa1\xfe\x90\xfe3\xfe:\xff|\xfe\xe3\xfe4\xfe\x15\xfe\xa5\xfe%\xfe\x82\xfc\x07\xfd\x00\xfe\xbb\xfe\xbf\xfd0\xfc\x1d\xfe\xf9\xfd\xb4\xfd\x8c\xfc\x91\xfd\xe6\xfb\x02\xfc\x86\xfd\n\xfd\xea\xfeb\xfd\xa8\xfd\xd0\xfc\x8f\xfe\x04\x00-\x00r\xfdG\xfd\x15\x01v\xfe{\x00\xff\x00s\x01\xb2\x00W\x01\x7f\x025\x02\x85\x019\x00b\x04\xd1\x03T\x03\xe7\x02\x9c\x02 
\x04\xc4\x03\r\x04\xcf\x04j\x04$\x02e\x02\xed\x04\xa9\x03Y\x03\xfc\x04\xe7\x03{\x02\xeb\x01\x9d\x02\xc9\x00c\x02R\x02\x9e\x02\xbd\xffr\x01\xc4\x01\x9f\x00\xda\xfe\xeb\xfcu\xff9\xfc\xca\xfe5\xfeG\xff\xdf\xfd\xe8\xfb\xb0\xfb}\xfbI\xfb\x8b\xfd\xd5\xfa\xad\xfa\x11\xfe\x94\xfb\x0c\xfd\x99\xfdG\xfd\x98\xfa\xec\xfa\x00\xfd\x86\xfe\xc3\xfb\x1e\xff0\xfeX\xfeZ\xfe\x95\xfd\xf4\xfe\x9f\xfe{\x01\xfc\xfe\x0b\x01-\x00\xe3\x00\xf5\x00e\x01b\x02"\x02\x9a\x01\xd7\x01\x82\x01I\x04l\x04.\x01\x91\x03e\x03\xb0\x02\xc9\x03\x9a\x03I\x02\xda\x02;\x01\xc6\x03\xb7\x02\x08\x01\xf1\x02\x8f\x01\xfb\xff\xc4\x00\xe2\x01\xb6\xff"\x00(\x00\x01\x01d\x00c\x00\xba\xffW\xfe\xcc\xfe\xb3\xfe\xaf\x00O\x00\xa8\xfe]\xfe\xb4\xff\xed\xfe$\xfe[\xfe\x93\xfe\xe5\xfe\xdf\xfeF\xff\xe0\xfd\x96\xfdN\xfe\xf3\xfdQ\xfc\xd0\xfdN\xfeQ\xfe\xd8\xfe\xbf\xfb\x17\xfb\x03\xfd\xef\xfc\x81\xfc\xae\xfdM\xfch\xfdd\xfee\xfd\x9f\xfb<\xfd^\xfe\xf9\xfe\xd3\xff%\xfe[\xff\xce\xfe\xb2\x00J\x01\xa6\x00s\x00\xc4\x01\xae\x01\xad\x00H\x03\x8a\x03\x9c\x02w\x02\xbf\x02\x81\x02\xfd\x05:\x033\x01\xcf\x03\x8e\x03\xee\x03\xba\x02\x1e\x03F\x02<\x02p\x03\xc4\x02m\x01\x15\x01\xb7\x01<\x01~\x00_\x00}\x00(\xff\xc2\x00d\xff\xeb\xfd4\x00\\\xfe:\xfd,\xfe\x0e\xfe\x0b\xfe\xc9\xfdX\xfe\xf5\xfc%\xfd\xa9\xfd\xb5\xfc\x1b\xfe=\xfe*\xfd\x85\xfdG\xfet\xfd\x8e\xfe4\xfe\x0b\xfe;\xfd\xb2\xff\xfb\xff[\xff\xe6\x00\xd1\xfe\x05\xff\xa7\xff\xd9\x00\'\x01\xe4\x00P\x00\xf1\x01p\x01t\x01\xbc\x01\x10\x01\xd9\x011\x01;\x02\x9c\x02\x1e\x022\x02s\x01\x86\x01\xab\x013\x01N\x02\xf6\x01\xc1\x00\x94\x01\xe2\x01\x84\x01\xf9\x01#\x01\xf1\x00H\x01\x06\x01\x86\x00\xac\x01F\x02\x01\x01I\x00\x18\x01\xb0\xff5\x00\xd1\x01\xe8\xffH\x01F\xff\xb5\xff\xd7\xffq\xff\xf2\xfe0\xffR\xffD\xfe\x08\xffS\xfd\x0e\xfe\xdd\xfd\xa0\xfdY\xfeD\xfd\xdf\xfc\xae\xfd\xa2\xfd\xaf\xfd>\xfd\xea\xfc\x95\xfc\x95\xfd\xc6\xfd\x8b\xfe\x92\xfe\xc2\xfdb\xfeD\xfd\xb5\xfe\xe0\xfd\x1b\x00\xe6\xfe\x89\xff\xa3\xff~\xff\xf9\x01\x16\x00\'\x01\x0e\x01\x84\x01$\x00J\x02\xed\x01>\x03\xcf\x03\xc8\x02\xfa\x020\x028\x03\xfb\x02\xb2\x02\xc4\x02P\x04j\x03\xa3\x03w\x02\xb6\x03\x85\x01N\x02\xa2\x02\x0c\x03_\x017\x00\xf3\x00t\x00L\x02\xaa\xff\xce\x00\x94\xfe\xc0\xfe]\xfe&\xff\xd8\xfe\x11\xfe\xb2\xfd\'\xfd\xea\xfdr\xfc\xea\xfc\xe3\xfc\x95\xfc\xc8\xfd\x14\xfcT\xfc\x87\xfc\x92\xfc\x9d\xfc\xc7\xfdS\xfe\xdd\xfc<\xffR\xfd\r\xfeJ\xfe2\xffo\xfeG\xff\xc0\xffN\xff\xf2\x007\xff~\x01\x03\x00F\x00\xa5\x00\x7f\x01\x85\x01\xfa\x00\x82\x02\xe2\x00A\x02~\x02\xa2\x01\x8b\x00$\x01J\x02\xe0\x017\x03;\x01\x8a\x00\xc0\x00\x11\x01V\x01c\x02\xff\x01\x05\x00\xd1\x00\xf8\x00Z\x00\xa1\x01_\x01\x06\x00\xb8\x00\x16\x00c\x00\x81\x00{\x00\xbe\xffP\x01V\xff\x95\xff\xf7\xfe\x97\x00\xd8\xfe\xc2\xfe\\\x01<\xfe\xa3\xfeS\xfd\'\xff\xdb\xfe\x0f\xffO\xfdv\xfd\xc8\xfd\xb7\xfc\xd0\xfd\xad\xfd\x84\xfd\xef\xfct\xfd\xe8\xfcX\xfe\x05\xfd\x03\xfd\xbd\xfe+\xfd\xf3\xfe\xd7\xfd\x9f\xffc\xfe\x0c\xff\x15\x011\xfe\xbd\x00\x95\xff~\x01^\x00F\x01\xc2\x00\xf4\x01l\x02[\x00\t\x03\xb2\x01\x06\x03\t\x02\xb8\x02\xdc\x01{\x02\x95\x02|\x02Y\x02<\x02i\x02\t\x02B\x02.\x01e\x02v\x01B\x01\x9a\x01=\x00\xbd\x01\x9d\x00\xcc\x00\x0f\x00\xcd\xfev\x01\xf8\xfe\x8b\xff\xa9\xff"\xff\xb5\xfe\xad\xff\x1d\xfe\xf2\xfdT\xff\n\xfe8\xff>\xfe\xed\xfd\x8f\xfd\x12\xfe[\xfe\xd1\xff\xd7\xfe\xd1\xff\xfc\xfd\x00\xfe2\xfen\xfe\xa5\xffR\xffc\x00Q\xfe\xf5\x00\xad\xfd\x95\x00\x9e\x00\xfa\xff\x17\x00\xaf\xfe(\x01\xf8\x00\x87\x02\x1d\x00w\x02\xcb\x00\x1d\x02\xe6\x02I\x01\x05\x03\x99\x01\xb3\x02\xcf\x01\xe3\x02"\x02?\x02\xe6\x01=\x01\xfb\x01\x7f\x00O\x01u\x00:\x00n\x00\x08\x01T\xff\x98\xff\x16\xff\xbc\xffm\xff\x89\xffV\xff\xd9\xfd\xbb\xff9\xff0\xfe\xc2\xffO\xf
e\xa9\xfe\xc5\x00_\xfd\xa7\xff\x02\xfe\xdb\xff<\xfe\x93\xff\x1c\xff\x1b\xff\xdd\xfer\xfe\x8d\xff/\xfe\x1e\x00U\xfd[\xfff\xfd\xa1\xff\x15\xff\x8c\xfe\xb0\xfe\x13\xfew\xfe\x87\xfeW\xff\x07\xff\x15\x00\xc3\xfe\xa5\xff\xfd\xfe\x1f\x00\xae\xff\xa3\xff\xc2\x00Q\x00g\xff\xed\x00\xcf\xff\xac\x01b\x01\xa1\xff<\x01\xd2\x00\xd2\x01S\x00\xee\x01\x91\x00\x0b\x02\xa8\x02\x96\x01\xcf\x01\x88\xff\xe1\x00y\x01G\x01g\x03\xa5\x01c\x00\x19\x01\xeb\x01\xc6\x00E\x02\xcc\x00\x08\x01a\x01\x89\x00\x1c\x01\xad\xffd\x02\x8b\xff\x81\xff\xc3\xff\x86\xfe\xd9\xffy\xff!\xff\x13\xff\x12\xfer\xfe\x19\xff@\xfe\xa6\xfe&\xfd\xe0\xfd9\xffp\xfe=\xfe\xce\xfd\xde\xff\x86\xfe;\xfe\x85\xfft\xfd?\xffq\xff\xa6\x00\x18\xff\xce\xff\xa6\xff~\xfe9\x01u\x00\x83\xff\xbc\x00\xce\x00\xda\xff\x01\x00\x9e\x00\xa1\x00I\x00\x96\x015\x00\x96\x018\xff\x15\x02\x03\x00\xc8\x00\xcf\x01E\xff\xfe\x01\x95\x00\xb7\x01\xfe\xffO\x01\'\x01\xe4\x00\xec\xfe\x15\x01\xed\xff\x9c\xff\xc8\x00\xc8\xff*\x01\xde\xfe*\x00\xc6\xfe\xd0\xffl\xffP\xfe/\x01\xbb\xfe\xaf\xff\xa3\xfe\xf6\xfe2\xff?\xff_\x00\xc4\xfd\xd2\xff\xeb\xfe\xea\xff\x7f\xfe\'\xff\n\xff\xb0\xfe\xc0\xffm\xfe\xf7\xffr\xff\x89\xff\xd9\xfe\xdc\xfe\xab\xfe\x1c\xff\xe8\xff\xf7\xfe7\xfe\xdc\xff8\xff;\x00\xbe\xff\x94\xfe\x94\xff\xc5\xff{\xff\xc6\xff*\x00\xc4\xff\xb8\xff\x8c\x01\xea\xff#\xff\xe6\x00K\x01\xe3\x00Z\x00\xd0\xff\xf8\x01,\x00\xb9\x011\x02b\x00q\x02\x1b\xff\x08\x03$\x00\xb4\x01&\x01\x8c\x00\xee\x01a\x00\x9c\x03_\x01\xf5\xff]\xff\xdc\xff\xc0\xffN\x01\xd5\x00\x93\x00\x10\x00t\xff\x95\xff\xae\x00S\xff\x1c\xff\xa6\xfe\xa9\xfeR\x00g\x00u\xff\x08\xffQ\x00\xaa\xfeD\xff`\xfea\x01\x88\xff\xd6\xffU\xff\xf2\xfek\x00/\xff\xcd\xff\xc4\xffq\x00\x9b\xff\x93\x00\xac\xff\x00\xffR\xff`\x00\xf4\x00N\x00\xef\xff\xbd\x00\r\x00w\xff\x91\x00\xc1\xff\xe5\xff#\x01\x15\x00\xc6\x00\x08\x00\t\x00\xd0\x00F\xffd\x00\x83\xff\xde\x01\x8f\xff\xd4\x00\xa2\x00\x7f\x00\xea\x00\x08\x01\xe2\x01\xe0\xff\xc8\x01\xce\xfe\xa7\x01\xdc\x00/\x01\x1c\x01-\x00Z\x01\xa2\xff4\x00\xcb\xff\xb0\xff\xfe\xfe\xa2\x01F\xff\x81\xff\x9a\xff\xf2\xfe~\xff5\xff\x1b\x00\xf5\xfe\x13\xff\x96\xfe\x14\xfe\xa0\xff\xe2\xffx\xfe\x17\xfe\x94\xfd\xf6\xff\xf6\xff\xba\x00:\xff6\xff\xaa\xff.\xfey\x00\xe8\xfd\x04\xff9\x00\x86\xff.\x02\xba\xfe6\x00(\xff`\x00t\xfe\xe7\xff2\x01*\xff\xe3\x01\x0e\x00\x19\x01\xc9\xfe\x9d\x00\xeb\xfe\x00\xff\xce\x00W\x01\x9d\x01\xdc\xfea\x00\xdd\xff\xd3\xfe\xa6\xff\xe1\x00\xe7\x00\xbe\xff|\x01\x11\x00\\\x00@\x01\xc1\xfe\x94\xff\xb6\xff\x07\x01\x81\xff+\x01\x05\x00\xa1\x00v\xff\x90\xff&\xff\xf7\xff}\x01i\xfe\xe9\xffn\x00`\x00\xde\xfe\xdb\x00Y\xfeh\x00\xac\xff\xd0\xff\xda\xffR\xfe]\xff\xb0\x00\xc9\x00\x90\xff.\xff\xcf\xfe\xbf\x00\xd3\xff\xcc\xff>\xfe3\x00\xde\xffJ\x00\x8e\x00\x8f\x006\xff\xd2\xff\x98\xfe\xea\xfe\x95\x00i\xff7\x01\xc9\xfe:\x02\xbc\xff\x19\xff\xba\xfe$\xff\xb3\x00\n\x00\xd8\x01S\x00K\x00\xbe\x00\x12\xff\xa8\x00\xc8\xff\x91\xfe\xd1\x01L\xff\x9d\x00\xc2\x00\xd2\x01\xb1\x00\n\x00\xf7\xff\xdc\x00\x10\x01\xe6\xfd\xd3\x00W\x00\xa7\x00{\x02\x08\x01}\xfe|\xff\xef\xffI\xff\xb1\xff_\x00\xab\xfe\xdd\xff\x8c\x00\xde\xfe\x8f\xffh\xffI\xff|\xff\xea\xfe\\\x00C\xffG\x00~\x00\xb0\xff\x04\xffJ\xffR\x00\xee\xff\xca\xff\xc0\xfd\xe9\x00\xe3\xffN\xff\xb6\xfew\xff\xf7\xffd\x01\xab\x00t\x00<\x00\xbb\xfe\xaa\xff\x7f\xfeK\xff\x14\x01L\x01m\x01\x10\x01a\xff~\xff.\xffT\x00\x96\x00\xcf\x00G\x00m\x00\x88\xff2\x00R\x00e\x00\x83\x01\xe7\xff\xef\x00}\x00"\x02\xce\xff]\xfe\x10\x01\r\x00\x8a\xff\x0e\x00w\x00\x8b\x00\x07\x02\xd3\x00\xd0\xfe\x8b\xfe\x8a\xff\x80\x00q\x00\xf9\x00\xc9\xff\xcb\xfe\xcf\xfe.\x00\xbe\x01T\xff\xbb\xfc\x80\x00\xca\x00\xa5\xff\'\x00\xc5\xff\x8a\xfd_\
xfc\xd7\xff"\x022\x02Q\x00\x95\xff\xf7\xff\x8b\x00\x80\xfe\xbd\xfe\xaa\xff\xbb\xff\xd9\xfe\xbb\xff\xf1\x01<\xff\x03\x01%\x01\xd3\xff\xe5\xff\x85\xff\x88\x01`\xff\x15\xff\x06\x00\x13\x00<\xffV\x01\xe0\x02\xa4\x01\xca\xff\xd4\xff\x03\x000\xfe%\xff\x12\xff\xea\x01\xca\xff\x94\x00\xb2\x00\xd6\xff\xab\x01\x0e\xff\x16\x00x\xff\xa2\x00X\x029\x00\x81\x00*\x01W\x00\x81\xff\xcc\x008\xfe{\xfd\xae\xff\xcd\x02<\x04n\x00\x12\xff*\xfe\xff\xfd\xe5\xffz\x02\x9a\xff\xa4\xfdW\xff\x9c\x01\xc6\x01J\x00\x0b\x00J\xff2\xfd\x00\xfe\x18\x00`\xffk\xff\xaf\xff\x8a\x00\xf5\xff#\x01\xfe\xff\xac\xfe\x99\xff\xe3\xfe\r\x00H\xffn\x01\xeb\x01G\x00\xe2\xff\xc9\xfe\xf2\xfe\x00\xff\x91\x00\xb5\x01\xda\x01\x9f\x01\xd5\x01\xb3\xffN\xfe\xec\xfe\xc1\xffA\x01i\x00\xf7\x00\x00\x01h\x00\x88\x00\xd4\xfei\xfe\x8b\xfe\x86\xff\xf6\x00\xc4\x00\xa9\xff\xf4\x00\xd0\x00a\xfe\xcb\xfe\x1d\x00v\xfe\xe5\xfd\x06\x00C\x00D\x01\xfc\xff\x8c\xfe\x14\xfe\xe3\xfd\\\xfeX\xfdB\xfe(\xff\xb3\xfeh\xff\x97\xff\x83\xfe?\xfd\xed\xfc\x8d\xffO\xfe\x06\xfe\x9c\xfd\x82\xfd\xfd\xff\xec\xffB\xff\xb3\xfd\xb3\xfdy\xfem\xfe\xc1\xff\xee\xfe8\xff\x14\xff\x80\xff1\xff7\xfe\xe5\xfd$\xfc\x1b\xff\xa5\x004\xff\x08\xfe\xce\xfd\x81\xfec\xfe\xce\xfeC\xfe\xc2\xffo\xff\xe7\xff\xb5\x00H\x00G\x00\xb5\xff\xc0\xfe\xe6\xfe\x89\xff7\x00\xb0\xff\xb0\xff\xc5\xff\x83\xfe\x9a\xfeQ\xff\xbb\xfeX\xff\x1a\x00\xf7\x00o\x02\x1c\x04s\x05\x86\x07\xbd\t\xac\x0bH\x0e\xa9\x10\xef\x12U\x14O\x15\xd5\x15\xaf\x15\x1f\x15\x0b\x14h\x12\x14\x10\x10\r8\nG\x07z\x03\xe9\xff\x01\xfdt\xfa\xa5\xf7\x9b\xf5\xdb\xf3\xf8\xf1B\xf1\xc6\xf0$\xf0\x05\xf0,\xf0g\xf0\xd0\xf0\xbf\xf1\\\xf2\x91\xf2\xa7\xf3\t\xf5\x01\xf6\xe2\xf7\x1e\xfa\x92\xfb%\xfd!\xff\xec\x003\x02_\x03\xbe\x03\xc9\x031\x04\x13\x04\xaa\x03\x1c\x03\x06\x02\xd3\x00\xa7\xff\xc0\xfe\xeb\xfd\xa0\xfc\xfe\xfb\xa3\xfb]\xfb7\xfb{\xfb&\xfc9\xfc}\xfcD\xfd\r\xfe\x17\xff\xc6\xff+\x00O\x01t\x02&\x037\x03`\x03\xb9\x034\x04\x01\x04\xb3\x03\x92\x03T\x03\xb7\x02\xc7\x01k\x00\xf0\xfff\xff\xff\xfe\x15\xff\x91\xfe.\xfe6\xfd\xdc\xfb4\xfb\x0c\xfd\xd4\xfeI\xff\x9c\xfd\xcc\xfc\xc8\xfd%\xff\x81\xff8\xfeb\xfd\x9c\xfe\x87\xfff\xfe\x03\xfeO\xfe4\xfe\xb9\xfc\xf8\xfb\xcc\xfbv\xfbT\xfb\xd4\xf9\xe8\xf8Z\xf8\xdc\xf8@\xf8\xe2\xf7\x95\xf7\xe9\xf7\xf1\xf7#\xf8\xd4\xf8\x0f\xf9\x10\xfa\xde\xfa\n\xfcf\xfd$\xff\x16\x01L\x04\xb9\x07\r\x0bR\x0f\xa3\x13\xa1\x18@\x1e"#$\'0*\xb7,I.\x9f.\x82-\x0f+d\'\xb1!R\x1b\xb1\x14\xae\r\xb6\x06^\xff\x05\xf8\xf9\xf1\x02\xed\xdd\xe8r\xe5\x99\xe2\x9a\xe0\xb8\xdf\x9c\xdf\xd8\xdf\x96\xe0\xff\xe1t\xe3\xc3\xe4\x87\xe6\xfb\xe8\x05\xec\xe1\xeej\xf1_\xf47\xf8h\xfcg\x00:\x04\xc2\x07\x00\x0b\xcb\r\xc4\x0f\xdf\x10"\x11{\x10\xd5\x0e;\x0c\xe9\x08|\x05\xa4\x01`\xfd\xf9\xf8\x18\xf5\xef\xf1S\xefW\xed\xec\xebw\xeb\xc9\xeb\xb5\xec\xe3\xedy\xef\xd1\xf17\xf4\'\xf6/\xf8\x9e\xfa6\xfd\t\x00N\x02\x11\x04|\x06)\tD\x0b\xa4\r\xa3\x0f 
\x11c\x12Y\x13\xd3\x13\xac\x13\x1b\x13\xbe\x11\x91\x0f\x14\r\x8e\n\xcb\x07\xc1\x04\xfd\x01F\xff\x83\xfc\xac\xfa=\xf9\xb8\xf7\xc8\xf6_\xf6!\xf6\xd1\xf5\xde\xf5\xf1\xf5\xe0\xf5)\xf6Z\xf6I\xf64\xf6V\xf6M\xf6\x11\xf6&\xf6Z\xf6\x8f\xf6\xb4\xf6\x89\xf6\xa6\xf6\xe4\xf6N\xf7\xef\xf74\xf8\xa5\xf84\xf9\xe4\xf9]\xfa\xe9\xfa\xf4\xfb\x97\xfc\xc3\xfc\t\xfd^\xfd\xd7\xfd\xfb\xfd"\xfe\x04\xfe\xab\xfd\xfa\xfd\x0f\xfe\x9a\xfe$\x002\x02\xd6\x04{\x085\r\x83\x12\x7f\x18u\x1f2&\x19,\xad1V6)::\x11\xc6\x14[\x17\xbc\x18\xf2\x18\n\x18\xbf\x15\x00\x12)\r\xc7\x07\xd9\x01Q\xfbW\xf4\xcc\xedJ\xe8\xc9\xe3M\xe0\xde\xdd\xbb\xdc\x11\xdd\xdc\xde\xe1\xe1\xca\xe5k\xea\x93\xef\n\xf5X\xfa\x97\xff\xb4\x04p\t\x8a\r\r\x11\xe1\x13]\x16\xa2\x18b\x1a\\\x1b\xd7\x1b\x16\x1c\xd3\x1b\xfa\x1a\xb7\x19\xe0\x17n\x15Z\x12\x93\x0ea\n!\x06\xb1\x01V\xfd#\xf9"\xf5\xc3\xf1E\xef\xa6\xed\xfe\xec\xdc\xecR\xedm\xee\x16\xf0\x1c\xf2V\xf4\xb5\xf6\xcf\xf8|\xfa\xe3\xfb\xf5\xfc\xc5\xfdl\xfe\x86\xfe\'\xfe\xa8\xfd\x07\xfdi\xfc\xb0\xfb\xc7\xfa\xe8\xf9\xf3\xf8\xf6\xf7-\xf7\x8b\xf6\xe2\xf5N\xf5\xcc\xf4E\xf4M\xf4\xaa\xf4E\xf5\xf6\xf5\xe0\xf6\xec\xf7\x0e\xf9\x88\xfa\x05\xfc`\xfd\xd6\xfe\xd3\xff\xbb\x00\xa3\x01\x8a\x02 \x04_\x05Z\x067\x08~\x0b\xf3\x10i\x17\x15\x1d\x87"a(\x02/\xe45\xea:\x96=\xaa>N>\xe9; 7O0\x12(\xa4\x1e\xb0\x13\xbf\x07\x96\xfc\xe4\xf2E\xea\xe9\xe1 \xda\xac\xd4\x15\xd2[\xd1m\xd1\x15\xd2\xfe\xd3\x03\xd7c\xda\xef\xdd\xa4\xe1\x97\xe5^\xe9u\xec`\xef\x12\xf3\xc4\xf7\xb7\xfc\xd3\x00>\x04X\x08C\r\x01\x12\x84\x15\xae\x17\xbe\x18\xb2\x18\xff\x16\x9b\x13\xd8\x0e/\ts\x02\xb2\xfaV\xf2\xb1\xeam\xe4\x1b\xdf\xa3\xdaI\xd7\xd4\xd5\xa9\xd6\x8a\xd9\xb5\xdd\xc1\xe2\xab\xe8J\xef`\xf6V\xfd\xe1\x03\xfd\tH\x0f~\x13\xaa\x16\xf8\x18\xfd\x1aX\x1c\xd8\x1cy\x1c\xad\x1b\xd4\x1a\xf2\x19\x8f\x18\xb8\x16\x89\x14\xf7\x11\x08\x0f\xb5\x0bT\x08\xc0\x04\xcd\x00\xad\xfc\xb8\xf84\xf5l\xf2y\xf0,\xef\x9e\xee\xcd\xee\xde\xef\xbe\xf1.\xf4\xc5\xf6\x93\xf9Z\xfc\xf0\xfe(\x01\xcb\x02\xce\x038\x04\xf5\x03\x12\x03\xb2\x01\x12\x00;\xfe\x10\xfc\xc0\xf9\xb5\xf7\x02\xf6q\xf4\xe6\xf2\x94\xf1\xb9\xf01\xf0\xea\xef\xee\xef3\xf0\xd2\xf0\x98\xf1U\xf2n\xf3 \xf59\xf7\'\xf9\xdf\xfa\xea\xfcZ\xff\xa0\x01\xf2\x02\xb6\x03\\\x04\xb3\x04a\x04\xf4\x02Q\x01V\x00\x1a\xff\xcd\xfd\x1a\xfd\xee\xfe\xa4\x03 \t\xbf\x0eq\x15\xf6\x1e\xb7*!5\xd4<\xa0B\x0fH\x11L\xf7K\xc7G\x00A\x868\xc2-* \xad\x11t\x04Q\xf8C\xec\xa4\xe0p\xd7\r\xd2y\xcf\x0b\xceB\xcd\x12\xce\xd2\xd0\x8c\xd4\x03\xd8\t\xdbo\xde\x16\xe27\xe5\xc0\xe7\xab\xea\xf0\xee$\xf42\xf9\x12\xfe\xfa\x036\x0b\x94\x12\xb5\x18?\x1d\x9a \xb9"\xca"K 
%\x1b3\x14\xc0\x0b:\x02%\xf8D\xeeR\xe5\x83\xdd\x1b\xd7\xbc\xd2\xc5\xd0Y\xd1\xf7\xd3\xfd\xd7!\xddL\xe3p\xea\n\xf2\x85\xf9K\x00\xeb\x05\xb8\n\x0b\x0f\x1d\x13\xae\x16\\\x19\x02\x1b\x1d\x1c\xe6\x1c\xcc\x1dn\x1e{\x1e\x81\x1d\\\x1bQ\x18\xcc\x14\x0e\x11\xa2\x0c\'\x07\xf8\x00\xd7\xfaJ\xf5\x9d\xf0$\xed\xe9\xea\xbf\xe9\x87\xe9\x99\xeaN\xedp\xf15\xf6\xea\xfa\t\xff\xba\x02\x16\x06+\t\x85\x0b\x83\x0c\x02\x0cr\nj\x08g\x06P\x04\x01\x025\xffA\xfc\x8c\xf9m\xf7\xe7\xf5\x9b\xf4\x1f\xf3I\xf1]\xef\xee\xed\x1b\xed\xb7\xec\x8d\xec\xb3\xec\x04\xed\xf0\xed\xc2\xef^\xf2\xea\xf5\x80\xf9\x9c\xfc\x87\xff&\x02\xde\x04\x1e\x07\xfc\x07\x08\x08\xab\x07s\x06\xc5\x04\xa2\x02~\x00\x03\xff4\xfdq\xfa\x8e\xf7`\xf5V\xf4\xc2\xf3\x85\xf2Y\xf1\x9c\xf1\xa8\xf3X\xf8\n\x01\xb7\r\x0b\x1c\xac(\xf52\'>=K\x89V\xd6[\x1d[\rW\xe5P\xe6F\xf08\\)\xda\x19\xa4\t\x7f\xf8\xe8\xe8\xaf\xddo\xd6\xf4\xd0\x92\xcbx\xc7\xd8\xc5,\xc6]\xc7\xa3\xc8\xc0\xc9\x00\xcb\x88\xcc7\xcf&\xd4\x9d\xdb1\xe5\x82\xef3\xfa\x9c\x05\t\x12\xb5\x1e\xea)t2\x017\x957\xbc4\xfa.\xbe&k\x1c@\x10m\x03q\xf6R\xeaK\xe0\xed\xd8/\xd4\x10\xd1\xf7\xcei\xce\xa2\xcfu\xd2\xef\xd5\x98\xd9\xa2\xdd=\xe2\x88\xe7\x82\xed\xa1\xf4\r\xfd\xeb\x05\x85\x0e\xef\x16*\x1f\x98&\xe5,H1;3{2D/\x14*\x18#]\x1aa\x10\xad\x05\x90\xfb\xe8\xf2\xdc\xebm\xe6\xdc\xe2:\xe1G\xe1\xd0\xe2\xa1\xe5n\xe9\xbd\xed\xa8\xf1.\xf5\xe9\xf8\xca\xfda\x03j\x08T\x0c&\x10\xa5\x146\x19\xa8\x1c\xac\x1em\x1f\xd5\x1eM\x1c\xb6\x17\x04\x12\xcc\x0b\xe2\x04D\xfdB\xf5\xe3\xed\xfb\xe7\xdc\xe3\xf8\xe05\xdf\xc1\xde\xf9\xdf]\xe2\x8d\xe5o\xe9\xb4\xed\x0e\xf2\xd6\xf5_\xf9o\xfd\x9e\x01\x7f\x05\xfd\x07n\tt\x0b\xbb\rR\x0fP\x0f\xdd\r\x07\x0c\xb5\t\t\x06.\x01\x95\xfc0\xf8a\xf3\xec\xed\x07\xe9\xa3\xe6|\xe6\x17\xe6j\xe5\xbe\xe5&\xe8"\xec\x8c\xef\x9f\xf2\xc5\xf6$\xfbA\xff\x96\x05U\x12\x97%\x898\xabDdK\xefR,\\\x7fax^WUmK\x8c?X/b\x1d\n\x0f\\\x04\xa4\xf8\xef\xe9\xb2\xdc\xc6\xd5\xf9\xd2\xc8\xceh\xc8h\xc3\x10\xc11\xc0\xf7\xbf\xad\xc1\x9a\xc7J\xd01\xd9g\xe2\x0b\xee\xd1\xfdP\x0e \x1b\xd3#<*\xe7.\x061\xab0O-\xd4&\x1d\x1e\xd4\x14\xb8\x0b\x07\x03\xaf\xfa\x90\xf3\xb3\xec\x8e\xe5h\xde\xa5\xd8\x9b\xd4\xa8\xd0F\xcc$\xc8]\xc6\xd3\xc7\n\xcc^\xd2x\xdb\xca\xe6[\xf2=\xfdt\x08#\x14*\x1e\xae$\xfb\'\x87)\x9f)\x8c(\x17&\x9e"T\x1e\x97\x19\xcd\x14\xcc\x0fr\n\x88\x04\xc7\xfd\xed\xf6\x99\xf0\xb9\xeaY\xe5\x1b\xe1\xfd\xde\xc2\xde\xe8\xdfv\xe2X\xe7\t\xee\xfd\xf3\xd5\xf8\x02\x00<\r\x1a\x1c\xb5$\xb4%\xdb%\xe8)\xab-p+\xc7$\xf3\x1eY\x1an\x13=\nl\x03\x1c\x01\x97\xfeh\xf7\xaa\xed\x9e\xe62\xe4h\xe2\x01\xde_\xd9g\xd8\xbb\xdb1\xdf\xff\xe1\xc0\xe7\xaa\xf1\xd9\xfb\x14\x01&\x03\xf1\x07=\x0fO\x14^\x13\x07\x10\xf9\x0e\xf5\x0el\x0c\xef\x07\x12\x05-\x04\x9e\x01\x0b\xfb\xdb\xf3\xbb\xefS\xed}\xe8\x89\xe0\x91\xd9\xfd\xd6A\xd8\xcb\xd98\xdc0\xe1L\xe8\xe0\xef\xf1\xf5\xad\xfby\x01\xd7\x05\xef\x08\x18\n\xa8\n\xb8\x0b\xb6\x0eI\x14\x93\x1aO%\x116\x08H\x90S\xbeV\xd6WVX\x86R\xf6C(4\xa2(\xb6\x1d)\x0f>\x01D\xfbx\xfa(\xf6\x1e\xed\x90\xe3l\xda\xac\xd1\x84\xc8\x97\xc1\xa1\xbf\xa1\xc1\xf9\xc6\xe0\xce\xda\xd8\x14\xe6i\xf5n\x02\xdb\n\x9c\x0f$\x12\xe7\x132\x15\x98\x15\x06\x16\xe9\x16\xef\x18\x1e\x1b\xc3\x1aM\x18K\x15+\x10N\x06-\xf8\x16\xea\xe8\xde\xcd\xd5\x17\xce\n\xcak\xcb\x92\xcf\xa9\xd3\xea\xd6W\xdb\xb9\xe0\x11\xe51\xe8\x9e\xeb\xac\xf0\xe7\xf6\xcf\xfe\x9a\x08\xcb\x13\x0f\x1e\xe2%\xc1*\xc7,w+\xa0\'\x94"\xdb\x1c\xa8\x16U\x10\x95\nd\x06\xa8\x03K\x01\xb0\xfd\x9d\xf8-\xf3\x9a\xeeE\xea\x9a\xe6\x03\xe4\x92\xe3\x17\xe5)\xe8s\xedz\xf5P\xfeP\x06\xa7\x0b\xf5\x0e4\x11_\x13\t\x16_\x17U\x19\x02\x1e/%\xb5*\xc7+5) 
xff\x06\x04a\xfe\x9d\x01.\x01\xaf\x00\xba\x00T\x00\x8d\x02\x8c\xff\xd5\x01\xed\xfe\xfe\x01\xb8\xff\xc1\xffZ\x03\xba\xfd\xe4\x03\xf2\xfd\xd5\xff\xcc\x02\xec\xfeK\x00\x85\x01\xfe\xfc\x83\x01\xd1\xff\x1d\xfd\x03\x04\xa5\xfb\x19\x01\x1a\x00\xa1\xfb\xe2\x02\xfd\xfcO\xfd\xe6\x02`\xfbs\x00\xc1\x00\x8e\xfb\x15\x01T\x00$\xfc5\x02\xdb\xfc,\xffT\x02\xab\xfb\xf6\x02.\xfe\xb9\xff\x82\xffU\x00\x08\xff\xca\x00\xf5\xff\xd5\xff\x15\xff\xf3\x01\xdf\xfe\x10\x03\xd8\xff\x90\xfd\xc0\x04V\xfd"\x01M\x03\xb1\xfe\xad\x00\xb0\x03b\xfcO\x04\xf8\xffD\x00\x93\x010\x01,\xff\xc9\x00t\x02\xf0\xfc\xb2\x03\x89\xff2\x003\x01\xe8\xff^\xfe\xd8\x03\xe0\xfc\xde\x01,\x01\x9c\xfc\xcb\x01\xfd\x02[\xfa\xc1\x04\xf1\xfe\x8c\xfcW\x068\xfb\xe6\xfd\xbd\x03(\xff\xc6\xfd:\x06n\xf9U\x01\xb3\x00\xf3\xfd\xbf\xffO\x03\x0e\xfd\x0b\x01\xfe\x01#\xf9\xfe\x04Q\xf9\x9a\x06.\xfc\xc2\x01P\xfdr\x02\x16\xfdC\xff\xf9\x01\xe0\xf8\xf8\x07\xe5\xfa\xa1\x01\xb3\xfe\xbe\x03/\xf9\x17\x07\xe8\xf9h\x00\x88\x06\x19\xf4\x8e\t3\xfe\xa6\xfeS\x00\x96\x03\xb4\xf8\x9d\x05\xb4\x00\xa3\xfa\xa0\n\xa7\xf78\x04\xc7\x01|\xfcx\x04\xbf\xfeE\x01 \xfc\x92\x06\x04\xfcF\x04\xc8\x01\xd9\xfbn\x06\xda\xfa^\x02r\xff\x1d\x03x\xfd\x8b\x03t\xfe\xd6\xffO\x02=\xfaI\x04\xa2\x00\x82\xf7\x88\x07\x12\xfb\x18\x03>\xfe\xd6\xfcF\x03\xff\xf8\x7f\x08\xe6\xf6\xa7\x05\x0f\xfbH\x04(\xfb&\xffw\x07)\xf5n\x08\'\xfaX\x04\xc9\xf9t\x03R\x01\xa9\xfc\x9e\x03\xe1\xfd\x07\x02\x18\xfb\x06\x04c\x00\x88\xfdW\xff\xd3\x03v\xfb\xff\x03\xf4\xfd#\xfd5\x04\xb4\xf9\xc2\x03\xd3\xfd?\xff\x9f\x00\xbb\x01\x8b\xfd\x1d\xfd\x8e\x03K\xfb\xe9\x02\xf3\xfeG\x00\xf3\xfc\x91\x06[\xfa\x87\xfe\x7f\x05\xf7\xf7\x91\x04k\xfe\xec\x01\x10\x00\xe4\xfds\x02\xb8\x02\xe1\xf7L\x07\x98\xfd\xc0\x01\x98\x01l\xfdQ\x04A\xfb\xbf\x05\x9a\xfc\xf7\x03\xae\xf9\x91\x07\xa9\xfd\x8b\xffn\x00P\xfeX\x00\x94\xfeH\x05\xd5\xfc\xa9\x00\x19\xfc\x8a\x03\xcb\xfb8\x00E\x03o\xfa@\x03\x9e\xffM\xfdr\x04\xa4\xf7J\x02\x88\x03g\xfa\\\x02\x8c\x00T\xfc-\x01\xab\x01]\xfd\xe2\x00(\xffX\xfcr\x06\x9d\xfc!\xfe\x7f\x08"\xf8\x1d\x05,\x00\xd6\xfe\xcd\xff\xd3\x01\x17\x01\x06\xff\x15\x07\xcf\xf7c\x05j\x02D\xf9b\x06d\xfe\xa6\xfeC\x05\xc6\xf8\xb5\x03\xa1\x04\xac\xf7\xa7\x05\xe9\xfd\xbb\xfd\xa8\x00\xaa\x00\x81\xfed\xff\xb8\x00\x9f\xfc;\x06D\xf8\xb5\x05\xc3\xfe\xa1\xfd!\x03\xc0\xfe\xcc\xff\x00\x00\xe3\x00\xca\xff\xfd\x00\x80\xfee\x02\x15\x00\xd3\xfd\x07\x00\x1c\x05a\xf8\xb4\x04l\xff\xd1\xfaB\x08\x15\xf9U\x02\xa9\x00n\xfc\xf3\x04\xa9\xfa\x0e\x04\xca\xfa|\x03\x84\xfeL\xfdz\x06+\xf8\xe8\t\x0f\xf6\x0b\x01\xaf\x05X\xfa\x87\x03\x08\x03\x18\xf9\xa2\x05\xba\x01\xc3\xf8\xb2\x06\xe2\x00\xf4\xfda\x02\x06\xff\x81\xfd\x1a\x05f\xfdu\x038\xfd"\x01\xc5\xffu\xff\xe0\x01\x89\xfb\x1a\x04B\x00\xe9\xfd\x06\x01t\x05J\xf9\xd6\x00\xb9\x01\xb1\x00z\xfe\x07\x00\x1d\x07\x98\xf5\x1b\t\x85\xf9\xbb\x00\x13\x04\x18\xfb\xb9\xff!\x03l\x01Q\xf8m\nl\xf7\x08\x00r\x03\xcd\xfd\r\xff\x97\x02&\xff\x15\xfd\x13\x03\xec\xfe\x03\xff\xc4\xfev\x030\xfd\xfa\xfa\xa9\x06b\xff=\x01@\xff\x17\xf9\xe4\x07\xcc\xfb\xc5\x00\xc2\x02\xa4\xfdM\xfd\x1c\x04\x03\xfe\x14\x00T\x03\xd6\xf7\x0b\x07\x83\x01K\xf7\xa0\t\xf1\xf9\xbe\x00\xd0\x04+\xfa\x8f\x03=\xfb\xd6\x053\xf9\x06\x07\xd7\xfbr\x00\x01\x042\xf7\n\x07\xb9\xfbf\x01\xa5\x05\xd2\xf8\xcd\x02\x8f\x01\xc8\xf8\xe4\x07\xd7\xfb^\xff\xe4\x03\xa5\xfci\x00\x93\xff\xb9\xff\x01\xffi\x01\xfe\x00\xb3\xfdV\x02F\xfc1\x03\x97\x00\xfc\xf9%\x04\x82\xff\xf6\xfd\x15\x00\x8f\x05\x9f\xffL\xf6\xe1\x080\xfb\xd1\xfd\xb8\x07\xb5\xf8\xec\x06\x9f\xfb\xb9\xfd\xf6\x03Y\x00w\xfc\x95\x03-\xfe\x99\xfb\xcc\n\x9e\xf4\xbd\x04\xb7\x00\x04\xfc\xdd\x07Q\xfa 
\xfc\xfe\t(\xfa{\xfeX\xff\x87\x06\x8f\xfc\x19\xfc\xb8\x0f\xab\xef!\x06U\xfe\xed\x00Y\x04c\xf9#\x08\x0c\xfd\xc4\xfeR\xfe\xe2\x04\xe5\xf9E\x07u\xfa\x8b\x01\xfd\x025\xf9\xcc\nh\xf1\xdf\n\x17\x00\x02\xf7~\t\'\xf9\x1c\x02\xd9\x02\xa2\xfdW\xfe\xad\x00\xcf\x02a\xfa}\x06c\xf8\xa6\x00\xc9\n\xc9\xf6\x8e\x01\xa4\xff\x97\x00T\xfd(\x00\x17\x04\x19\xfc*\x03\x1e\xfc\xdc\x04\xc3\xfc\x9b\xfb\xf8\x08\xc7\xfeo\xf6\x16\x0b\x01\xf9Q\xfe\xf9\x0b8\xf5\xf2\x03g\x00@\xff,\xfb\x97\x0b\xa5\xf3\xc5\x03\xf8\x05w\xf7t\x056\x00E\x04`\xf4\x98\x06\xda\xfeZ\xfbA\nf\xf8[\x03\xde\xffe\x00 \x03\xf5\xf2\x1a\x0e\x96\xff\xb5\xf5\x8c\n\x00\x02t\xf4\x14\x0b\x9a\xfb!\xfce\x07\xc7\xfcu\xfdZ\x07@\xfc\x02\xfb\x8c\t\r\xf5\x9a\x08\x1e\xfb\x9b\x01\x8a\x06\x16\xf7k\x00p\x08\xde\xf6\x8e\xfe%\x0b7\xf4L\x03\xf8\x06\x9c\xf7\x17\x01\xf2\x02\x82\xfb\xfc\x01G\xff\'\xfe\x97\x02\x1b\xfcB\x07D\xfef\xf7h\x07\xf2\xfd\xe0\xf6/\x05b\x08B\xf6\xb5\x07\x95\xfa4\xff\'\x04f\xfc\xe2\xfc\xee\x06\xdf\x03\x8a\xf4\xcd\x0c\x11\xf6?\t\x14\xfd@\xf6e\x14\x92\xed\x84\x06\xda\x03\xb7\xf8\xbb\x03V\x00&\x02\xf3\xf3\x10\x10\xc6\xf3z\x02V\x08n\xf4m\n\x82\xfb:\xf7\x88\t\xcb\xf9}\x04U\x032\xf4\xc7\x0c\xc1\xf7\xdd\xff4\x03u\xff\x85\xfeo\x00\x86\x00\xc9\x02[\xfc\x9d\x00m\x02.\xff\x89\xfb\xc0\x08\x00\xf8\xe0\x00l\x04\x90\xfa1\x06I\xfc\x81\x06\xf5\xef*\r\xbd\xfc\xca\xfc\xba\x00\x0b\x01\xc7\xff+\x01\xb6\x03+\xf1R\x0f\xb6\xf5\xc6\x00*\n\xb8\xf2\xfb\x0b\xe7\xfb"\xf8`\x0eY\xf1\x9d\x060\xff\xc1\x008\x02?\xf8T\x08\xb9\xf4\xe1\x0f\x0b\xf5l\xfb;\n7\xfa\x94\x012\xfc_\ns\xf4#\xfc\\\x13\xdf\xf2\xe1\x01x\x02o\xfbw\x00\xca\x00^\x01w\x02\xba\x00T\xfa~\x06\x84\xf7\xa2\x02+\n\x07\xf3\x81\xff\xb9\x10\xf1\xee\r\x05\xfe\x07w\xf4\x83\x07\x81\xffa\xfb\x00\x03\xf9\x03\xd8\xf6\xd9\x0c\x1e\xf9\xd5\xfc\xb0\x03\x97\x00\x19\xfd\xa3\xff=\x05\x0e\xfaS\x06\n\xf9@\x03\x8b\xfeu\x01\x80\xfd\xde\xffM\x05C\xf9L\x05\xa6\xfc~\xff\x8d\x01\xc4\xfb\xab\x057\x00a\xf9\xd9\xfeB\x06\xa5\x02\xf7\xfam\xfd\xd1\x01\\\x01\x17\x01\xef\xf96\x06\x17\xfe>\xfb)\x07=\xfdl\x03\x11\xfe \xfb\xec\xfd\xce\x0c\x86\xf9C\xf6\xa7\x0f\x11\xffG\xf1x\r\xc8\xfeT\xf8k\x06\x0b\xfb\xd7\x01T\x05\x8e\xf7\xc0\x06x\x06\xf7\xea\x10\x10Z\x00\xad\xf3,\n[\xfeo\xfeT\x02V\xff\x9c\xf9\x91\x10!\xf5z\xf6\xee\x13\xef\xf5M\xfc\xb4\x06\xf6\x01=\xf7C\x0bO\xf5\xa3\x08\\\xfd|\xfa\xe3\x0bn\xf3t\r\x9d\xf0j\x08L\x02\xd1\xf9\xa0\x022\x02\xd6\xfa6\x05L\xff]\xfam\x06\xc3\xf6\xb0\x08}\xfe\xe3\xfep\xfb{\x11W\xeb\xa7\x00\xed\x0b5\xf7\x16\x08\xb9\xf8y\x08\xfd\xf9\x88\xfe\x1f\x07\xb0\xf9\xbc\x01\xde\x06\x17\xfci\xfa\xeb\xff\xfc\x06\xfa\xfa\x1e\x01\x17\x07\xe6\xf5\xf1\x00\xa5\x04z\xf9\'\xfe\xdf\x01\xb9\x04\x8e\xf8\n\x07>\x06\xe5\xf6\x89\x04\xa9\xf2\xbf\x08\xd5\x08\xff\xef]\x11a\x00\x15\xf6\n\x01\x17\x01\xc0\x04]\xf5\xf8\x05\xf5\x04\xd1\xfa\x9f\x02\x87\xff/\xfee\x01\xc2\xfa)\x03O\x05\xdb\xf6E\x01w\n\xc9\xf4n\x03\x91\n"\xeb7\x07\xa0\x0c\x99\xe9l\x07A\x14#\xeb\xd5\x04\xc2\x05\xa3\xf1\x05\tP\x08\xaa\xf1j\x03|\x06c\xf8F\x00\xf7\tD\xf4\xc2\x01\xe5\x08\x1b\xf5\xb4\x04S\x06\xb3\xf7 
\xfe,\x0c8\xf82\xfco\x04\x80\xfcv\x03\xfb\x04v\xf4u\x03\xad\x0e2\xe9\xb5\x02\xbb\x11!\xf2\x90\x05O\xff\x00\xfd|\x00\x02\x06,\xf2\xe0\x0bb\x00|\xf1\xb2\x16\xe2\xed\xf5\x03\x0b\x02\xd0\xfdg\xfb\xfc\x0b\n\xfaK\xf8M\x0f\xe7\xec\x1a\x12\xa5\xf4M\xff\xbb\x02\xe0\xfb\x08\x0b\xae\xf4M\x108\xec\xa4\x08\xde\x00\r\xf3<\x17`\xee\xf0\x03=\x07\xd4\xf87\xfe/\x07`\xfa\xe1\xff\xa7\x06\xc5\xef\xd8\x11\xbe\xfc\x1d\xef\xcf\x16\xe2\xf1\x08\xfdQ\x15\x93\xe5\xa7\r}\xfe\x99\x00r\xf8\x19\x03\xd9\t2\xfbX\xfd\xb5\xfa\x8e\x16\xe7\xde{\x10o\r{\xed\t\x02\xa8\x0c\xdf\xf4\xc6\x02-\x03\x9d\xf31\x15.\xedG\x04\xc3\rv\xeb\x85\n\xac\x03\xee\xf3]\r\x99\xf7\xcb\x01(\xfe\x99\xffH\x03\xcd\x04t\xf2\xed\n4\xfd\x88\xfaw\x0c\xda\xe9U\x159\xf6_\nN\xf4\x9d\x01(\x05\xdd\xf7A\x04\x9a\xfa3\x0eH\xf2q\x00T\x0c\x17\xf6[\xf6P\x15\xbc\xf8\x83\xef=\x16r\xef\x17\x04r\x0b\x02\xf7\xd5\x01{\xf8d\x11n\xee\xc7\x04\x1c\x04\xca\xfb\x80\x01\xa5\xfe\x0b\t\xfe\xf7\xbb\x03\x90\xff"\xf6\x95\x07t\xfd-\x046\x01\x9d\xf9\x85\x0b\r\xf6u\x02\xab\xf9\xf9\x0bP\xfc\x8e\xf6\xe6\x15\xba\xef]\xfd\xe7\x10\x8d\xed\xdd\x01Q\x0b\xf3\xfe\x97\xeeW\x19u\xf2\xf7\xf9\xf9\x11\x06\xe7g\x12\x11\xff1\xf9y\x04\xdb\x06\x12\xf0z\x10Q\xf5\xf8\xfd\xee\n\xee\xf4\xcb\x04\xd5\x05&\xfc\x19\xfbM\x07\xbe\xf60\x0b\xd8\xf3\x1f\x02\xf6\x0b\xaf\xf2t\x08\xcf\x02\xd7\xf6\x16\xfc\xf9\r\x0e\xec\x08\x02\x1c\x19\xdb\xe6C\x0bD\x00\xa9\xf8\x8e\x04f\x03\xc9\xf36\x0b\x17\x04\xa3\xf6w\n\xb8\xf6\x8b\x0b\'\xf3\xda\x02,\x10\x9e\xe8\x80\x0cl\xff\xb1\xf5Z\r\r\xf6.\n\xc0\xf1\x02\x02\x90\x06\x12\xfc\xc0\x02\t\xff\x12\x043\xfav\xf9N\x06\xdd\xff\x9b\xfeY\x06\x1c\xf4\xda\x08:\xfc\xac\x00T\x04\x7f\xf7\xa3\x06_\xffQ\xfc\xe8\x08\xb5\xf7\x1b\x03\x0b\x02{\xfb\xdc\x07p\xff\x83\xf44\t\xfe\xfb4\x01I\x05\xbc\xfc\x9f\x07\xfc\xecS\x0bs\x02\x89\xfa\xcd\xf7_\x12-\xef\xf8\x0c@\x03\xbb\xe7\xf3\x18n\xef\x9f\x017\x03F\n\xf9\xf1\xe7\x05\xf9\x04\xed\xf2F\t\x98\xf8\xde\x04\xa4\x01\x06\xfa\xeb\x02\x80\xff2\x00u\x05I\xf8\x95\xfbU\x07\xd6\xfc\xa9\xf9\xbc\x12\xb2\xf2\xd6\xf5\x0e\x0fT\x02x\xfd\xfd\xf8\x1a\x08\xaa\xf6\x0e\x03\xb9\x01\xfc\xfdG\x0f\xa8\xf1\x0f\x02j\x01\x0f\xfe\xd7\xfd\xb2\x04\x99\x02\x95\xf2\x08\x11G\xfa\xa7\xfdQ\x01\x7f\xfe\xb3\x04*\xf8<\x06\xa8\xfb;\x07d\xf8\x97\x06P\xfd\x84\xfc\x8e\x06I\xf5\xa8\x0c^\xf4\xc1\x07\x07\xfe\xfc\x00\xb5\xfe\x87\xff\xa8\xfe\xed\xfb\xee\x0e;\xf1V\x05J\x00\xb5\xfa\x08\n!\xf7\x1d\xfd\xd1\x0e\xf8\xf5\xb3\xfe\xad\xfc\xe6\x06\xa3\xfe\xa7\xfb1\n9\xf5\\\x03R\x03\x1e\xf9\xa0\x07\xf1\xfeP\xf4\xdc\x07\xf0\x06}\xfc\xf2\xf9\xac\x08v\xf9\xed\xfe\xca\x02|\xfd\x07\t\xe2\xf1\x84\x05\x11\ns\xf3>\t\xda\xf4W\x05\'\x01s\xf7\xe5\x0e\x12\xf9\x10\xfe!\xffn\n\xbb\xf1\x95\x08|\x01Q\xf6Z\t\x03\xf73\n]\x02\x97\xf0\x1d\x0e\xaa\x02,\xf15\x0fT\xf5\x89\x01\xf5\x06\x11\xf77\x03Q\x0bP\xf3\xb0\x02E\x00\xdd\x01;\xfes\xfaY\x06\xcb\x02\x9f\xfaT\xfa\x85\x10\x06\xed\x12\n\xca\xfd\xb0\xfb\xc0\t\xf0\xf5+\x017\x03o\xfa\x80\x07\xff\xf9\r\x02\xa0\t\xdc\xeaD\x11w\xf3/\x07V\x03\x19\x01\x80\xf6\x02\x08\xd2\x06\xb2\xe9\x90\x1b\x92\xf3\xd2\xfb\xcd\x10\xbc\xee\x93\xfe^\x11R\xf4\xa7\xfe\xdd\n\xe6\xf2r\xff.\r\x7f\xee\xe7\x04\x1c\x01d\xf5\xb0\n\x08\x014\xff\xf4\xf7\xb9\r\xbb\xf5P\xfb\xab\r\x81\xfd\xc5\xfc\xcd\x06\xe1\xfc(\xff\xbc\x08A\xef\xaf\tW\x07\x0e\xf3x\x06\x12\x01\x99\xfeN\xfb\r\x04\xab\xfb!\xfd\x81\x0b?\xf8b\xfd%\x08\xff\xfd\xb2\xf5\xb1\t0\xfd\x81\xf0X\x11\xac\xfc\x9a\xf99\x10~\xf2\xe5\xfc\xa1\x07^\xfe,\xfe\xa4\x06\x80\x00\xd1\xf4\x07\rR\xfc\xd1\xfb\\\x04N\xfep\xfe\xce\x04\x1f\xff\x17\xfc\xf0\x020\x03\x0f\xfe3\xfc\x86\x03\x10\xfc\xc9\x03v\xfd\xda\x01\x16\x04e\xf8z\t\x07\xf8\x8c\x00[\x01\xc0\xfa\xd1\x03\x07\x03\xea\x00\xe4\x02\x07\x01\xb7\xf2\xd2
\xfe\x81\x08\xa9\xff\xa3\xf8\x05\nz\xfe\xaa\xf6s\x07\xd3\x01\xb6\xf5\xd5\x05\x12\xfd\x9e\xf4\x16\x10\x13\xff}\xfd\xe5\x04%\xf7F\x00\x9a\xff\x02\x00%\x06|\xfaI\x01\x92\x06/\xf9\xdd\x056\xff\xf2\xf3\xc0\n\x9c\xfch\xfe\x11\t\xe8\xfd\xc9\xf8\xa2\x00^\x06\xa0\xf7\x18\x05\xf9\x01R\xf78\x02\x1f\x03\xdd\x01\r\xfb\xc8\x03\x83\xfc\xd2\xfb\x13\t\x08\xf9\x8b\x02T\x00\x80\xfa,\x05\xe0\xff\xd7\x01\xee\xff\xe8\xfe\xf4\xfaY\x00.\x02J\x017\x00\xe4\x00A\x00,\xfc\x12\xfe\x9e\x03o\xfd\xff\xfe\x98\xff\x12\x05\xd2\x04\xc4\xf9\x12\x00w\xfd\xdd\x037\xfc\xc7\xfd\xe3\x04^\x07\x13\xfa\xdd\xfe?\x04\x0c\xfb)\x02\xa1\x00?\xfc\x87\xfc\xd7\x03\xd6\xfd\x1d\x05\xeb\xfbs\xfe\x10\x00\xb2\xf8n\x02n\xfe\xa8\x01\x16\xfe\xf4\x00\x98\x02~\xfe\xe3\xfc4\x03\xb0\x00?\xfe\xb8\x02A\x00T\x00i\x04\xd7\x00\x17\xfd\x0c\x00@\x03\x00\xfd\xb7\x00\xc4\x02#\xfcQ\xfe\x16\xfd)\x01Y\xfeW\x00\x14\xff8\xff\xf7\xfcN\x02\xe2\x02e\xfb2\x04V\xff\xca\x02:\x03\x16\x03\x93\x02l\xfd\xb6\xff\xaf\x01n\x03\xf0\x02\xd1\xffx\xfe\xe7\xfc \x00\x94\x02\xe6\xfa\xfa\xfe\xaf\xfd(\xfd\x9b\xff\x03\xfe\x91\xfd\n\xfd8\x00\xe5\x00b\xff-\x02\x96\xfe\xce\xf9]\x04\xa1\x02\x84\x00\x04\x06`\x02\xf7\xfeF\x01\x85\x00\x06\x00\x10\x03.\x03;\xfe!\x00\x8d\x020\x00\xdf\xfe\xde\xfd\xcb\xff\x08\xff"\xfc\x19\x01J\x01Q\xfd\x11\x00l\xffk\xfd!\xff\xf6\x00\xec\xfe\xb6\xff\xda\x04\xf0\xff|\xfe*\x02\xf2\x00\xcb\xff\x17\x03b\x01\xdc\x01\xb8\x00\xed\xff\xce\x004\x00\xc0\x01\xd1\x005\x01\xab\xfe\xb0\x00\xae\xff\x98\xfe\x88\x00\xa7\xfeJ\xff\xd5\x01\x99\x00\x86\x00\x97\x00\r\xfcF\xfd\xfe\xff\xb4\x00\xff\x01]\x02|\xff\xcd\xfe\x87\x00\xfb\xff\xa3\xfeW\xfe\xcf\xff\xc8\x01\xd3\x01\xbc\x02\xe5\xff:\xfe8\xfdc\xfc.\x00\xfc\xff\x02\x00\x8d\x00\x1c\xff\xd9\xff\x1e\x00r\xfd\x8b\xfc\xf1\xfc\xf5\xfc8\x00\x1c\x02\xfe\x00\xfc\xfd\xf4\xfd^\xfd\xf2\xfb\xc3\xfd\x9c\xfe+\xfe\x00\xffW\xff\xcc\xff\x8b\xfd\x94\xfc\x93\xfb\xc6\xfb\xf1\xfco\xfdM\x01\xef\xff\xfa\xfc\xe3\xfe\xee\xfd$\xfdO\xfe\xf0\xfc\xec\xfc\x13\xfd1\xffU\xff\xf5\xfe\x0c\xfe\x7f\xfd\x14\xfd@\xfc\xf6\xfa\xfe\xfb+\xffB\x02{\x07\x1f\x0b\xe1\nB\x08n\n\xdc\x0c\x03\x11c\x12\x19\x14\xd9\x17\x9d\x18\xef\x19f\x19\x9c\x16\xc7\x11\x9e\r\x11\x0b\x95\x0c\xb3\r\xb7\t\xc4\x049\x01\x99\xfb\xf8\xf6\xb5\xf4X\xf1w\xeeN\xeb.\xed\x86\xed_\xee\x11\xee\x93\xea%\xeb\xe0\xea\xec\xed\x16\xf2Z\xf7\xed\xfaA\xfd*\xff\xc6\x025\x06\xa9\x05\xf3\x07\xe4\x06\xa0\tb\x0c\xa7\x0c\x90\x0c\x16\x07\x07\x05m\x01\xf8\xfe+\xfek\xfax\xf6\x0f\xf4/\xf3\xd6\xf1\x0f\xf1\xbc\xed\xa0\xeaU\xea\x07\xea|\xed\xdd\xefM\xf0\x84\xf0x\xf1;\xf3\x7f\xf5\xde\xf8\xe1\xf7\xa3\xf7\x96\xfbd\xfe\x03\x00\xda\x02q\x01F\xfeK\x00`\x00\x06\x02M\x03\xbd\xff:\xfe\x0f\xff\x05\x00\x11\xff\xbd\xfbS\xf9)\xf8\xcd\xfaG\xfdR\xfc\x9d\xfb\xfd\xf8\r\xf9\xbb\xffc\x04\xe2\x048\x03b\x03\xf1\xff\xa3\x016\nY\x15\xd8#\xda(\xd9*\xc8+\xe2,\xad.I+\xac)Q-24\xa68~3\xd2(\xf7\x1b\x8d\x0f\xb3\x08(\x03(\xfe4\xf8E\xf3\xe9\xf1\xe1\xf03\xec\xf2\xe1J\xd7G\xd4\xab\xd6x\xddb\xe5~\xe9\xc9\xeb\xb9\xed]\xf0t\xf3\xd4\xf7\xbd\xf8\x9d\xfb\x9e\x02\xc1\n\xb3\x12\x11\x15\xf0\x11_\x0cc\x07F\x05\xe1\x04\x9e\x03\xbf\x00\x1d\xfe\xb8\xfa\xc2\xf7\xcc\xf4%\xed\x10\xe6\xef\xdf\xee\xdd\xa2\xe0\xe3\xe3\x18\xe6W\xe7\xa8\xe7"\xe9]\xec\x8b\xf0\r\xf5\xf3\xf7\x16\xfd\x82\x04\x0b\rR\x14\x0e\x17f\x16\xc7\x15\x15\x16\x99\x17\xc9\x19C\x19h\x15\xbc\x12\x91\x0f\xed\x0b0\x07\x18\x01\x19\xfb\xed\xf6\x9d\xf6\xd5\xf5R\xf5R\xf3\x0e\xf1+\xef\x9c\xf0\xf4\xf3\x10\xf5}\xf6\xd9\xf8\xf5\xfa\xb7\xfez\x02\x1c\x01\xe0\x00\x99\x00\xa4\x00\x9f\x04\x96\x05\x18\x03S\x01\xab\xfd\xa7\xfc\x86\xfe\xa9\xfa\xfb\xf6\xb1\xf3\xaa\xf1 
\xf2M\xf3\xb1\xf1\xff\xee\x06\xeeN\xeb\xdf\xed;\xf2\xea\xf3~\xf6p\xf4U\xf7\xc1\xfb\xde\xff\x19\x03\xae\xff#\x04+\t\xc3\x16\x15-\xb76\xdd8\x1f1\xa1+\xa71p7\x8c6\xd0344\xd73p/\xdb$\xeb\x16\xc9\x04\x9c\xf4\xd0\xed\n\xef\xcc\xf1\x01\xee\xb2\xe5\x8f\xde\xde\xdb\xeb\xd9\xe6\xd7h\xd7Q\xdaC\xe1#\xed"\xfaS\x02\xcf\x03\xc8\x00Y\x00\x04\x04\x11\x0c*\x14\x8a\x18\\\x1a\x0b\x1b\x94\x18y\x13&\x0c^\x00\xec\xf7\xe5\xf4\xb6\xf3"\xf4\xc9\xf0@\xe9\x7f\xdf\xe7\xd6\xde\xd4.\xd3\x80\xd3\xac\xd6>\xdc\xdd\xe4\x1a\xebb\xef#\xf1m\xf1r\xf5\x07\xfd0\x08\x7f\x12,\x18\x06\x1ai\x1a\xd1\x1af\x1a\x8e\x18@\x14\xf9\x11\x00\x13&\x16:\x14\xae\x0c\xe1\x02q\xfa\xb3\xf6s\xf5\x84\xf65\xf5\xe1\xf4M\xf4\x0f\xf4\x0e\xf8\x1c\xf7\xce\xf5\xd6\xf7\xb8\xf9\xb8\x02\xaa\tl\x0c\r\rU\n\xc5\t\xc9\tP\n\x87\x08\x98\x05\xc6\x03\x0c\x02\x17\x01\xe2\xfdj\xf6\xf5\xee.\xeaD\xe7[\xe7h\xe6\xd1\xe4\xa9\xe4\x15\xe4\xee\xe6\x9f\xe81\xe6Q\xe6\x13\xe7\x7f\xec\xb5\xf5\x8b\xfa;\xfc\xe9\xffA\x03\x9f\x06\x94\x0bE\x08\xc7\x05\x18\n;\x0f\xb7\x16\x1c\x199\x19\xe7\x1e\xdb+\x025\x193r*\xeb"\xd6!^#\xbe\'\xbd,R,\xc9"\xe6\x15c\r\xa8\x07\x9c\xfe\xa3\xf3\xb0\xef\xeb\xf12\xf6\xe4\xf7\xc6\xf6\xff\xf1\xb7\xe9\x19\xe3\xcc\xe4\x97\xee\\\xf8\x1b\xfe)\x03H\x07\xb9\x08\x05\x07\xd4\x03\xdc\x01\xb7\x01\xc8\x04\x07\nw\x0f\x8c\x10\xb5\nS\xffM\xf4\x9b\xef\xe1\xeb~\xea\xcf\xea\x1d\xeb\x06\xed\x87\xea\xb7\xe7\xb3\xe3\xb7\xde\xa2\xddN\xe0\xc5\xe8\xce\xf2\xc6\xfaS\xfe\x0f\x00`\x00&\x00l\x00\x95\x02.\x06\x00\x0b\x00\x11\xcc\x12O\x13j\x0e\x92\x06\xea\x01\xdb\xffK\x01\xbf\x02\xd3\x04\x8f\x04s\x02c\x00\x82\xfc\xf0\xfa\xfc\xf95\xfa\x13\xfd\x86\x00y\x05.\x08{\x07\x14\x06\xcd\x04V\x05\x89\x08f\n\x05\rA\x0e\x83\x0e.\x0e\x1d\x0c(\x08\x8d\x03\x10\xff)\xfcS\xfc\x89\xfb\x7f\xf8Y\xf3~\xee\xeb\xea\x8d\xe8X\xe7\xf1\xe4n\xe4W\xe7\x7f\xe9\x06\xed\x05\xef\xf1\xed~\xefF\xf0\xe2\xf2T\xf8\xa7\xfa\xf2\xfd7\x01|\x03T\x05\xb6\x03\x91\x01\xbf\xfe\xc4\xfe\xd0\xfek\x02/\x04H\x03\x84\x02\x00\xfe\xea\xfaV\xf5f\xf55\xffL\x0b:\x19\x86&\xd30\x003\x86+\xa1\x1e-\x1a\xfd#81\xb6:s:t2\xf6%;\x17\xe2\x08.\xfd\x88\xf3U\xed\xa6\xed\x1c\xf2)\xfa\xc9\xf7\x14\xec\x07\xe0J\xd9\xde\xda\xb6\xe1\x05\xec=\xf7\x11\x01\xf0\x07\x10\n\xb7\tE\x07\x05\x02\xb6\x00\xf6\x04\x87\x0c\xf5\x13q\x15\xeb\x10\xee\x07g\xfc\xd8\xf3\xe1\xedL\xe7M\xe5\xbe\xe6\xa1\xe7m\xea)\xe9\xe7\xe3U\xdd\xed\xd8\xe1\xda\xa5\xe1m\xea\xf7\xf1\x7f\xf9\x18\xff\xd4\x02I\x04=\x04%\x03\x11\x05 
\t\xa6\x0e\x00\x14\xc0\x14\x0c\x12\xbf\r\x02\n\x0b\x04\xda\xfe:\xfc\x1e\xfft\x05\xfb\x07i\x06\r\x01\xf5\xfcn\xfb\xe9\xfa\xfb\xfc<\x01*\x05\xf3\x07r\n\xec\t\xf9\x065\x03{\x00\xf5\x02\x8e\x06g\n\xf7\n\xf6\t\xcb\x08L\x05\xde\x02\x0b\xffB\xfc\xbb\xfc\xa4\xfc\x9f\xfd\x1f\xfd\xcf\xf8\xcc\xf2\xf8\xee\x9f\xec{\xeb\x8f\xecp\xec\xbe\xec{\xee\x11\xf0\xee\xf0\x8f\xf0r\xf0\x15\xf1\xd4\xf3\xfd\xf7\x98\xfb\x0b\xfd\xea\xfc\x98\xfc!\xfd\xe6\xfdi\xfd}\xfb\xa0\xf9>\xf8\x9b\xf9-\xf9\xd4\xf6\xfa\xf3\x00\xf4\xb8\xf9\xc3\xfb\x19\xfd\xf3\xfb\xcf\xf9k\xfeu\x03Q\x0c4\x1c\xcb+\x954\x817\xd6638\x179\xc16\xe16\x819\x819\x8c5\xb9.\xe4$\xfb\x17P\x06\xad\xf8\xdf\xf2O\xefZ\xed\x95\xea\n\xe9)\xe7\x96\xe2\x98\xdd\xef\xdcy\xdf\x00\xe4\xd2\xeby\xf5\xbd\xfe\xad\x04a\x05\xfc\x04\xc1\x04\x06\x05=\x07H\n\xef\x0e\xb9\x10\xb3\x0e\xde\x08\x94\x01.\xfbo\xf3\xaa\xedW\xeb\xae\xebR\xeb\x03\xe9@\xe6\xba\xe2\xfd\xde\xf9\xdb\x9f\xdc\x0b\xe1\xe1\xe5\xd5\xeb\xca\xf0~\xf4\\\xf7t\xf8\xbc\xf9\xc3\xfc\xc4\x00f\x06\xaa\x0b#\x0e\xfb\x0f)\r\xd5\tz\tg\t\x97\nw\n\xd2\x07\xfa\x06\xb4\x07\xbb\x07\xf8\x07\x8f\x05i\x02\x02\x03\x0b\x05l\x07s\t\xf4\x07j\x07\x14\x08o\x08\x86\x08\xde\x06T\x05\x81\x05\xad\x05\xec\x06e\t\x93\tC\x06\xff\x03x\x02\x1e\x02\x03\x014\xfe/\xfd\xf2\xfd\xdd\xfc\xb5\xf9\xed\xf5s\xf0\\\xec\xd3\xeaH\xe9M\xe9\xf3\xe9%\xe8\x86\xe9\xd9\xe9\x9f\xe8Y\xe9\xfa\xe7\xb3\xe9\x1e\xef\xa3\xf2\xb9\xf6\xbf\xf9\xad\xf9\x14\xfa\xe8\xf8.\xfa\x87\xfc,\xfe\x0e\x00\xfc\x01R\x01\x08\x01j\x00\x18\xfd:\xfeu\xff\x8b\x01\xfa\x05\xba\x06\x02\x06\xa4\x06\x15\x07]\tS\x0bK\x0c\xe0\x0f+\x14\x80\x17]\x1b\x10!\x8f(\xda0\x8401*o(\xcc*\xf2,I+\xdf\'l&Z"f\x1a\x90\x12w\x0by\x04S\xfb\xea\xf52\xf6\xbc\xf6\xb8\xf3p\xecG\xe82\xe7\xd5\xe5\xba\xe5u\xe7\xe3\xe9\xf2\xec\x9b\xee\xbe\xf1\xca\xf5\xec\xf5t\xf4\x06\xf5\xfe\xf7\xff\xfbI\xfe\x9d\xfe\xd0\xfd\t\xfcI\xfaA\xf7\xaf\xf4\xde\xf3\xf6\xf1f\xef\xfe\xee\xa0\xef.\xeeN\xeb>\xe9\xd2\xe8\xb8\xe8Q\xea\x0b\xed\x15\xf0\xd5\xf2c\xf4\xe6\xf6\x1e\xf9\x1b\xfb|\xfd\xef\xffD\x035\x07\xcb\nD\x0e5\x0f\x90\x0f\xaf\x10@\x10}\x104\x11\xd3\x10\xbf\x10m\x11\xb4\x13\x05\x15\\\x10Q\x08_\x04\xc2\x03}\x04c\x050\x03\xcd\x00(\xffD\xfd\xa3\xfc\x9a\xfb\xb7\xf83\xf8\xd0\xf9[\xfd\x93\x01{\x03\xf4\x00A\xfd\xfc\xfb\xf9\xfb\x9d\xfcv\xfc#\xfcZ\xfc\xcf\xfb\xc7\xfa \xf87\xf4\'\xf0>\xee(\xee\x18\xef\xc7\xf0\x1a\xf1\xa0\xf0-\xf0R\xef\xb5\xefK\xf1\xb5\xf2Z\xf5\x00\xf8\xc2\xfb\xf1\xfeB\xfe\x15\xfd(\xfd\xab\xfd\xa4\xff\xc8\x01Z\x02\x95\x05.\x07\r\x06\x8e\x07\x9c\x06E\x04\xc4\x04T\x05C\to\x0b\x06\n\xc4\n\xe5\t\xfc\x07\xd4\x05\x06\x05\xec\x06*\x08\xab\n\x8b\r\x11\x0e\x0b\r\x91\x0cl\x0f)\x14\x14\x18\xa4\x1ac\x1f\x96%\xf2&\x9f%A$\xcb"\xdc"\x0b!\x91\x1f^\x1f2\x1a\xbc\x13\x87\x0e\xbb\x08a\x02\xdf\xfb\xae\xf5\xe6\xf1\x13\xef\xda\xeb\x1a\xe9\xbe\xe5\xb3\xe1G\xdf\x96\xde\xae\xdf\xb5\xe1\xfb\xe2\x11\xe4\xee\xe5v\xe8\x1f\xeb=\xed\xb0\xefM\xf2\t\xf5O\xf8n\xfba\xfe&\xff\xa7\xfe\xa4\xfe\xa8\xffg\x00p\xff\xc5\xfe\xed\xfe\x1c\xfe\x0f\xfcd\xfa\x10\xf9\'\xf7 
\xf5\xcd\xf4\xe1\xf5\x9a\xf6p\xf6\xbc\xf6\xca\xf7\x88\xf8y\xf9j\xfbA\xfe\xb4\x00R\x03@\x060\t#\x0b\x1e\x0cE\re\x0f\xd2\x0f\xec\x0f\x7f\x10\xf7\x103\x11X\x0fQ\x0c!\nQ\x08\xfc\x05\xc0\x031\x01k\xff\xc4\xfd\x16\xfc\xfd\xfa\x88\xf9\xe3\xf6:\xf5\xbc\xf5}\xf6\x11\xf7\x80\xf7\xcf\xf7\\\xf8\xde\xf7\x04\xf8j\xf8\x9c\xf7x\xf7\xd5\xf7\x07\xf9u\xf9\xe2\xf8\xe0\xf7\xe4\xf5\x15\xf5\xbb\xf4-\xf5\xcb\xf4\xb2\xf5\x89\xf5\xd9\xf5u\xf7\xbe\xf7<\xf8\xa6\xf7\xb6\xf8\x05\xfd\x00\x01\x97\x00\x15\x01\xc4\x03\x15\x05\xf2\x04\xb9\x04Q\t\x0c\n\xfd\x08\xce\n\xac\x0b-\n\xc3\t-\n-\x08\xfc\x08\x19\x0b\xbd\n\x89\x08\x18\x08\x0c\x08\xb7\x07f\x07\x8d\x08\xb5\x07\xa1\x04\xef\x05\x91\x08<\x08@\x06.\x07\xf3\x07p\x05I\x04\xc9\x04\xab\x04\xd6\x042\x04\xbf\x02d\x04|\x07?\x08&\x07\xeb\x06i\n%\x0c\xb8\x0b\xed\x0bw\x0c2\x0c\x04\x0c\xa3\x0c\xd9\x0b\xb4\t\x95\x06@\x04\xda\x01\xc7\xff\x8e\xfe\xb6\xfcw\xfa\xab\xf7\x1b\xf6U\xf5\xfa\xf3\xff\xf16\xf0k\xf0\xfa\xf1\xa2\xf1\x80\xf1>\xf2>\xf2\xc1\xf1\xa3\xf21\xf5D\xf6S\xf6\xc1\xf7\x1b\xfa\xba\xfb\xe4\xfc\xf1\xfd\xf3\xfe.\xff2\x00(\x02\xbd\x03\xb9\x038\x03\xaa\x02M\x02\x13\x03\x90\x02@\x01O\x00\x14\x00\x84\xff\x1e\xfe\xc2\xfd\xa7\xfcy\xfa\xba\xf9\x8a\xfa\x1b\xfb\x02\xfa\xcb\xf9\xc1\xfa\x8a\xfbr\xfb\xf7\xfbI\xfd#\xff-\xff\x9d\xff\x1f\x02\x04\x04q\x03\x8f\x03"\x04\xae\x03\xb5\x04_\x03\xcb\x01\x9a\x03;\x03\x92\x01E\xfek\xfev\xfe\x82\xfa\xdf\xfb\x93\xfc\xdb\xf9\xee\xf9\xe6\xfa\xf4\xf8\xaa\xfc\x10\xfdP\xf9h\xfe\xc1\xff\x9f\xfd\xd2\x00\r\x03\xc8\x01\xc5\x02L\x03\x18\x07\x8e\x07=\x03\x82\xfeb\x05_\x05\xe4\xff(\x08\xf6\x01\x17\xfe\x8d\x02\xe6\x02\xc1\x03\xd5\x00D\xfc\x83\x00\xca\x03\x84\x01\x9e\x05\x97\x02r\xfd\x89\x02L\x04a\x02y\x03\xc3\x049\x001\x01\xd4\x04\\\x04\xda\x04\x1c\x01\x1d\x00\t\x00\xf4\x00\xa6\x02\xac\x02\xd0\xfeY\xfa\x95\x00\x9a\xff\xfa\xfa\xd8\x01\x05\x00#\xf8\x9b\xf8>\xff\xb2\x01\xd5\xfd\xca\xfc\xde\xfd\x80\xfe6\x00)\x01\xb5\x01\xf9\x00:\xfe"\x02\xc6\x05\xce\x01\xc6\x00\xbb\x01\x89\x02\x04\x01\x81\x03\xdd\x04G\x01\x9e\x00\xb0\x00\xff\x00\xa7\x03\xba\x040\xfeF\xff\xa9\x03\xe5\x02+\x00T\x00\xc7\x02j\xfd]\x01\xfe\x05_\x01f\xffH\x02\xec\x034\x00\xa4\x01\x10\x03\xac\x003\xff\x0e\x02<\x01\xcc\xfd[\x00\x12\xff\x1e\xfd\x87\xfb\xe4\xfbv\xff0\xfe\x86\xfa\x8e\xfb\x8e\xfb\x16\xf9\xa4\xfc$\xfdU\xfc}\xfc\xdb\xfb\xc1\xfc\xb0\xfe^\x00\xe7\xfcE\xfe\xf6\xfeH\xffo\x01Q\x01\xcc\xffW\xff\xfe\xfeN\x00\x8c\xffi\x01\x0e\x01\x7f\xfb>\xfd\xe9\x01\x1e\x01\xf9\xfcz\xfe\x16\xff\x8f\xfe\x1e\xff\xb8\xffP\x00\x93\xfe\xd7\xfc%\xff\x16\x01\xc2\xfe\x8a\xff0\xfe\x89\xff\xe6\xfd\xe1\xff?\x01W\x00Q\x02\xca\xfe\xfd\xfew\x02]\x04f\xff\xa4\xfd\x0b\x00\x9c\x01\xa2\x03%\x00\x9d\xfe\xbf\x00\xba\x03\xee\xfc\xea\xfd\xfa\x01\x89\xfdL\x01\x0c\xfe\x10\xff\x07\x01\xf8\xfd\x07\xfd\x89\xfcO\xff\xef\xff\x01\x008\xfat\xfe\x0e\x01\x9f\xfc\xf6\xfba\xfe\xf3\xff\x98\xf8T\xffR\x02i\xfe\xac\xfc8\xf8\xe0\x03\xad\x01\xf0\xfa>\x00\xc0\x032\xfe\x9a\xff\x87\x04\xc7\x02\xfd\xfc\x1c\x00\x1b\x08\xb5\x01\x94\x00\xce\x07\x0e\x04\xbb\xfe\xb2\x03\xae\x08)\x03D\xfc\xee\x06\xdf\t\xe4\xfb\xef\x03\x98\x08\xb0\xfd\xdf\xff\xc0\x07\x11\x03\xd7\xfd\xd9\x01\xaa\x05\xe5\x03\xfc\xfe\x8b\x05J\x02\xd1\xfd\xbb\x01\xa6\x01\xc2\x05\x97\xfe\xd2\xfd\x9a\x01 
\x03\xbc\xffD\xf8|\xfeV\x03\xce\xfc\x83\xfa\x1f\xfe\xde\xfd\xd1\xfcJ\xfa3\xfe\xcf\xfcL\xf9\x0f\xfe4\xfc\xaa\xfc\x08\xfc\x90\xff\x90\xfc\xab\xfd\xe4\xfd\xfb\xfb\xdf\xfd\xa8\xfc\xc8\x02\x10\xfe\xab\xffg\x01\x02\xfe\x00\xfbY\x02\xfd\x02\x12\xff\xd7\xff\xc9\x03\x83\xff\x8b\xfc\x19\x05\x8e\x05v\xff\xbf\xfec\x071\x00\xa9\xff\x1e\x039\x06S\xff\xf7\xfd@\x036\x067\x05~\xfe\xe8\x02}\x01b\x02\xe6\x02\x9e\t\xd4\x03\x89\xff\xb1\x06\xf5\x05\x11\x00&\x03\x84\x06\x00\x00\xe1\xfe\x92\x01\x9e\x05f\xffV\xfb\x0c\xfb\x96\x00\x05\xfd\x1b\xfbn\x02\x82\xfe|\xf3\xf1\xff\x86\x02\xef\xf62\x00?\xfb\x12\xf7W\x00\xaa\xfb\xd1\x00\x89\xfd\xc6\xf6\xb6\xfe\xcd\xff;\xfb!\xfc\x81\x03\x14\xfa\x94\xfb\x08\xff\xe6\xfcQ\x01\xc0\x019\xfbH\xfb(\x03 \xfd\x9e\xffl\xfe\xf8\x01\xf4\xfe\xd4\xfdZ\xfe\xb2\x00\xfb\x03\xc7\xfb\x1c\x01\x18\x03-\xfe\x92\xff\x83\x03\xbb\x03\x00\x01\x04\xfe\x02\x01\xc2\x08!\x04\x86\xfd\x10\x01\x8b\x08\xe7\x02H\xff\xd5\x00d\tU\x04\xfe\xfb[\x05f\x02\x8f\x00\x16\x03\xbd\x00\x06\x02\xa8\x01\xbd\xf8\x86\x05\xd2\x03\xd2\xf8g\xfe\x1f\x06\x9f\xf9\xeb\xf9:\x07O\x00\xed\xf1\x9b\xfb`\ny\xf9\xd3\xfan\x00q\xff\x1e\xfb7\xfcG\xff\xcc\x00W\xf38\xfe\x1f\x08\x03\xfaD\xfb\xdb\xfd\x0b\x00\xaf\xfc\xa0\xfc\x83\x00\x16\xfd\xa1\xfa\x04\x05(\xffD\xfc\xd1\xff\x9d\x00\xe9\xfc\x93\xf8\xf9\x05\xf0\x03\x06\xf9\xc4\x00Q\x00\xea\x04\xf4\xfft\xfcb\x03\n\x00\xc7\xffX\x04\x9b\n\xc3\x01v\xfe\xfe\x01Y\x06G\x01 \x02\xa5\td\x05\xbd\xfe6\xff\'\x0c\x87\x00\xa3\xfaZ\x04Y\x03\xdf\xff\r\x02\xd8\x03\xa7\x00W\x01\xdb\xff\xdd\xfc\x86\xfe\xb8\x05I\xff\xaf\xfd\x03\xfd[\x03\xb2\x02\xb9\xfd\xa7\xfa\xf0\xf9\x05\x06\xcc\x02\xec\xf8\xb1\xfb=\x08\xc2\xfb\xf0\xf3\x0e\x03\x95\x05f\xf6Y\xf7N\x04\xbe\xfe\xe6\xfc\xd0\xfc\xb1\xff\x93\xfe\xb9\xf7\x10\xfd\x90\x01\x88\xff\x9b\xff\x03\xff\xd9\xfc\x08\xfc\xcf\x01:\x03T\xfeX\xfc}\xfdz\x06;\x03O\xfb\xf8\x01\xdb\x04T\xff\x8d\xff\x83\x06\r\x02\xa5\xff\xdc\xfeJ\x08\x1e\x03\xbe\xfc\x14\x055\x00\xbc\x00\xab\x02K\x05\xea\xfd\x82\x00G\x04%\x00\x11\x01\xeb\xff\xd8\x03\xee\xfdD\xff\\\x05\x8c\x00\xcb\xfd\xcc\x02\\\x00\x19\xfa\x98\x07\x86\x006\xff\x0c\xfe\xb4\xf7]\x0bS\x03\r\xf4\x9c\x00\x99\r]\xf6)\xf3\x18\x08J\x08U\xf6\x96\xf7\xf5\x01!\x00\xf8\x01\xb2\xf8\xe9\xfb\xf8\xfax\xfa\x86\x04\xb4\xff\x14\xf9\xd4\xfc 
\x02\xc9\xfb0\xfe\xae\xfe\x7f\x01L\xfe\x85\xff\xb6\x01\x91\x00\x12\xfde\xfc\x0e\x07g\xfe2\xf8\xb5\x03\xa2\t\xfc\xfb\xe2\xf7\xfc\x04_\x02K\xf7\xad\x04\xe2\n}\xfc\xd7\xf8W\x07\x06\x05\xab\xfb\x9a\xfe\xc6\x07&\x05\xb8\xfd:\x05\xf2\x03\xd1\xff\x07\x00k\x05Q\xfe\xe5\x04\xfd\x061\xf9\x07\xf9\xe7\x07Y\x0c_\xf3\x82\xfa\xef\x08\xb6\xfa\xba\xf8\xa8\x03\xf9\x07\x1a\xf8\xc8\xf5\xa5\x06d\x03v\xf8\x95\xfdC\x02\xcd\xfc\x97\x02\xdf\xfd\xce\xfc\x06\x05\x94\xfc\xf4\xfd\x1c\xfe\xdd\xff\n\x01\xde\x00\xa4\xfb(\xfe\xe3\xfc\xea\x00R\x04}\xf4\x03\xf9{\x01?\t\x95\xf62\xf5.\n\x1d\x03{\xf2\xac\xfc\xff\r\xa6\x02\x05\xf5k\x01\x9f\x0e\xfe\xfa{\xf9x\x08\xf5\x08N\xfd\xb6\xfc*\x08e\x07G\xfd+\x03\x19\xff\xe0\xfeG\x05\xf0\x02\x86\xfei\x04\xac\x03\xbc\xf6l\x01\x81\x07`\xfe\xa9\xfa_\x05\x8c\x00\xfa\xf6~\xfe\xe3\x07\x9b\xfdl\xfbl\x03\xa8\xff\xdb\xf9Y\x02\xff\x01\x80\x00\x06\x00\xeb\xfc\x19\x08\xae\xff)\xfb"\x01\x8f\x04m\xfc\x99\x02\x01\xfd\x8c\xfc\xb9\x03\xb6\xf9\x0c\xfc\xc6\x029\xfd@\xf6\xda\xfe\xa4\xfc\xbb\xf9C\x00\xaa\x00o\xfa\x1a\xfcJ\xff\xd4\xfch\xfb@\x08\xb8\xff\x8e\xf3\'\x03\x18\x074\x00z\xf9\xfc\xff]\xff\xb1\x02A\x03\x9d\x01\xcc\x034\xfd\xc7\xff\xa5\x04\x84\t\xef\xfe3\xfb\xe6\x06)\x07\x83\x05c\xfa\xc4\x05\xc7\x05|\xf8\x98\x04o\t\x1a\xff\x9d\xff\xd6\x01(\x02\xda\xfe[\xfd\x8a\x05>\xfe\xf9\xf9\x8f\x03"\x05\xe7\xf7T\xfc\xd6\xfe\xa0\xfb\xb0\xfc\r\x08\xe9\x00\xbf\xf4\xb9\xfe\xd2\xfeY\x01\xcf\xfd\xc8\x03\n\x02-\xf3F\x02\xc4\x0e\xa5\xfb\xc2\xf53\x02G\x07\xbb\xfa\x90\xfe\x9f\x0e\x05\x04\x91\xf2\xe2\xf7\x8b\x04P\x11`\x01V\xf3\x14\xf8\x86\x06:\x07=\xf7\xb5\x03;\x00\xb1\xf7\\\xf6\x95\xfd<\r\xde\x06\xc6\xf2\xb2\xf5\x83\x03\x91\x04\xa1\xfa\xe4\x01\xe1\x02U\xf8(\x03\x9b\x08\x16\x02\x0e\xfd\xf3\xff\xa6\xfaK\x04\xdf\x07X\xfe\xf0\xfeG\x05\xde\x00\x85\xfd\x03\x06^\xfd\x0f\xfd\x87\x03\x12\x02\xc7\x03\xb8\x02\xa0\xfcn\xfa\x80\x04k\x03\xc0\xfbQ\xfdP\x08\xd7\x00l\xf6\xc7\x00M\x01r\xfc|\x01]\xfd-\xfdV\x03\xd7\xfbL\xfcl\xf9\x0b\x02\x81\x0cm\xf3B\xf2 \x0b\x93\x07\xac\xfdj\xf2\xe6\x00\xa6\n\x1a\xfa\xf8\xf9d\x05(\x0b\xf8\xfc\xb3\xf0p\x00\x15\x0eo\xfd\xc2\xf2S\x02\xc1\x0e\x0c\xfb\x84\xf4D\x06Y\x08L\xfe\x01\xf5\xa7\xffh\x0fn\x080\xfa\xda\xf6u\x00n\x0c@\x03h\xfa\xf5\x01|\x06\x01\x00\xdf\xfb\xec\x03L\x05\xcb\xfd\x1b\xf7\x94\xfd\xbc\x0eA\x04\xf9\xf4\x8d\xfb/\xfdY\xff-\x03}\xfd}\x00\x03\xfe\x0f\xfc\xbf\x02\x8e\x02\x9b\xf9[\xfc+\x02g\x01\xd0\x073\x00\xc9\xfd\xa2\xf7\xc0\xf9+\tg\x04\x16\x02\xbc\xffw\xf8P\xfea\x07\xdc\xfe\x8e\xfcz\xfc"\x00\xbc\x02\xb1\xffI\x05U\x00\x8b\xf5m\xf9-\x07\n\x05\xe0\x01\xf9\xfa\xdc\xfa\x81\xfb6\xff\xd5\x05\x1e\x028\x02\xdd\xf8\x90\xfbU\x02[\x03\xbd\x00W\x01`\x00\x1e\x00%\x02d\x04a\x04m\xfcC\xfc*\xfe\xad\x00+\x08\x8f\x04\xc5\xfb\xb7\xfc\xd3\xfd\xb9\x00@\x00d\xf94\xfe5\x05\x9a\xff]\xff\x1e\x04\xca\x02\xed\xf8\xff\xf4\x11\x03`\r\x0b\x04\xeb\xfc\xbb\xfb\x15\xfd\xac\x00\xf2\xfe:\x04\xb2\x00\xa1\xf9\xb1\xfe~\x011\x04[\x04\x96\xfc%\xf3<\xfb\x8a\x0b\x10\x07\xd6\xffV\xfd\xa9\xf9\x96\xfby\x00\x16\x06s\x04\x86\xfd\x87\xfb\xa1\xfe\xae\x01\xa2\x04D\x01o\xfb\xed\xfd\x9c\x03\xa8\x02"\x05\xdd\x01\xdc\xf9\x82\xfcD\x01\xed\x04\x15\x01?\x00\xfc\xffl\x00\xa9\x01O\xff\x07\x00\xf2\xff\xa8\xfe\xdf\x00\x93\x00\xd3\x01\x87\x03k\xfc\xb8\xfa)\x00\xc3\x01\x18\x02_\x01\xd4\xfd\xd7\xfc.\xfd<\xff\xb8\x00\xff\x05 
\xfd\xe5\xf7\xb2\xff\xf3\x04)\x03\xa7\xf9\x1d\xfdL\x00\xe7\x01\xb9\x01?\x03\xb7\xfdD\xfc\x94\xffj\x00:\x05:\x04\xd4\x00\xc2\xf85\xfd;\x031\x03\xbf\x03[\x00B\xfd|\xfd\x8c\x00\xd1\x01\xcf\x00\xe2\xfd\x8b\xfdV\xffB\x03*\x06{\xfeg\xfa)\x00\x9f\x01\xd3\xfee\x01\x92\x03\xb6\x02\xf8\x00\xcb\xfdp\xfe7\x00\xd4\xff2\xff"\x04\xdf\x025\xfd\xe6\xfd\xe9\x00J\x01\x8f\xfb{\xfd!\x02$\x03\xb5\x02\xc1\xfd_\xfb\n\xfe\xfb\xfe6\x00g\x02\x81\x02\x8f\xfe\x94\xfcx\xfe\n\x00K\xfe<\xfd\x19\x00+\x03w\x02\xd7\xfe\xeb\xfb\x8d\xfc\x83\xfe*\xff\xd8\x00V\x02\xdc\x02}\xfdO\xfa\xba\xfe\xbf\x01(\x00\x9b\x00\xbb\x01\xb0\x00\x03\x00\x91\xfe4\xfd\xd8\xff\xf7\x01R\x00 \x02\xd2\x02v\x00(\xfd\xa5\xfcf\x00\x19\x02\xad\x01\x94\x00\x00\x01\xa1\x01r\xfe\x17\xfe\xfe\xfe\xb2\xff\xb3\x01\x12\x01h\x02<\x03\x19\x00P\xfc\xc6\xfb\xad\x01\x8a\x05q\x03\xb2\xff\x86\x00\xb8\xff\xe1\xfd5\xff\xe2\x01\x16\x02g\xff\xef\xfe]\x016\x02\xdf\xfdZ\xfcE\xfe\x19\x00\x95\x01\xd7\x00e\xff\xc0\xfd\xc2\xfb\xbf\xfe\xed\x00&\x00\x03\x01\x8d\xfe/\xfd\xbd\xfd\x9f\x00\xf3\xff\xb7\xff\x03\x02/\xffq\xfc\xa8\xfd\x97\xfe\xbf\xfd\xe3\xfew\xff\xf2\xffM\xfc\x98\xfb\x85\xfd\x02\xfd3\xffr\xff\xcc\x01\xd1\x02T\x03\xd5\x01L\x02\xf5\x04\x85\x07{\n^\n\x0c\x0c\xc0\n3\n,\x0b%\x0b\xd8\x0bI\x0b.\n\x02\x0b\xfe\x07}\x05\xeb\x03\x9e\x00\\\x00\x9d\xff\x0c\xfe@\xfb\xe5\xf9\x83\xf6d\xf4T\xf4\xa7\xf4Z\xf4\xa0\xf3G\xf4\xba\xf2\xee\xf2w\xf4\xa9\xf6\xdf\xf8\x85\xf9<\xfb.\xfc-\xfe\xb8\xff!\x00S\x03\xe4\x04\x0c\x05\xc1\x051\x07\xf4\x06U\x05\xf9\x04p\x04"\x050\x04X\x02\x12\x01\xc8\xfd5\xfcw\xfb\t\xfa\xf5\xf8\xcf\xf7\xf3\xf6\xc0\xf4\x0f\xf5V\xf4\xbe\xf4C\xf4\n\xf5\x95\xf6\x89\xf64\xf8r\xf7\x1f\xf9\xf9\xfae\xfcs\xfe\xef\xfe\x96\x00\xb2\x00\x9b\x00B\x01\xdd\x02\xff\x04\xe6\x03a\x03\xc4\x02\xbe\x02\x1e\x03\xbd\x02\x1e\x03\xfe\x01z\x01\xa3\x01=\x00\xca\xff\xaa\xff\x11\x01\xac\x01\xd3\xff\x1a\xff\xef\xfd\xfa\xfeg\xfd\xfc\xff\xd4\xff\xbd\xfb\x9c\xfd\x9d\xfdh\x00\x11\x01\xc0\xff\x84\xffk\xfcO\xfd\xd6\x08B\x14m\x16~\x0f\t\t\xd1\x0e|\x182\x1f\x0f \xaf\x1f\x98!\x8a\x1f\xf3\x1e}\x1e\x92\x1a\x8c\x16\xf6\x11v\x16\xb1\x18\x1b\x11>\x06\xd1\xfb\x05\xfa\xba\xf9\x9d\xf8\xa9\xf7\xe5\xf1\x9b\xea\xdc\xe4\xbb\xe4\xaa\xe5\xd2\xe5B\xe4\x83\xe5\x90\xe7\xd6\xe8\x97\xea\xa8\xe9+\xeb\x80\xeeL\xf4<\xfaO\xfd\x84\xfe\xc5\xfb\x9e\xfb\xd6\xffw\x04\xa0\x07\x85\x07t\x06\xba\x04<\x03\x12\x03A\x02X\x01\xff\xff\x0f\x004\xff\x97\xfdE\xfa\xba\xf6\xe3\xf4|\xf5\xa4\xf7\xef\xf8\xe0\xf7>\xf52\xf3{\xf3{\xf6\xbc\xf8\xdf\xfa\t\xfcD\xfc\x96\xfd\xae\xfe\xb4\xff\x00\x01\xc3\x02\x98\x05k\x07\xa2\x08i\x08\x86\x07!\x07\xc0\x07}\t\xb1\n\xbf\nl\t\x18\x07\xa3\x05\x90\x05\xbf\x05[\x05\r\x04\x84\x04\xf1\x03\xa2\x01T\x00\t\xff\xa7\xfe\x0c\x01\xa0\x02\x92\x02A\x005\xfd+\xfe\xef\xfe\xac\x01g\x03\xea\x03\xf2\x00\x11\xffK\x02n\x01[\x02\xf7\x00\xa3\x01P\x03\xf0\x00\xda\x04\x90\x03~\xfe\xf5\xfb^\xfc\xf9\x02\xa9\x01\t\xff\x19\xfd\xea\xf9\xdd\xf8\xdb\xf7\x14\xfb\x13\xfc!\xf8\x08\xf5#\xf5\x14\xf70\xf6&\xf5\xbc\xf58\xf6\xef\xf5\x91\xf67\xf8\x99\xf8\x11\xf7D\xf7W\xfa\xd3\xfc]\xfc6\xfb\xdd\xfb\x1d\xfc\x9a\xfc\xb3\xfe\x9f\x00X\x00\xbc\xfe\xae\xfd*\xff@\xff\xd4\xfe\x9f\xfe\xca\xfd\xcd\xfd\xdf\xfd\xf5\xfc\x1a\xfc~\xfb\xf3\xfbh\xfc0\xfb 
\xfc\x95\xfb\x9d\xfey\x08\x19\x10s\r.\x04\xd9\x03"\x13\x8c\x1f\x96%M&\xe2!*\x1d\xf0\x19\xfe#\x99.\x04/\x07&\xa1\x1d\'\x1b\x07\x18\x8c\x15\xc5\x12\xb8\x0e]\x08\xf6\x02\xf5\xfe.\xf8\xfc\xef\xa6\xean\xe9\x85\xebo\xeb\x9d\xe8\xdf\xe1\x1d\xdd6\xdeL\xe4\xe7\xea\xb1\xee\x89\xef\x11\xedZ\xec\x81\xf0\xcf\xf8\xd0\xff\x1f\x02\xb3\x02\xee\x02\xb9\x04\xd2\x04J\x06\xf0\x08\xa0\t4\tD\x07*\x06r\x03\xcc\xfd\x91\xfb3\xfc\xdc\xfc\xdf\xf9^\xf5\xc5\xf1o\xee\x16\xec\xac\xec\x9a\xef*\xf0\x94\xedq\xeb\xd4\xec\xd8\xee\xd6\xf0 \xf4\x88\xf7\x17\xfa\x87\xfa\xd2\xfc\x00\x007\x02(\x04>\x07E\x0b\x9a\x0cF\x0c\xee\x0b\xc2\x0c\xae\r\x18\x0ed\x0f\xb0\x0f5\x0eO\x0bh\t%\t\x97\x08*\x08\xa2\x06\x07\x06\xf0\x03U\x01\x94\xff<\xff\xa3\xffW\xff\xff\xfd\xc1\xfd\xf2\xfc\xa8\xfb\x84\xfb\x93\xfbU\xfd\x93\xfd\x9c\xfd\x12\xfd(\xfd\xc8\xfc\xce\xfdw\xff\xd9\xff\xd4\x00\xa6\xff\x83\x00\xfd\x00\x91\x01\x06\x02|\x03\x16\x04;\x03\xcc\x02\xc4\x02\xee\x03\xae\x03B\x04\x01\x04\xba\x02\x9c\x01O\x00\x05\x01\x02\x01[\x00b\x00\x08\xfe\xee\xfc\xf5\xfa\xf9\xfcm\xfe\xf9\xfe\x17\xffo\xfcM\xfc\xb9\xfa\xe2\xfd\xab\xff\xc2\x00\xde\x00\x84\xfeF\xfe\xd4\xfc>\xfd\t\xfeR\xffq\xff\xc1\xfd\xd0\xfa!\xf9\'\xf9\xca\xf9{\xfaa\xf9\x01\xf9\x03\xf8\\\xf6C\xf6y\xf6\xeb\xf7\xf4\xf7\x07\xf9`\xfa\xd3\xf9\xcf\xf8\xbc\xf8Q\xfd\x0c\xffP\x01\x08\x03\xd4\x02\x80\x02\x03\x01\x01\x07\xe2\nN\x0b\x99\x0c\xf0\r\x06\x0e\xc9\x07\x8b\x06E\x0c\x91\x12\x86\x14\xfe\x10\x92\x0c\xa9\x04\xc6\x01V\x08V\x11\xbe\x12u\t\xee\x00\xe7\xfej\x01R\x06i\tC\x08\xee\x03\xdd\xfe\xa0\xffY\x02\xc2\x03)\x04\x03\x03:\x05\xf1\x05\xa5\x05V\x04\xd4\x01Y\x02\xf7\x04]\x07T\tt\x05\x15\x00\xd9\xfb\xbc\xfa\xf0\xffX\x00d\xfd\xf6\xf6\xbc\xf1s\xf1\xf1\xef\x9a\xf1d\xf2\xad\xef\x1e\xec\x96\xe9\xf9\xeb\n\xefc\xee\x06\xef\xae\xf1\t\xf3\xab\xf2\xb0\xf3\x86\xf7\x03\xfbT\xfb\xe4\xfby\xff\x87\x00\t\x00\x1e\x00\xb0\x02\xc4\x04\x08\x03\x84\x02n\x03\x8c\x03\x82\x01\xc4\x00\xd2\x01B\x02j\x00\x95\xfe\xc1\x00\xf8\xfeG\xfd\xac\xfe,\x02-\x03\xee\xfe\xa1\xfdS\x020\x05\x13\x05\x0b\x05\xf2\x05\x9f\x06\x1d\x04c\x06\r\t]\x08\xa2\x05\'\x03\xe3\x04\xcc\x04\xbb\x02\xe0\xff<\xff\xbc\xfeI\xfd`\xfc\t\xfd\xe9\xfb\xb7\xf9J\xf9\xe0\xfb\x85\xfce\xfb!\xfe\x1e\x01\xfc\x01\xb0\xfe\xe5\xff\xe1\x05O\x08\x9b\x07\xbf\x06\xef\x07\xef\x06\xed\x05)\tZ\x0b\xda\x06\xad\xff\xc3\xfe_\x03\x1f\x04y\x00_\xfb\xd1\xf7\xb3\xf5\xc0\xf5\x03\xf9E\xf9!\xf5\xd6\xf0_\xf2\xdc\xf5U\xf6+\xf6\x03\xf7\x19\xf9\xbd\xfac\xfe\xde\x01\xae\x00l\xff\x18\x02\xff\t/\x0ey\x0e\xb2\x0e\xf5\x0b9\t\xdc\t\xba\x10R\x16\x14\x12\x98\x0b\xf1\x06\x8d\x04\xd0\x04-\x06O\t\xc8\x05\xfb\xfc\xe4\xf6\x01\xf8\xcc\xfc\xe6\xfd\x14\xfc\xe9\xf9K\xf8\x1f\xf7\x10\xfa?\xff\x0b\x02(\xff7\xfd\x94\xff\xc4\x02W\x04\xfd\x04\xd6\x05u\x03\xdc\x00d\x02\x04\x06&\x06\xce\x02\xdd\x00E\x00\xd9\xff\x84\x00\x1b\x01,\x00T\xfd\x87\xfc\x93\xfe^\xfee\xfd\xba\xfcS\xfc.\xfc\x9c\xfbH\xfe$\xff\x98\xfcL\xfa\xfc\xfaR\xfd\x87\xfd\xa2\xfd\xe8\xfd8\xfdV\xfa`\xfa`\xfdK\xfe\xd5\xfc\x9c\xfa\x11\xfb&\xfb\xce\xfa\x01\xfb\xdd\xfb=\xfc\xb5\xfa\xf2\xfaH\xfc\x1a\xfd\xad\xfc\xe8\xfc\xd8\xfe\x9a\xff\x7f\x00\x8d\x00\xbc\x01\x9d\x02\x84\x02\xcd\x03:\x05\x13\x06\xbf\x05\x14\x05v\x05\xba\x05\xc4\x05\r\x06\xee\x05H\x04\x7f\x02o\x012\x02%\x026\x00f\xfe\xeb\xfc\x06\xfd~\xfcU\xfc\x93\xfc\xc4\xfbI\xfb\x87\xfb\x10\xfee\xfeJ\xfe\xcd\xff\x9a\x02\x99\x03\xb5\x02\x7f\x03)\x06\xfb\x07\xe4\x07\xd2\x08\x92\x08\x91\x07\xc2\x05F\x06\x94\x07h\x06\x85\x03P\x01\xdb\xffK\xfe\x8b\xfd\x0c\xfcC\xfbI\xf9;\xf7\x9d\xf6K\xf6f\xf6\x83\xf5\x99\xf5\xf6\xf5\xf4\xf57\xf6\x8e\xf6\xb2\xf7{\xf8\x10\xf9\x01\xfa\xe0\xfa\xc1\xfb0\xfcx\xfdy\xfe\x81\xff 
\x00\xa8\x00O\x01N\x01\x03\x02\xda\x02E\x035\x03\xe5\x02\xd5\x02\xd3\x02+\x03\xf4\x02\xe4\x02\x84\x02R\x01\x82\x01\x98\x01\x13\x020\x01e\x00\xf6\xff\x08\x00\x00\x00\x04\x01\x14\x01"\x00\xcd\xff`\xffK\x01\x8b\x01\x9f\x02\x12\x02\x83\x01\x02\x01\x00\x014\x02\xa4\x02\x9e\x02y\x01\x85\x00\xa5\xff\xb1\xff\xf6\xfd\x94\xfd\x05\xfd\xe7\xfc\x92\xfc,\xfc\xcb\xfd\x03\xfdo\xfd]\xff\xf8\x02\x17\x05\x88\x06N\tx\x0c\x14\r2\x0e\xc3\x12&\x17\xdc\x17\xa2\x15\xdf\x15w\x15\x08\x148\x13\x7f\x14e\x13F\x0c\xee\x05\x87\x02_\x01i\xfeD\xfc\x04\xfa\xbc\xf4L\xed\xf6\xe9\xd4\xeb\x89\xed\xfa\xecf\xeb\xaf\xea_\xe9V\xe9\x02\xedd\xf2\xdf\xf5:\xf6\xd6\xf6m\xf89\xfb\x82\xfe[\x02\xdf\x04\xca\x04\xd0\x03\xfe\x03\x7f\x052\x06R\x06\xa1\x05\x9c\x03\x90\x01\x08\x00\xea\xff&\xff`\xfd\x97\xfb?\xfa\x1c\xf9\x88\xf8\xd2\xf8\xd3\xf8\xe5\xf7\xe9\xf6s\xf7\x03\xf9\'\xfa\x1d\xfb\xee\xfb%\xfc\x8b\xfc\xda\xfd\x1a\x00\xef\x01\xa3\x02\xc3\x02\xe3\x02\xff\x02\xea\x03o\x05/\x06\r\x06.\x05\x80\x04/\x04O\x04\xc2\x04\xba\x04\xc8\x03\xc0\x02%\x02\xba\x01\x85\x01\x96\x01\x8d\x01(\x01\xaa\x00\xa0\x00\xfb\x00\xb6\x00\xd1\x007\x01\xd8\x01*\x02"\x02\x1a\x02\x04\x02\xf6\x01#\x02b\x02\x84\x02\xe7\x01\x0e\x01+\x00\xe6\xff\xb0\xff1\xff\xfe\xfe=\xfe\x81\xfdI\xfdQ\xfd4\xfd\xc2\xfds\xfe\xab\xfe\x18\xff\xc2\xff3\x01\xd0\x01e\x02\x86\x03H\x04\x04\x05\x00\x05<\x05\xae\x05;\x05\xc8\x04]\x04h\x03K\x025\x01\x9e\x00\xc6\xffn\xfe\r\xfd\xc3\xfb\r\xfb\x98\xfa9\xfa\'\xfaZ\xf9\xba\xf8\x9b\xf8\x18\xf9\xf6\xf9S\xfaV\xfa\x88\xfa\xe4\xfa\xc6\xfb\xcf\xfc\x87\xfd=\xfe\x97\xfe\x04\xff\xe2\xffZ\x00/\x01\x89\x01\xd8\x01\xf8\x01\xd0\x01\x1e\x02=\x02\xeb\x01\xb1\x01f\x01\xfd\x00j\x00\xee\xff\xa8\xff\xa8\xff-\xffu\xfeK\xfe\xe0\xfd\xb7\xfd\x05\xfe{\xfe\x7f\xfe"\xfe\x0e\xfe\xad\xfe7\xffF\xff9\x00\xb6\x00\xa5\x00\x7f\x00\xf9\x00\xe3\x01\x0e\x02<\x02\xd1\x02\x13\x03\xc2\x02\xac\x02\n\x03\xef\x02\xd0\x02\xae\x02\x1c\x02A\x01\x9f\x00\xb0\x00\xf7\xff"\xff\xdb\xfe\xa1\xfe\x91\xfd\t\xfd(\xfe\xaa\xff\x83\x00\xcc\x01\xaa\x03~\x04\x86\x04\xc8\x05.\nS\r\x96\x0e\xbf\x0e\xd7\x0ev\x0e\x0f\x0e\xb1\x0fU\x11\'\x10n\x0c\x00\t\xba\x060\x05\xd7\x03\xa2\x02\xa1\xff6\xfb\x98\xf7\xb9\xf5K\xf5\xc1\xf4\xdd\xf3\xb7\xf22\xf1H\xf0\xa2\xf0\xec\xf1\xbf\xf3\x91\xf4\xee\xf4\x81\xf5\x9f\xf6~\xf8\x1d\xfa\x85\xfb\x8a\xfc\xe2\xfc\x9a\xfd\x9e\xfe\xb3\xff\x9b\x00\x8b\x007\x00\x04\x00\x1a\x00\xac\x00|\x00\n\x00i\xff\xbc\xfe^\xfe7\xfeE\xfe\xc5\xfd\xe2\xfcB\xfc*\xfcD\xfcV\xfcl\xfcz\xfcW\xfcx\xfc\x01\xfd\xd5\xfd\x81\xfe\xe3\xfeS\xff\x06\x00\xa8\x00)\x01\xbc\x01g\x02\xc5\x02\xdb\x02#\x03}\x03\xa0\x03\xa6\x03\xd1\x03\xf8\x03\xfd\x03\xd1\x03\xc0\x03\xb5\x03\xbc\x03\xe2\x03\xf8\x03\xf0\x03\xbd\x03\x8e\x03l\x03\x7f\x03\x8e\x03v\x03-\x03\xce\x02p\x02A\x02\x0f\x02\xcc\x01k\x01\xf4\x00\x8e\x00\x1d\x00\xd8\xff\x9d\xff;\xff\xe8\xfe\xb8\xfe\x86\xfeV\xfe=\xfe*\xfe\x1c\xfe\x1a\xfe3\xfe\\\xfeo\xfex\xfee\xfe]\xfe\x93\xfe\xc6\xfe\x06\xff(\xffM\xff~\xff\x94\xff\xc4\xffG\x00\xea\x001\x01\'\x01n\x01\xdc\x01v\x02\xef\x02\x89\x03\xa0\x03\x03\x03\x81\x02\x94\x02\xef\x02\xa1\x02\xe9\x01R\x01\xba\x00\xcd\xff\x1e\xff\n\xff\xc7\xfe\xfc\xfd1\xfd\x0c\xfd$\xfd\xd2\xfc\xae\xfc\r\xfdA\xfd\x02\xfd\n\xfd\x87\xfd\xdf\xfd\xe1\xfd\xda\xfd=\xfe\x85\xfe\\\xfel\xfe\xbf\xfe\xd2\xfe\xa6\xfe\xb2\xfe\x14\xffM\xffK\xff9\xff\xa4\xff\xc4\xffo\xff\x81\xff\xca\xff\xe1\xff\x8d\xffa\xfft\xff:\xff\xdf\xfe\xc1\xfe\xd4\xfe\xdb\xfe\xa5\xfe\x81\xfe\x8e\xfe\xd6\xfe\x1d\xffZ\xff\xad\xff\xed\xff\x1f\x00R\x00\x8f\x00\xd6\x00\xf8\x00\x12\x01\x1d\x01\xfd\x00\t\x01\xe1\x00\xc4\x00\xb1\x00\x86\x00\x89\x00l\x00W\x00\\\x00d\x00\x7f\x00k\x00k\x00}\x00F\x005\x006\x002\x00\xfd\xff\xa1\xff\x8b\xf
fp\xff\xd8\xfez\xfe\xc2\xfen\xff:\x003\x01t\x02T\x03!\x04^\x05c\x07\x8e\tJ\x0b\x91\x0cH\r\x96\r\xe2\r\x1f\x0eV\x0e\x06\x0e\xb0\x0c\xa8\nh\x08\x8a\x06\xc1\x04\x95\x021\x00\xa8\xfd\x1f\xfb\x9b\xf8\xd0\xf6\xfb\xf55\xf5\xfa\xf3\xec\xf2z\xf2\xc4\xf23\xf3\xdf\xf3\x05\xf5\xf6\xf5\x8c\xf6W\xf7\xc5\xf8d\xfaS\xfb\xfc\xfb\x0b\xfd\t\xfe\x97\xfe\x0e\xff\xbc\xffI\x00)\x00\xf7\xff]\x00\xbc\x00x\x00\xfe\xff\xdc\xff\xdc\xffu\xff\x15\xff\x11\xff\xef\xfe\x1f\xfeb\xfdp\xfd\x9e\xfd^\xfd\x0c\xfd\x10\xfd8\xfd\x1a\xfd6\xfd\xe3\xfd\x81\xfe\xa5\xfe\xbf\xfeS\xff\xf7\xffY\x00\xdd\x00k\x01\xb3\x01\xc9\x01\n\x02k\x02\xbe\x02\xcf\x02\xd7\x02\xf1\x02\xeb\x02\xde\x02\xe0\x02\xf8\x02\x08\x03\xf5\x02\xe7\x02\xf4\x02\xe1\x02\xca\x02\xe0\x02\xfa\x02\xed\x02\xcb\x02\xcc\x02\xaf\x02g\x02%\x02\xf2\x01\x9f\x010\x01\xc2\x00Y\x00\xda\xffM\xff\xd3\xfey\xfe\x13\xfe\xac\xfdg\xfd;\xfd\x1c\xfd\x06\xfd"\xfdu\xfd\xb9\xfd\x04\xfek\xfe\xeb\xfeg\xff\xe0\xffe\x00\xe7\x00W\x01\xad\x01\xfe\x01J\x02l\x02i\x02\\\x02Q\x02+\x02\xeb\x01\x96\x013\x01\xc7\x00o\x00\x1e\x00\xcb\xffn\xff@\xff+\xff\xe6\xfe\xd5\xfe(\xff\x98\xff\xa6\xff\xa3\xff\t\x00r\x00\xbb\x00D\x01o\x02)\x03\xd4\x02\x99\x02\n\x03P\x03\xf9\x02\xc9\x02\xf7\x02z\x028\x01e\x00m\x00\xf9\xff\xd7\xfe\x0f\xfe\xd1\xfdC\xfd]\xfc\x12\xfct\xfcC\xfc\xa0\xfb\x97\xfb\x1c\xfcU\xfcD\xfc\x8f\xfc8\xfdt\xfdU\xfd\xbd\xfdq\xfe\x7f\xfeA\xfe\x82\xfe\x02\xff-\xff\'\xffb\xff\xd3\xff\xe3\xff\xd4\xff=\x00\xd5\x00\x16\x01\x16\x01<\x01\x85\x01\xa3\x01\xa9\x01\xc5\x01\xc4\x01\x8e\x015\x01\xfe\x00\xdc\x00\xb1\x00u\x00*\x00\xed\xff\xb7\xff\xa6\xff\xac\xff\xb7\xff\xcd\xff\xd5\xff\x00\x00<\x00v\x00\xaf\x00\xd0\x00\xe8\x00\xee\x00\xfe\x00\x06\x01\xd8\x00\x83\x005\x00\xe3\xff\x84\xff\x13\xff\xaa\xfeE\xfe\xb6\xfd%\xfd\xc6\xfc\x93\xfcS\xfc\xfb\xfb\xcf\xfb\xc6\xfb\xb8\xfb\xca\xfbI\xfcO\xfdj\xfew\xff\xb8\x003\x02\xbb\x03E\x05=\x07q\t<\x0bc\x0cF\r\x1b\x0e\xc4\x0e\xed\x0e\x08\x0f\xd8\x0e\xe4\r.\x0c?\n\xaa\x08\xf1\x06\xcf\x04\x7f\x02/\x00\xd7\xfdi\xfb\x86\xf9]\xf8U\xf7\x13\xf6\xff\xf4|\xf4w\xf4\x97\xf4\n\xf5\xd4\xf5\x8f\xf6\x18\xf7\xd7\xf7\xfb\xf8A\xfa.\xfb\xf8\xfb\xef\xfc\xb6\xfdD\xfe\xcb\xfeW\xff\xb7\xff\xaa\xff\xa3\xff\xdc\xff\xfa\xff\xc3\xff\x82\xffl\xffW\xff\x08\xff\xc9\xfe\xc9\xfe\xa9\xfe4\xfe\xda\xfd\xcf\xfd\xc0\xfd\x9a\xfd\x87\xfd\x91\xfd\xa4\xfd\x9e\xfd\xba\xfd\x18\xfev\xfe\xbf\xfe\x07\xff[\xff\xb3\xff\xf2\xffJ\x00\xa0\x00\xd1\x00\xfd\x00+\x01M\x01g\x01v\x01\x90\x01\xa3\x01\xa1\x01\xa5\x01\xbb\x01\xd0\x01\xe4\x01\xfe\x01(\x02R\x02e\x02\x86\x02\xc7\x02\xe9\x02\xfb\x02\x19\x035\x03&\x03\xf5\x02\xcf\x02\xb9\x02m\x02\x03\x02\xac\x01P\x01\xd0\x00C\x00\xce\xff]\xff\xd6\xfeU\xfe\xff\xfd\xba\xfdw\xfd;\xfd*\xfd9\xfd?\xfd]\xfd\x9e\xfd\xed\xfd3\xfe|\xfe\xe1\xfeM\xff\x98\xff\xea\xffW\x00\xc4\x00\xff\x006\x01\x85\x01\xca\x01\xe4\x01\xf8\x01\x17\x02 
\x02\xfe\x01\xf0\x01\xff\x01\xf3\x01\xb6\x01|\x01a\x01A\x01\t\x01\xd5\x00\xb4\x00\x87\x00/\x00\xef\xff\xd8\xff\xc2\xff\x94\xfff\xff^\xffW\xffB\xff1\xffH\xffk\xffi\xffm\xff\x94\xff\xc7\xff\xe3\xff\x05\x002\x00R\x00Y\x00d\x00\x7f\x00\x8c\x00|\x00t\x00m\x00X\x00A\x008\x00\'\x00\xff\xff\xcc\xff\xc5\xff\xb8\xff\x8f\xff\x80\xff\x8f\xff\x82\xffS\xffO\xffm\xffp\xff^\xffh\xff\x95\xff\x96\xff\x89\xff\xa5\xff\xdd\xff\xf1\xff\xe8\xff\xfd\xff5\x00Q\x00Y\x00q\x00\xa7\x00\xc8\x00\xc6\x00\xe8\x00%\x01M\x01V\x01S\x01x\x01\x91\x01\xa1\x01\xc0\x01\xfc\x01\xf5\x01\xb2\x01\x92\x01\x8f\x01h\x01#\x01\xe4\x00\xb2\x00.\x00\xa1\xffD\xff\r\xff\xa7\xfe)\xfe\xd2\xfd\x93\xfd5\xfd\xd7\xfc\xbb\xfc\xca\xfc\xad\xfc|\xfc\x85\xfc\xbb\xfc\xcb\xfc\xc5\xfc\xfe\xfco\xfd\xb5\xfd\xe1\xfdU\xfe\n\xfff\xff\x81\xff\xe1\xffe\x00\xa8\x00\xb8\x00\x03\x01j\x01b\x01\x1f\x01\xfc\x00\xf9\x00\xce\x00\x94\x00u\x00X\x00\t\x00\xa6\xffk\xffd\xff[\xffC\xff\x1f\xff\xf4\xfe\xd4\xfe\xd4\xfe\xe7\xfe\t\xff$\xff.\xff#\xff4\xffd\xff\xa0\xff\xdb\xff\xf4\xff\r\x00,\x00B\x00}\x00\xbd\x00\xfa\x00\x1c\x011\x01M\x01~\x01\xad\x01\xc6\x01\xdb\x01\xf3\x01\xf7\x01\xdb\x01\xdb\x01\xf9\x01\xef\x01\xb5\x01u\x01Q\x01\x07\x01\xaa\x00\x80\x00f\x00\xf9\xffk\xff.\xff"\xff\xf8\xfe\xd2\xfe\x0f\xff]\xffJ\xffN\xff\xce\xfft\x00\xe7\x00y\x01A\x02\xe6\x020\x03\xb3\x03\x8a\x04\x1b\x05J\x05\x8e\x05\xee\x05\n\x06\xe3\x05\xc7\x05\x96\x05\x0b\x054\x04y\x03\xd0\x02\xf8\x01\xdb\x00\xbd\xff\x9c\xfek\xfd`\xfc\x93\xfb\x00\xfbb\xfa\xa9\xf9%\xf9\xdd\xf8\xda\xf8\x10\xf9r\xf9\xd7\xf9.\xfa\xa0\xfa6\xfb\xf7\xfb\xc7\xfc\x8b\xfd=\xfe\xb1\xfe\x1a\xff\x97\xff\x15\x00}\x00\xc9\x00\xfb\x00\x03\x01\xe9\x00\xe7\x00\xf2\x00\xe9\x00\xbd\x00\x83\x00N\x00\x0e\x00\xde\xff\xc7\xff\xb2\xff\x8e\xffW\xffD\xff;\xff7\xff@\xffR\xffR\xffE\xffM\xffY\xffS\xffJ\xffB\xff@\xff7\xff/\xffA\xffd\xff]\xffL\xffT\xffx\xff\x94\xff\xa7\xff\xcd\xff\xf6\xff\xff\xff\xfb\xff#\x00p\x00\x98\x00\x9b\x00\xaa\x00\xc4\x00\xbf\x00\xc2\x00\xdd\x00\x06\x01\x0c\x01\xfc\x00\xfd\x00\x18\x01)\x016\x01M\x01V\x01Z\x01c\x01x\x01\x83\x01\x83\x01\x84\x01n\x01N\x01<\x01&\x01\x02\x01\xd5\x00\xa1\x00i\x00,\x00\xf9\xff\xd9\xff\xbe\xff\x9a\xffx\xff_\xffX\xffW\xff_\xff{\xff\x96\xff\xaa\xff\xbe\xff\xde\xff\x08\x000\x00P\x00q\x00\x88\x00\x94\x00\x9d\x00\xa0\x00\xad\x00\xa3\x00~\x00Y\x009\x00\x1c\x00\x00\x00\xde\xff\xb9\xff\x93\xffy\xffk\xff[\xffT\xffL\xff:\xff3\xff+\xff4\xff@\xffH\xffR\xffQ\xffU\xfff\xff{\xff\x95\xff\xab\xff\xbd\xff\xcf\xff\xde\xff\xf4\xff\x19\x004\x00A\x00R\x00n\x00\x89\x00\x9d\x00\xb2\x00\xce\x00\xde\x00\xe2\x00\xef\x00\x04\x01\x0f\x01\x19\x01\x15\x01\x12\x01\n\x01\xfe\x00\xed\x00\xdd\x00\xc3\x00\x9d\x00v\x00Q\x000\x00\xfe\xff\xca\xff\x98\xffl\xffM\xff#\xff\x01\xff\xdc\xfe\xbe\xfe\xa3\xfe\x9b\xfe\x9d\xfe\x9d\xfe\xa6\xfe\xb3\xfe\xc8\xfe\xe3\xfe\x05\xff&\xffC\xffj\xff\x8e\xff\xa7\xff\xc0\xff\xd4\xff\xed\xff\x00\x00\x01\x00\x1b\x00\x1e\x00,\x004\x00)\x00G\x00i\x00\x8b\x00\xa1\x00\x8c\x00\x9c\x00\x92\x00\xd3\x00P\x01\xe8\x01A\x027\x02[\x02j\x02f\x026\x02/\x021\x02\xb7\x01\'\x01\xac\x00G\x00\xd6\xff=\xff\xb6\xfe0\xfe\x99\xfd\x0e\xfd\x8a\xfcF\xfc\x13\xfc\xe3\xfb\xb2\xfb\x9a\xfb\xc4\xfb\xfe\xfb4\xfco\xfc\xd5\xfc9\xfdz\xfd\xdf\xfdF\xfe\xa7\xfe\xee\xfe\x0c\xffU\xff\x9b\xff\xc7\xff\xeb\xff\xfa\xff\x06\x00\xe5\xff\xbe\xff\xb8\xff\x9e\xff}\xff8\xff\xf4\xfe\xdc\xfe\xb6\xfe\x95\xfe\x88\xfe\x95\xfe\x9c\xfe\x91\xfe\xa9\xfe\xde\xfe\x1c\xffD\xff\x8d\xff\xfa\xff>\x00u\x00\xc1\x009\x01\x91\x01\xc9\x01\x1d\x02{\x02\x90\x02\x90\x02\x9e\x02\xb9\x02\xa6\x02|\x02\x8a\x02\x99\x02\x8f\x02j\x02p\x02\x8b\x02\xa8\x02\xde\x02t\x03g\x04G\x05\xfb\x05\xb4\x06\xaa\x07^\x08\
xd0\x08o\t\x0c\n9\n\x0f\n\xd3\t\xa5\t\x0c\t:\x08H\x07.\x06\xd4\x047\x03\xb4\x010\x00\xa8\xfe/\xfd\xc3\xfb\x83\xfa\x8c\xf9\xb4\xf8\x18\xf8\xa0\xf7_\xf7T\xf7L\xf7y\xf7\xce\xf7;\xf8\xbc\xf8O\xf9\x0e\xfa\xca\xfa\x98\xfbg\xfc\xfa\xfc\x84\xfd\xf1\xfdF\xfe\x89\xfe\xa9\xfe\xc3\xfe\xd1\xfe\xbf\xfe\xa3\xfey\xfeH\xfe\x18\xfe\xcb\xfd\x83\xfdA\xfd\x0f\xfd\xd8\xfc\xb7\xfc\xc3\xfc\xd3\xfc\xf8\xfc0\xfdv\xfd\xc6\xfd-\xfe\x9e\xfe\x06\xffk\xff\xce\xff(\x00\x93\x00\xfb\x00J\x01\x9d\x01\xef\x01\x1d\x026\x02E\x02M\x02B\x020\x02\x17\x02\xf9\x01\xda\x01\xc9\x01\xac\x01\x96\x01{\x01[\x01S\x01M\x01F\x01:\x01;\x01B\x01G\x01T\x01[\x01`\x01X\x01P\x01N\x01:\x01\x14\x01\xeb\x00\xbf\x00\x8b\x00N\x00\x0e\x00\xd3\xff\x98\xffQ\xff\x0e\xff\xd2\xfe\xa5\xfer\xfeO\xfe6\xfe&\xfe\x1d\xfe"\xfe5\xfeM\xfel\xfe\x95\xfe\xc1\xfe\xf3\xfe-\xffe\xff\x98\xff\xd3\xff\t\x00I\x00z\x00\xa2\x00\xbb\x00\xcf\x00\xe8\x00\xf1\x00\xf9\x00\x03\x01\xfa\x00\xf4\x00\xe8\x00\xe2\x00\xd6\x00\xbf\x00\xa5\x00\x8b\x00t\x00a\x00J\x00;\x00(\x00\x19\x00\x16\x00\x17\x00 \x00%\x00)\x00<\x00F\x00Y\x00m\x00t\x00\x87\x00\xa0\x00\xa5\x00\xbb\x00\xb0\x00\xb6\x00\xb8\x00\xb0\x00\xb2\x00\x97\x00\x93\x00\x95\x00\x87\x00\x84\x00f\x00?\x00\x1c\x00\xd9\xff\xab\xff\x88\xffg\xffw\xff6\xff\xfd\xfe\x03\xff\xfc\xfe\xef\xfe$\xffk\xff\xa1\xff\x9c\xff\xb8\xff6\x00\xc2\x00\xe8\x01K\x04\xa4\x05\xf7\x05\xd9\x05`\x05\xae\x04\x83\x02v\x01\xe1\x00o\xffH\xfeU\xfd\xe9\xfcH\xfc\xf0\xfa\xcf\xf9d\xf8\xc0\xf6\x15\xf6\x87\xf5\xb6\xf5y\xf6\xf5\xf7\x8f\xf9)\xfa\xb7\xfb\xad\xfd\xa4\xfe\x7f\xfe\xf5\xfe-\x00y\x00\x19\x01\x10\x02\x07\x03q\x03\x15\x03\xab\x03\x14\x04\xa8\x03I\x03\xe3\x01\x11\x01\xa7\x00\xef\xff\xb1\xff\x82\xff\xbe\xffA\xff\xfc\xfe_\xff}\xff\xfa\xfe\x8d\xfe\x88\xfe\xa2\xfe\xf7\xfe\x07\x00\x00\x01\x9f\x01#\x02\xa4\x02\xe0\x03u\x04n\x04m\x04\xd2\x04C\x05\x19\x05}\x05\xcf\x06\xb6\x06\x08\x06\xb8\x05\xa3\x05\xfe\x05\x98\x05\xbe\x05\xd2\x05\xc8\x05\x9a\x05T\x05u\x05#\x05R\x04n\x03\xd6\x02\x96\x020\x02\xb6\x01;\x01\xc2\x00/\x00A\xffi\xfe\xac\xfd\r\xfdJ\xfc\x9b\xfbH\xfbo\xfb\x9e\xfb\x8f\xfbt\xfbZ\xfb3\xfb\x0e\xfb\xe2\xfa\xbf\xfa\xda\xfa\x02\xfbf\xfb\xe8\xfb\xc1\xfcj\xfd\xa4\xfd\xb3\xfd\xb5\xfd\xcc\xfd\xaa\xfd\x9a\xfd\xbd\xfd\x08\xfeP\xfe\x95\xfe\xdf\xfe \xff\x04\xff\x9b\xfe\'\xfe\xcb\xfd\x90\xfdD\xfdT\xfd\xb5\xfd\x19\xfeg\xfe\xc1\xfe8\xffb\xffh\xff\x83\xff\xac\xff\xec\xff0\x00\xb6\x000\x01\x84\x01\xdc\x01 \x02^\x02@\x02\xf9\x01\xb2\x01x\x01J\x01A\x01A\x01\\\x01Q\x016\x01:\x01\xfd\x00\xd3\x00\x8f\x00L\x00\x15\x00\xe8\xff\x00\x00,\x00K\x00g\x00`\x00S\x00M\x00>\x00\x0c\x00\xe5\xff\xcf\xff\xbc\xff\xc4\xff\xdf\xff\x12\x00/\x00,\x00;\x00B\x006\x00=\x00+\x00\x1f\x00\x04\x00\x11\x00E\x00M\x00|\x00\xa0\x00\x8a\x00\x99\x00t\x00U\x00,\x00\xf7\xff\xe9\xff\xb5\xff\xb9\xff\xb0\xff\xb3\xff\xc0\xff\xae\xff\x8e\xffX\xff*\xff\x08\xff\xed\xfe\xb6\xfe\x9d\xfe\xa8\xfe\xae\xfe\x06\xffT\xffw\xff\xae\xff\x9a\xff\xc2\xff\xfc\xff$\x00Y\x00g\x00\x91\x00\xbe\x00 
[binary payload omitted: a long escaped byte sequence with no diff context or file header — it appears to be raw little-endian 16-bit audio samples (test fixture data) and is not reproducible as readable text]
\x08\x1c\xbf\x15\xeb\r\x16\x07\x1a\x03\x9d\x00d\xfcD\xf5\x80\xee-\xeb\xbc\xe7\xde\xe3\xcc\xe0\xbd\xdep\xdd\x18\xdbo\xda;\xdbA\xdb]\xdb\x1e\xdd\xc4\xe0\x83\xe2t\xe2l\xe5\x9f\xe90\xed{\xef\xfc\xf1f\xf4g\xf6\xee\xf8\xcc\xfc\xa9\xffs\x00h\x00H\x01\x90\x02\n\x04/\x05 \x05A\x03\xde\x00\xb5\x00\xbe\x017\x01\x9a\xfe\x0e\xfc\x9e\xfa\xca\xf9\x16\xf9F\xf8\xa5\xf6\x18\xf4r\xf3~\xf4\xf6\xf3\x8d\xf1\xad\xf0\xe7\xf19\xf2f\xf1\x9a\xf1$\xf2\xd7\xf1\xc9\xf1\x84\xf3.\xf5\xb0\xf5<\xf6I\xf7\xf0\xf7\x8d\xf8\x1a\xfb4\xff\x12\x00\x0c\xfe\xe0\xfdt\x01\xfc\x05\xaa\x078\x07\xc5\x05\xb8\x04\xbe\x08\xae\x11\x9e\x15\x9d\x12-\x0e\x90\x11\x99\x1bp!\xc0%_&|$[$=*P3\x856\x9c2\x1c.=-3.Z/M.\xc0)\xaf!\x13\x1a\x16\x17\xc1\x14\xca\x0f\x02\x08\x9c\xff_\xf9\xd4\xf3\xac\xefY\xebZ\xe6#\xe1\xe7\xdc\xc2\xda\x10\xd9\x0e\xd8\xbf\xd6k\xd6\x88\xd7\xd2\xd8\xbb\xd9}\xdb\x81\xde\xf1\xe1\xcf\xe4\xc5\xe7~\xea\x86\xedz\xf0>\xf4!\xf8\xd8\xfa\x06\xfc?\xfd\xe9\xfe\xb6\x01\xee\x04\xb0\x06\x7f\x05,\x03M\x03\xb2\x05g\x07\xbf\x06\x05\x04\xe0\x00(\xffW\x00\xf4\x01\x15\x00\xfc\xfb\xda\xf8\x07\xf9\x07\xf9x\xf8\xf8\xf7L\xf6\xb8\xf3l\xf26\xf4\xb1\xf4\x7f\xf2\x17\xf1\x9f\xf2\x10\xf3\x1d\xf2\xc8\xf1\x08\xf3?\xf3\xac\xf3&\xf6}\xf7\x1e\xf6:\xf5o\xf8M\xfcS\xfe\x12\xff\x8c\xfe\\\xfef\xfe\xa0\x02\xb5\t\xea\x0b\xc9\x087\x05@\x05\x0c\x0c`\x17U\x1c\x1d\x18\xab\x10*\x13\x96\x1f\x03*5-\x0b)3&\xd4&\xcf-X5\x837\xc61{*y)\x14+W+\x1a\'\xa9 \x96\x19h\x13H\x0f9\x0b\xcb\x05g\xff\x12\xf97\xf3\xe7\xed\xc8\xe9\x92\xe5k\xe2d\xe0\x9f\xddu\xdaV\xd7\x93\xd7\x95\xd9\xc7\xdb\'\xdc8\xdbM\xdb \xdeE\xe3I\xe8\xf5\xea\x1d\xeb\xbb\xebd\xef\xa8\xf5F\xfby\xfc\xb2\xfb\xff\xfb\xe8\xff\xa3\x04\x9d\x06\xd2\x05\x94\x04{\x04;\x05F\x06\x87\x06\t\x05\xfe\x01\x17\x00:\x00e\xff\x94\xfc2\xfa\xf6\xf8\xc2\xf7!\xf6J\xf4\x9c\xf3C\xf2\x81\xf0\n\xf1\xa7\xf1\xf2\xef\xac\xedl\xee,\xf1\x10\xf1\x98\xef\xaf\xef$\xf1\xbc\xf1\x80\xf3\x10\xf6\xfa\xf5\x15\xf5\xb3\xf7\xb0\xfb\xca\xfd\x8a\xfd5\xfe\xb0\x00\xa0\x01F\x04\x1b\x06\x0c\x07\x9a\x08)\x08D\x06\xe5\x05\xac\x0bb\x144\x15B\x0e\xc2\tr\x0e\xa3\x1b\x15&\x8f&\xb5\x1e\x83\x19\xd8 \xbb/\x8f8;5]+]&-+c3\xaa5\xd3.0#X\x1b\xef\x1a\x8a\x1c>\x19\x96\x0f+\x06\x19\xff&\xfa6\xf7\xea\xf3\xcd\xee\x89\xe7R\xe2,\xe0\xed\xdd\xd8\xdb\x98\xda\xdc\xda<\xda$\xd8*\xd8B\xda\xe2\xddg\xe0\xb9\xe2\xc3\xe3\x19\xe4\xce\xe6\t\xed)\xf3\xf6\xf4\xe1\xf3[\xf4\x11\xf8<\xfe\xad\x02.\x03\x0f\x00=\xfe\x0f\x01T\x06V\x086\x05\x84\x00T\xfe\x1e\x00\x90\x02\x0c\x02\xe1\xfd\'\xf9\xc0\xf7\x1c\xf9\xaa\xf9\\\xf7\x9f\xf4\xef\xf2\xbe\xf1\xa6\xf1V\xf21\xf2\x15\xf0\xb7\xee\x08\xf0\xc3\xf0O\xf0\xf8\xef"\xf17\xf2\xf8\xf2\x1b\xf4\xa7\xf5\xae\xf6\xcb\xf7\xe1\xfa\x0f\xfd\xfa\xfe,\x01\xf4\x02\xb6\x04\x1f\x06\t\x08:\n\xfd\x0c\xe5\x0e\x7f\x0f\x99\x0f!\x10\x95\x11\xb6\x14<\x1a6\x1e\x85\x1d+\x19\xbe\x1a\x03#h+f.\x8d*(\'\x99&\x91+\xaa2\x064\xe1,\x7f#\x98\x1f\xb0"=$\x1f \x01\x17N\x0c\xea\x05F\x03\xdd\x03:\x00P\xf7\x83\xed\xc4\xe7\x85\xe6\x1a\xe6\x10\xe5l\xe1\x87\xdc2\xd8\xd5\xd8(\xdc\xcd\xdd"\xdd\x06\xdd-\xde\x10\xdf\xf5\xe0\x0e\xe6N\xeaE\xec\xce\xec9\xee\xf5\xf0\x1b\xf5\x8e\xfa:\xfeC\xfe\xff\xfc8\xfe_\x02D\x06<\x08\xde\x06\xd4\x03,\x02\x1a\x04\xb0\x06\xf8\x05|\x02 \xff\xa4\xfd\x96\xfcN\xfc6\xfcx\xfa\xb2\xf6\xd5\xf3k\xf4\xe9\xf4\xa5\xf3+\xf2\xb9\xf1\xc2\xf0C\xefN\xefd\xf1t\xf1-\xf0\x82\xf0o\xf1n\xf1\xf1\xf1\xc8\xf4\x16\xf7\xa0\xf6#\xf7\xa1\xf9\x1e\xfc\xde\xfcQ\xff\xe7\x02\x9e\x03h\x03\x93\x05l\t\xce\x0b\xe8\x0b\x18\x0cf\r\x81\x10:\x16\xac\x17s\x15X\x15\x84\x19\x98 
|$\xd4%\x81#\x7f"L&\x7f-M1d-\x9b(\x84&Z(u*\x99(U#\xe9\x1aA\x15\x00\x14\xa2\x12\x07\x0e\xde\x05v\xfe\xda\xf7!\xf4\x1f\xf3\x9c\xf0\x10\xead\xe2\xc5\xdfP\xe0Y\xe0\x84\xdey\xdc\x9c\xda\x97\xda\xba\xdc\xff\xdf\xaf\xe1\xad\xe1t\xe2\x8b\xe5\xff\xe9[\xed\xb7\xee\x90\xf0q\xf3\xfe\xf6\xd8\xf9\x9f\xfb\x8e\xfca\xfe\x16\x01\xac\x02\xd1\x01\x14\x01\x02\x02\xe1\x02}\x02m\x01p\xff\x9d\xfc]\xfbs\xfck\xfc\xee\xf8\x03\xf5\xf1\xf3k\xf4:\xf4\x90\xf3~\xf2U\xf0j\xeeB\xef\xd4\xf1\xff\xf1A\xf0`\xefS\xf0\xf9\xf0<\xf2\x9b\xf4\x9b\xf5\x82\xf4.\xf5\xeb\xf7\xec\xfa\xf1\xfb\x14\xfd}\xff\xcb\xff\xfc\x00\xcf\x03\xc0\x06\xd1\x08\xc0\x06\x82\x06$\t\xc5\x0c@\x11\xa1\x0f=\x0c\xd8\x0b\xd0\x10\x93\x19\xd4\x1d\x11\x1b\xfc\x16v\x18}!\xe1+6.\xb3*\x07&\x88\'Y.75M5\xad-\xfd%\xfc#\xbb&$\'\xa1!\xf3\x17\x1b\x10\xa9\ne\x07\xcb\x04\xc6\xff\x99\xf7.\xef\x8a\xea\xa6\xe8\xe3\xe5\x83\xe23\xdf$\xdc\x16\xdae\xd8/\xd9\xb9\xda&\xdcc\xdc\xaa\xdc\x0f\xde\r\xe08\xe3;\xe8\xfa\xecV\xed\xcc\xecG\xef\xa3\xf4\x88\xfaI\xfd\xad\xfd\xc3\xfb\x83\xfc\xd2\x00[\x051\x06\xbc\x03#\x01g\x00(\x02F\x04^\x03[\xff\x18\xfc\xf6\xfb\x94\xfcu\xfb\x01\xf9\'\xf7\xfc\xf5\x01\xf5R\xf4\xfb\xf3\xa2\xf2\xe3\xf0\xec\xf0\xf2\xf1i\xf1\xb4\xef\x87\xef\x0f\xf1\xa0\xf1\xd7\xf1p\xf2w\xf3\xe6\xf3c\xf5\xfe\xf7\xa4\xf9N\xfa\x1f\xfc\x16\xff`\x01\x1b\x03\x12\x04\xc4\x05f\x07d\n\xd4\r\x99\x0e)\x0e\xcd\x0c3\x0f\x15\x16B\x1b\x95\x1b\x84\x177\x16\xc3\x1b\x8b$\xa3*\xc2)\xaf$K"|\'\x9a/~3\xeb.\xaf\'V#\xfc$\xb5(\xf7\'5!\x0b\x17\x1a\x10\xf7\r<\x0eP\x0b\xb6\x03\x06\xfa\xd4\xf2\x0e\xf05\xef\xbf\xedG\xe9\x17\xe3\xd0\xdd\xa2\xdc\x83\xde\x89\xdf\x8a\xde\n\xddC\xdc\xa5\xdc\x84\xdet\xe2\x88\xe5*\xe7\xe6\xe7@\xe9\x90\xeb\x15\xef\xce\xf3\xf8\xf7\x8e\xf9<\xf9\x89\xf9\x8f\xfc\x06\x01\xbf\x04\xdf\x045\x02\x10\x00E\x01\x1d\x04/\x05"\x03r\xffh\xfc\xcd\xfaq\xfbo\xfc\x98\xfa\x06\xf65\xf2\x93\xf1[\xf2\x7f\xf2P\xf1z\xeff\xed\xa4\xec\x83\xed\x1f\xef\x85\xef\x07\xef\xaf\xeeF\xefh\xf0g\xf2\x9a\xf4H\xf5o\xf5Y\xf7\xb2\xf9\xb8\xfb\xea\xfc\xc2\xfe)\x01\xce\x01\x94\x03a\x07:\tR\t\xb0\t\xe9\n\x8d\rb\x11\xbc\x16?\x17v\x13\x92\x13\xa1\x19y"\xb5&\xb6%\x86"\xe4!\xbf&h/\x8b3\xa9/\xf6(\xdc%H)\xc6,\xc9+\x83%\xb5\x1c\xec\x16\xe6\x14\xc1\x14\x10\x11g\t\xe6\x00\xe7\xf9r\xf6\xdf\xf4W\xf2\\\xec\xde\xe5$\xe2\xa9\xe0\xde\xdf\xf3\xdeS\xdeT\xdcp\xdb\xe8\xdb\x0e\xdev\xe0\x13\xe2\xd1\xe3\x9c\xe5M\xe8@\xeb\xaf\xedB\xf1\xbb\xf4Y\xf7\x83\xf8\x0c\xfa\x92\xfc\\\xffD\x01i\x02\x00\x02+\x01\x13\x01\r\x02\x89\x02N\x01\xbc\xfe\x7f\xfc\xad\xfbE\xfb8\xfa0\xf8\xfe\xf5\n\xf4J\xf3E\xf3\xdc\xf2\xc4\xf1\x0f\xf0\x92\xef3\xf0X\xf1B\xf12\xf0\xdd\xef\xff\xf0\x8d\xf2\xb8\xf3/\xf4.\xf43\xf4\xa8\xf5!\xf8\xbe\xfaU\xfbd\xfbF\xfc\xae\xfd\x19\x00\x03\x03\xa7\x05\xf7\x06\xe3\x05\x90\x04W\x07\xfb\x0c6\x126\x13*\x10F\x0e<\x11Q\x19\xfb!\x1e$\x96 \x89\x1c\x88 \xb2)\xd30\xb82\x80-\xf9)o)9.;2\x1f0\x88)B"g\x1f\xbe\x1e\x82\x1c\x80\x176\x11.\nr\x03\'\xff\xfe\xfb\xe0\xf7\xe4\xf1j\xec\xb0\xe8\xa5\xe4\x86\xe1\x8b\xdf\xa7\xde\xd2\xdd`\xdb\'\xdac\xda\x01\xdc\x0e\xde\xfd\xdf\xb5\xe1\x8f\xe2\xc1\xe3\xcf\xe7\x8f\xec0\xf0s\xf1\x95\xf2\x0f\xf5\xaa\xf8a\xfcD\xff\xce\xff~\xff\xd2\xff\xb5\x01\x9e\x03=\x04\xf7\x02\xdc\x00\xb0\xffM\xff\xe7\xfe\x97\xfd\xa0\xfb|\xf9[\xf7\xf1\xf5\xd0\xf4|\xf3)\xf28\xf1\xfc\xef/\xefw\xee"\xee\xc4\xee\xed\xee\x01\xef\xd1\xee\x1f\xef\x0e\xf1\xae\xf2\xb5\xf3R\xf4\x9f\xf5\xc6\xf6\xc1\xf8Z\xfb\xe7\xfd\x7f\xfe \xffL\x01U\x04\xa2\x06\xd1\x06\xc9\x08\x15\x0b\xdf\r\x1f\x0fp\x0f\xdf\x11c\x13\xe0\x18\xf1\x1d\x14\x1e\x1a\x1c\xc5\x1c\xcd#w+\x9a-\\+\xd1(O(=,\xd11\x1d3a-\xb1$\x18";$\n%\x84 
1\x18\xb3\x10E\x0b\xf8\x07)\x06_\x02\xf9\xfa\xb1\xf2\xa5\xed\xa2\xeb\x81\xe9r\xe6\xf0\xe2\xd3\xdf\xc9\xdc\xf3\xda\xad\xdb~\xdd\x01\xde\xd0\xdc\xaa\xdd\x91\xdf\xcb\xe0\x06\xe3\xfd\xe7<\xec\xcf\xec\x90\xec~\xef\xbe\xf3\x0f\xf8\xae\xfbh\xfd\x8f\xfc\xf1\xfb\xd1\xfeQ\x03\xf8\x04\xa4\x03\xca\x00\xc4\xfe\x1a\xffF\x01\xc4\x01e\xfe\xe6\xf9@\xf7N\xf7f\xf7\\\xf6@\xf40\xf1\x9b\xee\xc4\xed\xaa\xee\x11\xef1\xee\xaf\xec$\xec\x11\xec4\xed\xa8\xee\x1f\xf0\xc3\xf0\x0c\xf1#\xf2\x92\xf3\x0b\xf5\xef\xf6\x0c\xf9V\xfa\xab\xfb\\\xfd\xfa\xfd\xf4\xfe\xe9\xff\xb2\x02\x99\x07\xc7\x089\x07c\x06.\x08\xdd\x0fk\x16J\x18z\x17\xc6\x152\x1b\xdd#\xa3+\xeb.\xf9+++e/\xc96\xef:\xad9K5q2\x012\xab2.1\t,o$.\x1eo\x1a\x7f\x16:\x10y\x08\x12\x02\xe6\xfb#\xf6\xae\xf0n\xec\x9f\xe6\x11\xe1r\xde\xfa\xdc\xfc\xd9\x8f\xd5\xd2\xd4Z\xd6_\xd7\x9d\xd6\x18\xd7\x0b\xd8I\xda\xc3\xdd\xac\xe2d\xe6\xc4\xe6\x86\xe8\xcd\xec\xe8\xf2\x0c\xf8|\xf9v\xfav\xfb\xa1\xff\x0c\x04x\x05O\x04\xca\x03\x98\x04\xc0\x04r\x04\n\x04\x17\x02\x98\xfe\xdd\xfc\xd9\xfc\x1f\xfb]\xf7\x92\xf4L\xf3$\xf2\xa6\xf0o\xefx\xed\xde\xeb\xbc\xeb\xb5\xec{\xed]\xecE\xec\xcc\xedk\xef\x88\xf0{\xf2c\xf4\x18\xf6\x8c\xf7W\xf9\xc5\xfc$\xfe\x08\xffF\x02n\x043\x06j\x06#\x072\t\x9d\x0b\xa0\x0e\xd2\x0f\xa2\x0e\x17\x0eS\x10\xf9\x15p\x1c[\x1e)\x1c\xff\x19\x00\x1e\xa5\'p.\x93/\x8e-y*h,\x801\xf66[7+0\x1b+\xa4(k)\x9c\'B#\x19\x1d]\x15#\x0f\x07\n\x83\x06\x83\x001\xf9\xd4\xf2N\xee\x03\xe9[\xe3\x12\xdf(\xdcf\xda\x1e\xd7q\xd5\n\xd5\xd7\xd3&\xd3\xf7\xd4C\xd9G\xdb\x08\xdb1\xde\xab\xe2R\xe6\xa3\xe9\x06\xee\x13\xf2\xef\xf4\xf6\xf6\xb2\xfa\xa5\xfe\x80\x01F\x03O\x04j\x04\xb8\x04\xc3\x05\xaf\x06C\x06\x07\x04g\x01\x9f\xff\xc2\xfe\x83\xfd`\xfb\x8b\xf8\xd4\xf5}\xf3\x0b\xf2w\xf1Y\xefh\xec\xa2\xeb\x1a\xecA\xeb\xff\xe9\x97\xe9\xdb\xe9A\xea\x7f\xeb\x10\xed\x15\xee:\xee4\xef\xb5\xf1\xf6\xf4N\xf7\x8f\xf8\xce\xf9\xc2\xfb_\xfd\x9f\x00\x83\x03n\x06\xe8\x08\xee\x08\xd1\x0b\x82\r2\x0fm\x13\xcf\x17-\x1d\n \xdb\x1e\x07\x1f!"/+\xa03\xd73\x0b2\x16/\x96/;4\xbf:?;\xcc4r-m)\x82*\x8f)\xcc%\x86\x1eD\x16\r\x0fV\t\xe3\x05_\x01z\xfa\x89\xf2\x9f\xec\x8c\xe8\x08\xe3\x8d\xde\xab\xdb\xd7\xd9\xce\xd8L\xd5w\xd3\x08\xd36\xd4\xc1\xd6\xa3\xd9]\xdc\xa1\xdd\xe4\xdd \xe1\x8e\xe8\xed\xed%\xf1_\xf3\xa5\xf4\x9b\xf7p\xfb\xb9\x00\x82\x03\x18\x04\xc8\x03\xc4\x03\xf7\x03z\x05\xa2\x06\x1c\x05}\x02\xf7\xff]\xfe\x15\xfd]\xfbm\xf9\n\xf7\x13\xf4\xb4\xf1\xf8\xefr\xeei\xed\xff\xeb\xb9\xeaF\xea\x8d\xe9\x91\xe8B\xe8;\xe9\xfe\xea\xe1\xeb\x0c\xec\x97\xec\x02\xee*\xf0C\xf3\x82\xf5A\xf7\x16\xf8t\xf9\x99\xfc\x90\x00Q\x03\xf9\x03x\x03\xcf\x042\t\xfa\r\\\x12J\x10\x9c\r\xeb\r\x1b\x15/ \x06&\\$\x93\x1e\x10\x1fY&\xaf1\x959\x039\xe53\x08/\xba1}9_<\xc78\xf81\x12.\x8e+](\x91$\x8b\x1eh\x18\x9f\x12\xe6\r\xec\x07\xe0\xfe:\xf6\x90\xf0\x1c\xef\xec\xec\x9e\xe6\xbb\xde\xb8\xd7J\xd4d\xd6(\xd9<\xd9\x90\xd5\x96\xd2\xa0\xd3\x1f\xd7\xd3\xdc\xc6\xe1\xa0\xe2E\xe4\xb0\xe7i\xebT\xf0\xed\xf3c\xf5\xee\xf9\x97\xfe\x87\x00\xc3\x00\x1a\x01\xf3\x01o\x03\x8e\x05\xff\x07\xf0\x06>\x01s\xfd\xb0\xfd;\xff\x84\xfe}\xfb]\xf8\xc8\xf3}\xef\xfb\xee%\xef\x87\xee\xe9\xeb,\xe94\xe8\xed\xe6\xf6\xe5\xf7\xe5\xc3\xe7\xc5\xe8\xc0\xe9\x86\xea\xe7\xea\x97\xeb\xc5\xecz\xf0\xc7\xf5\\\xf83\xf9\x96\xfan\xfc;\xff;\x02\xc6\x04\xa5\x08\xe4\t\x8a\x0b\x01\x10\xfb\x0e\xf8\rz\x0e\x80\x11\x05\x1a\xe0\x1f \x1f\xf6\x1a2\x19(\x1e\x85\'\x81.\xc7/\x11-\xc3)G-w2\xe16\x985j/ 
.\xc0-\x91.Q+\x8b$\x9d\x1e\x11\x1a\xca\x18\r\x16]\x0fJ\x05\x9a\xfd\xc5\xf9~\xf7x\xf4\x08\xef{\xe8F\xe0\xb7\xdd\xce\xde\x0c\xdd\x1a\xd9\xa9\xd6\x15\xd8\xd1\xd9\xd9\xd9\xd1\xdb\xce\xda\xcf\xdb\x8b\xe0:\xe52\xecf\xed\xe7\xebg\xee,\xf4\xb8\xf9\xc0\xfbp\xfd\x81\xfe\xa9\xff\x02\x01,\x03\x97\x03\x83\x01\x1e\xff\x9b\xfe\xf8\xff\x05\x00\xf0\xfb\xc5\xf6u\xf4\xfa\xf3J\xf3\x00\xf15\xee\xe3\xea\xb8\xe8\xc8\xe8\xe4\xe8\xe8\xe7\xb9\xe61\xe6(\xe6\x17\xe8\x1d\xea4\xea\x9e\xea\x14\xec\x0f\xef\xe1\xf1L\xf4\x87\xf7H\xf7\x99\xf8%\xfeY\x01\xf5\x03\xdb\x06\x9a\x07\xcc\t{\n\x04\r\x1e\x11G\x11\xc4\x12\xc6\x13\xf5\x15\xb0\x16U\x14\x99\x13\x89\x18W \xe8"\xeb!\xbf\x1e\x00\x1f\xd8!\x06&\xad+\xeb.\x94-\xba)\x98)3*\xea)\xbb\'\x7f&\xd5&}$\xc6\x1f\xb8\x1a%\x15\x19\x0f\xcc\x0be\n\x8d\x08\xd6\x02J\xfa\r\xf4\xd9\xef8\xeb\x81\xe8\xb3\xe7*\xe5\xdf\xdf\\\xdcS\xdbQ\xdb\xa8\xd8%\xd8N\xdd8\xe0\xa9\xdf\xe7\xe0\xb2\xe1\xea\xe2\x83\xe7\xff\xec\xc8\xf2O\xf5 \xf5\xa5\xf5\xdc\xf8\xf4\xfc\xfe\xfe[\x01Y\x03\xf1\x02\x8b\x00G\x01\xf8\x01\x0b\x00\xef\xfc\xe1\xfc\x86\xfdz\xfbi\xf7{\xf3\x98\xf2\x85\xf0\xe8\xed\xbe\xeez\xef\xf6\xea\xb9\xe9\xaa\xea\x94\xe8R\xe8\xee\xec\xc8\xeb\xda\xe9a\xf0m\xf3`\xf2\xa4\xf1>\xf4t\xf87\xfc\x17\xff\x9c\x04\xd8\x04c\x04X\x07\xe1\t\x13\x0c\x14\x10u\x12\xff\x0f\xd4\x11D\x14\xad\x11\xcc\x10D\x11\xcb\x11\\\x12[\x14\xc7\x15\x81\x11\xb2\x10\xcf\x10-\x12\xc3\x17\xd1\x1b\x84\x1c\x8e\x19\x9e\x1b\xb0\x1b\x8c\x1d@#\x1b$I#\n"_"\xca!\x80\x1f\xd0\x1d\x8f\x1c\r\x1a\x08\x18E\x15\xc2\x11B\x0c\xf3\x05\x13\x02\xba\x00\x93\xfdH\xf9\xeb\xf4B\xee\xf4\xea\x86\xe7\xc8\xe5C\xe4\x06\xe2\xe0\xe0D\xe0(\xdf\x89\xdf\xea\xde\xe8\xde"\xe3\xe6\xe4\x88\xe8\x05\xecc\xeb\xcc\xedy\xf0\xd5\xf1\x82\xf7\x88\xfb\xb5\xfah\xfd\xc2\xff\xb2\xff\x9d\x00F\x00c\xff\x7f\x00n\x006\x00:\xff\xd0\xfc:\xf9\xf1\xf6\x1f\xf6\x8f\xf4\x89\xf3\x82\xf3\x8d\xf1:\xee\x06\xefR\xec\xbf\xea/\xef\x81\xec@\xec\xc9\xf49\xf0n\xef\x90\xf5\xf1\xf2\x19\xf9\x01\xfb\xdd\xfa;\xfe\xb4\x05j\xff\x98\x03X\x0b/\x06\xac\t\xc0\t\xe7\x0f\x0b\rg\x08K\x11\xbe\x11\x90\x08\x1f\x11\xaf\x11:\x06\x13\x0fu\x0e\x83\n\xd4\r}\n<\x07\xb1\n\xc1\x0cN\x08\x1c\tL\t\xed\x08F\x0b\xbc\x0c\x0c\x0e^\r/\r\xe5\x0ed\x0f\x81\x12\xa7\x13\xee\x13\xbb\x14\xc5\x11\xa4\x13\xf1\x16U\x12\x87\x0f\xc1\x11\xfd\x11y\r\x05\x0f.\x0f_\x08D\x04\xcf\x037\x031\x01\x13\xff\xa6\xfd\xd5\xfbe\xf6\x8b\xf4i\xf4\x18\xf2X\xf0\xe8\xef.\xefv\xee\xea\xee~\xec\xdc\xeb\xcc\xed\x0f\xedG\xeb_\xee\x11\xf2\x89\xf0\x1f\xf0i\xf2Z\xf2f\xf1\x18\xf4\xb7\xf5\x9c\xf7\x1b\xf7\xd8\xf5j\xf6R\xfa\xfd\xf8\xc4\xf7\xda\xf6)\xf8\x1d\xfc\xa3\xfa\x1f\xf7\x11\xf9?\xf80\xf8\xfd\xf6\x8e\xf7.\xfb\x05\xf9F\xf6O\xf7\xb6\xfb\x8b\xf65\xf5c\xfa\xc1\xf5\x1c\xfe\x03\xfa\xe4\xf8\xfe\xfa\x1d\xfdv\xf9\x9d\xf7N\x04\xe1\xfe\x8d\xfc\x8c\xfe\xfa\x03\xaf\xff\x84\x04\x8b\x05\x82\x01\x7f\t\x83\x05\xed\x02z\x0fQ\t\xc3\x05\xc3\x0e\x97\x0f|\t\xa6\x0cp\x0e\xe3\n\xc3\r\xdf\x0b\xde\x12\x8c\x0f3\n\x07\x0f\x03\x08\x87\x06\xd5\r\x1f\rJ\x08`\t\x1d\x0b\xb8\x06\xec\xff\xd2\x06\xd9\x08Y\xff8\x04\xaf\x0c]\x05u\x03\xd7\x07}\x03\x15\x06\x90\x07\xf3\x05R\rW\x0b\xaf\x064\x0c\xbc\x0fQ\x08}\x03\x89\x0bw\x0bs\x04t\n_\x0bc\x05)\x02\x05\xff\x17\xfd\xd8\x00\xd6\xfd\x15\xfb~\xfc\xf8\xf8s\xf4S\xf5|\xf6\t\xf1\xf6\xf1u\xf4\xab\xf4\xb9\xf5 
\xf4\xbb\xf3<\xf5\x9c\xf5\xa2\xf6\xcf\xfa\xd3\xfck\xf84\xfd\x05\xfdJ\xfb\x8c\xff\x87\xfd\xe0\xfd\xd8\x01\xf6\xf6o\x00X\xff\x83\xf8j\xf80\xfbb\xfa\xa1\xf1\xb7\xf87\xf8_\xf2\xc8\xee\x82\xf8\xdb\xf3\xcf\xec\xbc\xf2\xab\xf5C\xf4-\xee\x12\xf5\xe2\xf3/\xf4F\xf4\xed\xf8%\xf5\x8e\xf6^\xff\x03\xf9\xad\xf9\xf3\x04\x82\x01N\xf7\x99\x03C\t\x1c\x04i\x03W\rw\x0c\x1c\x02\xb5\x02\x9a\x0c\x9b\t6\t\xc4\x0b\xd8\x08\n\x0b&\tS\x05j\x04\xcd\n\x1b\x08S\x01\x8f\r\xdd\x0b\xf4\x01\xdd\x02O\x08e\x03\x80\x00\x80\x07Z\ne\x01\xca\x04m\t\x8f\xfe\x8d\x00\x13\t\xc8\x03\x8f\x00x\x0ci\r\xde\x001\x03\xcb\t\xc9\x08j\x06\xf3\x0c\x8f\x0e\x81\t@\t\xdc\x0e\x04\x08\xc8\x05x\r\xea\x0b<\x08\x85\x0c9\x0b\xb4\x05|\x06s\x04\xd3\xff\xb4\x02\x95\x07\x1b\x01[\xfd\xa3\x00E\xfd&\xf4\t\xf9\x84\xfc\xeb\xf6\x88\xf4B\xf7\x94\xf7O\xf3\xdc\xf2W\xf8\x93\xf4\xc6\xec_\xf29\xf7e\xf8\xb1\xf5]\xf5)\xf7N\xef\x90\xf0\xd3\xfa1\xf9\xc9\xef\xec\xf8<\xfd?\xf4\xe7\xee\xb3\xf7\x83\xf7X\xee\'\xf4h\xfbJ\xf3\xf5\xfa7\xf6\xde\xefA\xee\x7f\xf6\x97\xf9\xc0\xf3\x16\xfe\x04\xfaA\xf9B\xf4\x8b\x00X\xf7\x8d\xf8\x8e\x06O\xfc\x98\xfe\xbc\x06\xad\rS\xff\xff\xf7\x18\t\x9f\n\x11\xfd\x92\x07\xf1\x18\x03\n=\xf6\xb5\x13Q\n\xeb\xf7\xf4\x0e*\x10g\x02O\x07\xa0\rj\n\x85\x03\xb0\x03\x1e\t \x06\xd5\x06\xf4\x0e\x1c\x10W\x02\xd9\x05\x96\t \r\xc1\x02\x8d\t\xc4\x10\x8e\x04z\x05F\x0c~\x05,\x00_\x08>\x02\xf2\x02\xfd\x05V\x04\x9d\x00\x04\x02`\x02i\xfb\xbd\x03E\x00I\x00;\x05q\xfc\xe2\xfa\xab\x04\x17\x02\xc9\xf9j\x05Z\xff\xf8\xfa\xa8\xfek\x04\x9b\x01\xaa\xf9,\xfe\x0f\xfd\xfb\xfd|\xfd\xa0\x03\x16\xfe\xaf\xf3I\xf9h\x04w\xf9K\xf2\xc0\x06\xb6\xf7\xbb\xf3\x13\xf7c\x03p\xfaF\xefW\xfe5\xfe\xf9\xf0\x89\xf6L\t\xa9\xf9+\xf2\x08\xf5\xf7\x0b\xe9\xfc\'\xf1\x05\x0c\x16\x04\x01\xec?\x08\x06\x13Q\xfa\x1b\xfc\n\x05\xa9\x0e(\xf8v\x00\x13\x12\x8b\x02\xc1\xf8\xec\x04\x00\x049\x03\xcb\xfd\x0f\xf9\xbd\x07\xfa\xf9W\x04\xc9\xfcc\xfaP\x01\xb1\xfc\xc1\xf4\xf6\xfd\x11\nb\x00\x0f\xfa\x85\xfbC\x07\xea\x06\x93\xfbc\xf5\x87\x0f\r\x031\xf9\xd4\x0c\x93\x14J\xf8\xa3\xf4\xd6\x0c6\x06\xf4\x01\xf7\x03\x15\x0bR\xff\xfa\x04h\x02P\x07\xfd\xfeX\xf4f\x08e\x00\xf7\xfd\xaf\x05\x98\x042\xeeV\xf9{\ne\xf0K\xf8\x81\x03\x8e\xfb\x15\xf2\xa7\x00\x1f\x00y\xfa\x0b\xef?\xf9\x08\x03\xcc\xee\x02\xfeS\x08\x02\xfa}\xe8\xee\x05\xeb\xfa\xc8\xed\xcf\x03_\xf9\x03\xf98\x01\xac\xfd\xfe\xf9\'\x01V\xf7\xd5\xf7\xf0\x0c\x0e\xfe\xc9\xfa\x1c\x07\xc2\x07\xe4\xfdw\xf8\x7f\x0f>\th\xf5\xbb\x018\x1b\x7f\x01s\xf4\x8b\x10{\x0f\xf9\xf7\x1b\x03\xf4\x16\xa0\xfct\x01\x9b\x0cI\t\x86\xfa"\t9\t\xf9\xfa\xff\x06k\x0b\xc0\xfa\xeb\xfa\xed\r<\x04\xd6\xfc\xae\xf5\xcf\x0e\x1b\x02\xad\xf25\x04\x06\x0bN\xf8\xcf\xf9\xa0\x06h\xfa+\xfe\n\xfe\xf2\x08N\xf9\xe5\x01\x86\xff\x9b\xfa7\xfdH\xff\xa7\ti\xf9L\xfb*\x06\xb9\x06\x9b\xf5\xd2\xfcC\x0e\x07\xf7\x17\xf6\xaa\x11\xe2\t\xed\xf5\x91\x00\x0e\n-\xfb\xf4\xfd\xb7\nx\x03=\xfb\xc1\x05\xaf\x068\xfe\x87\x03[\x03\x1c\xf7[\xfa\xf0\x0b\xc1\x00L\xf9\x15\x01\x06\x03\xc3\xf3?\xf42\x03\x93\x02E\xf0\x82\xf2\xe0\x07\x1c\xfb\x7f\xf0`\xf9\xf2\xfeL\xf1\xdc\xf2[\x02-\x04n\xf4\xd8\xf8h\x01\x86\xf9?\xf9\x85\xfcP\x04\xc3\xfa]\xf98\x05a\xfen\xf2~\x02\x06\x00\x81\xf2\x81\xfc\xf9\xff\xe9\xfa\xf3\xf9\xd9\xfc\x95\xfb\xd1\xf7 
\xf4\x8d\x04\xe3\x01\x8e\xf2&\xf8\n\x0eG\xfbF\xec\xeb\t\xf7\x10(\xf9n\xf1\x9f\x11L\x0c\x89\xf5W\x04A\x0f6\x07\xd4\xf3.\x0e\xf1\x17S\xf1\xb9\xfdW\x1eX\xfe\x05\xedq\x19}\x08\xce\xfa\xa9\x02a\nJ\x00m\x04I\x06.\x06\xaa\xfdm\xfb\x90\x0ft\x06\x8c\x00\xf7\x01\xe2\n\x93\xfc\x13\x00\xa5\x0fz\xffa\xfa\xf8\x0f\xcb\x05\xda\xf5\xd6\x07V\n\xda\xfb(\xf8\x1f\x08\xb8\x02\x91\xfa\xad\x03?\xfeI\x00T\xfb\x9e\xfe\xc9\xff\xc2\xff\x90\x01\x8c\xfe\x0c\xfch\x07\xda\x04\x9c\xf8\n\x01J\nh\x018\x04f\nj\x05\x00\x02\xc4\x08X\x0c\xbb\xfe\xae\x07x\x05N\t\xb9\x06{\x03J\x0cQ\xff\xce\xf8\x9f\x05\xbc\x07\xc8\xf8C\x02\xb3\x00\r\xfa\x8b\xf7\x93\xfe\xba\x01\xaa\xf5\xca\xf1A\xfa6\xfd\xaf\xfd\xa5\xf7D\xf9\x03\xfdG\xf1\xbd\xfb\x81\xfe\xcf\xf8\x13\xfdy\xfc\xa0\xf5\xac\x00&\x03\x05\xf3t\xf6\x12\x00\xaa\xf7\xa0\xf8\xd3\xffa\x00\xe4\xf6!\xf4M\xf4X\xfaC\xfe \xf3x\xf9Z\xf9\x10\xfad\xfb\xf5\xf0~\xfb\xa5\x00\xfc\xf0Z\xf7i\x00,\x01\xa9\xff\xa4\xf1t\xfc\xd1\x0cg\xfc\xe0\xeer\t\xb5\x0e\x98\xf6\x0e\xfb\x80\x0e`\x05h\xfc\xe4\xff\xc8\x08\xdd\xfe\xbf\xff\x1e\x05\xbf\x051\x05\xa6\xfb\r\x0bS\xfb\xb9\xfe\xb1\x06h\xfeC\xfa\x97\x04\xe5\r\x14\xfc\xcf\xfa\x87\x03b\x03M\x00;\xfb]\x07\x15\x04\x14\x05%\x02\x17\x02\xb1\r\xde\x02\xc2\xfbW\x03\xf9\x10\x14\x03K\x00d\t\xe1\x0b4\xff\xf9\x00\xfb\x0b\xc0\xff\x8e\x03_\x02\'\x01\xdb\x08T\x03z\xffv\x050\x02\xae\x00\x91\x01\x0b\x00\x15\x00M\x05K\x02\x1e\x05S\x03\xf9\x00\x05\x02y\x01}\x01\xb5\x03L\x078\x01B\x06\t\x08u\x03\x1e\x03<\x03T\x06\x0c\x04\x1a\x03Y\x07\xe1\x06\xa8\x02\r\x03\xa4\x01\xc8\x01*\x02n\xfd\xdf\x01\x9f\x02\x83\xff%\xfea\xfc\\\xfe\xf7\xfc\x8a\xfb\xb4\xfav\xfc\x81\xfa#\xfb\x0e\xfd\xbc\xf8\xf8\xf8\x9f\xf5\xd7\xf8\xfe\xfa\x04\xf7V\xf9\xae\xf8{\xf4\xad\xf8m\xf8H\xf5O\xf7\x15\xf5\xd7\xf6\x1f\xfb\xa3\xf7s\xf9s\xf5\xa7\xf3\xcb\xf63\xf8\x96\xfb\x1c\xf9\xc0\xf7\xd6\xfa\xc8\xf6\n\xf3\x08\xff^\xfa\x88\xf4\xbc\xfc\xca\xf9\xa4\xfcP\xfc~\xfc?\xfa\xa5\xfa\x06\xf9\x8b\x00\x12\xff\x06\x00\xee\x03\x90\xfa\xe3\x00g\x03}\x01\xeb\xfd\x05\x02c\x00R\x04\xa1\t\xf6\x05\xe0\x01C\x04\xd0\x01\x93\x05\x11\x05e\x06\xb1\x07\xcd\x05\xae\x0c\xbe\x03?\x06\x18\x08v\x05\x8b\x04\x12\x07k\x06\x80\x035\ri\x07\xe8\x02\xfa\x03\x8a\x05\\\x04i\x04D\x05\x88\x04"\x03r\x03\x9a\x07\x15\x03\xaf\x01\xa7\x03\x8b\x02\x05\x03\xb3\x03\xe1\x05\xe0\x03\xd0\x04m\x06\x14\x04(\x05-\x04\x08\x05\x85\x07\x97\x07Y\x05\xd9\x07\xa7\x06\xd7\x05\xcb\x07o\x05\xca\x05\xdf\x03\xb0\x04+\x05C\x05\xfa\x03\x0e\x02\xf7\x00\xb5\x00\xef\x00\xdd\xff5\x00\xec\xfe8\xfd\x1b\xfd\xe7\xff\xfe\xfc,\xfb>\xfd~\xfb\xc4\xf98\xfc\xd0\xfcu\xfa\x89\xfa\xe0\xfb\xc8\xfa\x12\xfaT\xfc\xd9\xfaz\xfa\xfb\xfa\x99\xfc\t\xfbV\xfaT\xfc\xe1\xfc\x15\xfc\x9e\xfa3\xfb)\xfbu\xfc\x9d\xfb\xe0\xfa\xf2\xfa_\xfbq\xfa\xdb\xfb\xa1\xfa\xf4\xf9\x94\xfaP\xfa\xa4\xf9F\xfb\x9a\xfb\x11\xf9m\xfa?\xfb\x99\xf9\x15\xfa\xe3\xfb\xe6\xf7\xa4\xfbA\xfc\xce\xf9i\xfb\xe9\xfc\xc2\xfa\x95\xfaO\xfd\xbf\xfcf\xfb?\xfe\xfb\xff~\xfdL\xff\xcf\xff\x8c\xfev\xff\xcf\x01<\x01\xdb\x02\x9b\x02\x12\x02\xd7\x03 
\x04S\x04I\x04\xd8\x03\x1f\x04\xc4\x05\x8d\x06\xda\x057\x04\x8b\x052\x05T\x04\x9a\x03\x83\x05@\x05e\x04\xde\x03b\x03\x93\x02F\x02\xf9\x03\x10\x02\x98\x02\xaa\x02\n\x01\xe9\xff\xf9\x01c\x02\x92\x00\x15\x01\xfe\x00v\x00\xef\xff&\x01\xbf\x00?\x00_\x00\xf0\xff\xea\x00\xed\x00\x8e\x01\xd5\x00t\xff/\x02\xbe\x00X\x00\x17\x02\x05\x03\x1d\x02M\x01\xaf\x01\xe7\x02.\x03\x01\x03\xc1\x03O\x03\x8c\x03;\x03\x9f\x03\xe3\x04\x13\x05\x96\x03\x1f\x04\xb6\x03P\x03~\x03\x98\x03\xcc\x02X\x02\x17\x02\xdc\x00\x17\x01\xa0\x00\xb2\xff\r\xff\x16\xff\xe2\xfd\xad\xfd\xc1\xfd\xd3\xfc`\xfcN\xfcq\xfc;\xfba\xfb>\xfbB\xfb$\xfb\x16\xfb\x03\xfa\xe0\xfa\x13\xfbf\xfb\xef\xfbE\xfa\xfe\xf95\xfa\xad\xfbv\xfb\x9f\xfb\\\xfc%\xfb\x89\xfb\x99\xfbe\xfb\xcd\xfc\xb3\xfc.\xfc\xf0\xfc\x9d\xfc\x97\xfbH\xfdM\xfeP\xfdO\xfb\xe1\xfdK\xfe(\xfd|\xfe\x14\xfep\xfd^\xfdu\xff\xd7\xfe\xa5\xfe\x83\xffS\xff\xf1\xfd\x0f\x01@\x01\n\xff\xcd\x00\xd9\x01\xa5\x00\xc4\x02\x8d\x03\x81\x00\x9a\x02\xe7\x031\x02F\x03\xf1\x04\x89\x03\xbe\x02\xbd\x03\xb0\x04W\x035\x02\xa6\x03\x8b\x04)\x03\xed\x02(\x03\x9f\x02\xfa\x00\x8a\x02\xaa\x02\xc7\x00\xcb\x02\xa4\x01\x9f\xffL\x00\xb9\x00\xd4\x00\xe1\xff\xcc\xfe\xe0\x00\xd2\xff\x80\xff\x9b\x01T\x00\xad\x00\xe3\xffp\x00u\x01\xf5\x02m\x014\x01\x0c\x025\x03\x98\x03\x19\x02\x95\x03\x08\x05\xad\x03\xf1\x01>\x04\xac\x05J\x04r\x03W\x04\xd1\x03\xcb\x03\xb2\x03r\x03r\x03J\x03\xb8\x02\xb7\x01\x99\x02W\x02V\x01:\x00\xa7\x00{\x00\x18\x00Y\x00I\xff{\xff\x05\xff\xb1\xfe\xf2\xfd#\xfeK\xfe\x84\xfe~\xfd\xd7\xfd\xed\xfd\xb7\xfd&\xfdx\xfdN\xfdd\xfc\x81\xfd\xbd\xfc\x0c\xfd\xb1\xfc\x1c\xfd \xfd^\xfd\xb6\xfc\xc0\xfb\x8d\xfb\xbf\xfdT\xfdF\xfc(\xfe\xb5\xfc\x13\xfd\x03\xfcd\xfc\xb6\xfd\xde\xfc)\xfc\xbb\xfc\x85\xfd\xb2\xfbW\xfd\xa0\xfc@\xfc\xe6\xfby\xfc@\xfe\x9b\xfc\xae\xfd_\xfd=\xfd\x10\xfd\xea\xfef\xffM\xfej\x003\xffl\xff\xdc\xff(\x00y\x00\x06\x01\xae\x01\xc7\x00<\x01X\x01\x1c\x02\x0c\x02\xcf\x01\xc1\x01*\x02\xfb\x01 
\x02k\x02n\x01\xbd\x01~\x028\x01\x1d\x01\xe5\x01^\x015\x01\xfb\x00;\x01R\x00\xb1\xff\xd6\xffw\x01!\x01x\xfe;\x00\xd1\x00\xb2\xff\xc7\xff\x8f\xff\x88\xff\xda\xfe\xc0\x00k\x00\x16\x00\xc4\xff5\xff\x03\x001\x00\xf3\xff\xb8\x00\xdc\x00\x93\xff\xae\xff\xd6\x00\xb5\x01\x1a\x01\xe5\x00\x05\x00\x99\x00\xd9\x012\x02\xbd\x01\xdc\x01j\x01\xb9\x01S\x02\x83\x02w\x02g\x02\xe8\x028\x02d\x02H\x03\xe5\x02\xbe\x02x\x02{\x02B\x02\x95\x02\xe7\x02|\x02t\x02\xb7\x01Y\x01\'\x01\xc7\x01\xb8\x01\x89\x00\x14\x00\xde\x00\x12\x00\xad\xffe\x00\x0c\x00R\xff\x06\xff\xa7\xff\xba\xfe\x9a\xfe\xda\xfe\xe6\xfe0\xfe\x06\xfey\xfeq\xfe;\xfdK\xfe\xe7\xfd\x86\xfc\xd9\xfdi\xfd\xaa\xfd\x90\xfdW\xfcp\xfd\xee\xfc\xb2\xfc6\xfd\xbd\xfd6\xfc\xea\xfc[\xfd\xb0\xfci\xfdc\xfdf\xfc\x1c\xfe\x07\xff\xae\xfc\x17\xff`\xfd\x9b\xfc.\xff\xe7\xfe\x04\xff\xa9\xff+\x00\x91\xfd"\xfe$\x00\xd9\xfe\t\xff\x0e\x00]\x00\x84\xfe,\x00\xcd\x00v\x006\xfe$\x00\x9a\x01\x9e\xfd\x98\x01X\x01\x8b\x00\xf9\xff\xb0\x00p\x00/\x00\t\x01\xa4\x00\x17\x01M\x00\x92\x02u\x00\x8e\x00\xe8\x00\x8b\x01\x03\x00]\x01\x85\x01e\xff\xa6\x01=\x00g\x00\xa1\x00H\x01\xe8\x003\x00\xa8\x00\xaf\xff\xc2\x00\xd6\x00"\x00\x8f\x01%\x01#\x01b\xff\xe4\x01\xdf\x01\xa6\xff\xd4\x01\xa5\x00\xe0\x01V\x02\xd0\x01&\x00T\x02\xb5\x00\xd5\x01\xd9\x029\x02\xdf\x02v\x01\xfe\x01a\x01\xf4\x03e\x01\x03\x04d\x03\x97\x01\xba\x02.\x03i\x02\xf6\x02\xc0\x03|\x02\x95\x02\xe4\x001\x03\xc7\x01\x15\x02#\x02x\x00\xee\x01\xe8\xffS\x00\xd7\x01L\xff\r\x00\xe3\xff:\x00\xc6\xffN\xff\xd3\xff\x8c\xfe\x13\xff\x1f\xfeT\xfe\xce\xff\xd8\xfe\xe6\xfd\x8e\xfd\xaa\xfc"\xfe\x06\xfd"\xfe{\xfd\x14\xfe\xa8\xfb\xa6\xfcr\xfd\x9a\xfdE\xfd\xb2\xfc\x00\xfe>\xfc@\xfdI\xfc6\xfe\xb6\xfd\x8a\xfb|\xfdJ\xfe\xd3\xfc0\xfd\x03\xffV\xfd\xcb\xfb\x17\xfd"\xfe\xbf\xfe\xea\xfc[\x00k\xfbp\xfc=\xfeG\xfec\xff\\\xfd\xfe\x00q\xf9\xe6\x00^\xfeF\x01\x00\x00W\xfc\x93\x00\xc2\xfb\xea\x02-\x01\x8a\xff\x14\x00\x9e\x00\x8a\xff\xf7\xff\xc8\xfeG\x02;\x01\xd8\x003\x02\xaa\xfd\x8d\xffY\x02@\x03\xf8\x023\x00=\xfd\xce\xff\xe5\x01\xb8\x02\x81\x03\\\x02\xc8\xfd\xa9\xfd}\x02\x83\x02\x85\x00\xd2\x00|\xfec\xff\x0b\x02\xae\xff\xc9\x00z\x00\xb9\x00\x07\x00@\xfe\xc6\x00J\x03\xf9\xffj\x00\x10\x02\x19\x00\x98\x01\x8a\x01\x07\x01\xe4\x03\xd8\x00\x89\x01C\x02\xb2\x021\x04\x84\x02\x05\x04(\x01v\x01{\x04\xa3\x03j\x03\x1f\x04x\x01\xf7\x03\xff\x03y\x03R\x02M\x02B\x02\xa9\x01x\x03\xe3\x02}\x02\xde\x00S\x01$\x00c\x01\x8f\x01\xa4\x00n\x00\x92\xfe?\xfe%\x00\xb2\x00\xfd\xfe\x0e\x00\x00\xfd4\xfd1\xff\x98\xfe}\xfd\x19\x00\xc6\xfd\xe0\xfcG\xfdt\xfc\xe5\xfe9\xfc\xbd\xfd\xc4\xfe\x87\xfc\x99\xff\xb6\xfe\xa6\xfa\xb9\xfbc\xfcV\xfc\x9e\xfd\x8c\xfd\x8c\xfc\x86\xfd-\xfa\x16\xfc\x8e\xfc<\xfd/\xfc\xe2\xfbp\xfc\xaa\xfc|\xfd\xbb\xfai\xfe<\xfc\x8a\xfd\xcf\xfd+\xfd<\xfd\x03\x02\x9c\xfc\xf3\xf6j\x031\x00\x12\xfc\x12\x03\x9e\x02\xf9\xfaH\xfe\x9f\x02\xf1\x00\xbc\xfew\xfc\xab\x02\'\x05\x8e\x05t\x026\xfd\x9c\xff\xeb\xfd\xe5\x02B\x02\xda\x02&\x03\x1f\x05\xdf\x04\x15\xfc\xce\xff\xff\x00!\xffl\xff\xc6\x07\x11\x03\xaa\xfd\x99\x03\x9d\x00\x8b\xff\xed\xffx\x01\\\x03\xa4\x03m\xfe\xbb\xfe\x1b\x02p\x03c\x04_\x00W\x02N\x049\xfez\x00j\x05\xfe\x03\xaf\x02\xab\x04#\x06\xa5\x01\xbc\x00\xb6\x066\x06;\x03\x18\x04\x9c\x02\xc3\x02\xd6\x04\xb0\x07\xc1\x043\x02\xad\x03\x0b\x01\xc0\xffl\x02\x96\x03\xce\x01d\x02\xe1\x02\x84\xfdO\xfe\xc6\x01\x97\xffe\xfe\x8a\xff\xe6\xfd\x01\xfd\x8d\xff\xb4\xff\xc0\x01\x82\x00\x17\xfb\xe0\xf9|\xff\xcf\x00\'\xff\xcb\x00I\x01\xbd\xfe\xb1\xfav\xfc\xa2\xfe\x1e\xff$\xff\x99\xfd\xb8\xfd\x8a\xfdY\xff\x99\xff3\xfd\xf8\xf9\xd4\xfb\xad\xfd\xd4\xf9\xda\xfb`\x02\x08\x00\xc9\xf7_\xf9\x99\xfb\xa0\xfa\xe6\xfc(\xfe\xd4\xf8s\xf
a\xef\xfc\xeb\xfa@\xfd\xe8\xfb\xdb\xf8V\xf7\xb0\xffh\xfeD\xf8F\xfc\x02\xfe\x90\xfao\xfcM\xfc&\xf9`\xff\x9c\x01\x07\xfdI\xfb\xb6\xfcg\x00n\x03`\x00\xfe\xfa:\xfd \x01_\x02\x8c\xff\xe5\x00\xe5\x02B\x00\x86\x03\x00\x03\xd3\xfcP\xfd\xb6\xff\xa4\x03r\t\xa5\x05f\xfd|\xfc\xfd\xfc\xb0\x00\x85\x039\x01&\xff\x8e\xff\xd5\xff \xfek\x00\x80\x01\x8f\xff\xd1\xfe+\xff\x9f\x03\xaf\x05-\x06\'\t\x0f\x0b\x1b\x0b\xaa\t\xec\x08\xef\x07C\x0c\xd3\x11\xcb\x11x\x11\x08\x12\x9e\x0f$\r;\x0e\x98\r$\x0c\x06\t\x18\x07\xb2\x07@\x08\xfb\x06\xcf\x05\xd4\x02\xf4\xfeF\xfa.\xf8n\xf9\x9e\xf9x\xf8;\xf7\xe7\xf7\x8a\xf5S\xf6U\xf5\xeb\xf4"\xf7\xe1\xf3\x1a\xf3\x12\xf5\xf8\xf9y\xfc\xbd\xfbr\xfb\xe8\xf9\xdf\xf8\'\xf96\xfb\xe1\xfd\x96\xff\xdb\xfc\t\xfe\xe4\xfd\x80\xfcs\xff%\x00\r\xfe\xd8\xfb\xab\xfb\xfb\xfb\xd7\xfe\xc9\xfdp\xfe)\x00(\xfe}\xfb\xf7\xf9N\xff\x06\x00\x18\xfc\x03\xfe\xb8\xffd\xfdA\xfd\xb9\x00*\x01\x1c\xfc\x94\xfa\x93\xfb\x8a\xfb \xfe\xea\x010\xfe\x95\xfcD\xfd\x18\xf9b\xf9:\xfa9\xfc\x98\xfb\r\xf9\x10\xfa\xff\x00u\x01\xd3\xfc\xb2\xf9\xf5\xf4\xdc\xf4\x17\xfc\xbd\x01\xe4\x01.\x00\xc3\xfe\xff\xfd)\xfb\x9c\xf9\x8e\xfc\xe6\xfb\x8f\xfbW\xfc\xb0\xf9\xad\xfd\r\x03\x84\xffc\xfc\xb7\xf8\x8d\xf0\xff\xeb\xfb\xf1G\x01\x13\x12\x0c\x1c\xe0\x1a|\x12H\x0eu\x0fX\x13g\x1c\xb5)\xa44\xf85\xed482D.\r)5\x1f%\x18\xd0\x15\xf0\x17\t\x1c\xfb\x19/\x11r\x041\xf68\xe8\'\xe0\xa5\xdf\x99\xe2u\xe47\xe3\x89\xe1\xdb\xde\xa3\xda\x1b\xd7\x9e\xd4\xb3\xd4\x90\xda\xb4\xe4T\xef\xbf\xf8>\xffW\xffu\xfa<\xfa-\xfd\xe5\x03\xe0\r\x00\x13\xa4\x16s\x18[\x17\x96\x14v\x0f\x13\t\x9e\x04\xde\x03\xc8\x04\xf1\x07!\x08\xfe\x04\xac\xfdX\xf2D\xea\xc7\xe7\xc1\xe7C\xe9\xd0\xec\x9c\xee\xbf\xee\xfd\xed\xf9\xed\x93\xecw\xeb@\xed\x05\xf1`\xf7D\x00A\x07u\x07\xcd\x04\xe0\x019\xfeM\xfe\xb2\x01\x9b\x05\x86\x08\x19\x07"\x03\xba\xfeK\xfa\x9d\xf6\xa1\xf4\x8a\xf0h\xf0\x96\xf2\x98\xf3\x97\xf6N\xf7\xd6\xf2\xfa\xebK\xe7\xea\xe5\x89\xea\xd8\xf2*\xf9\x98\xf9+\xf7\x19\xf5{\xf7\xd3\xf8@\xf8\xde\xf5F\xeep\xeb\xc1\xf3$\x15\\A\xf5S"D\x9b"\x03\x13\x7f!?r\xe9ZqL\x11Ha?\xdf&\xef\x0cc\x071\x10\xd0\x13\xf0\x07\x84\xee\x9e\xd2\xd4\xbb\x86\xad\x91\xad\x8e\xbbR\xcb\x11\xd0&\xc7\x9f\xbe_\xc0F\xc9\x8b\xd0\x87\xd7u\xe5g\xf6|\x08h\x12\x7f\x19*\x1e\xc1\x1c\xad\x1d\xbe\x1f\x0e"8&\xbd"\xff\x18\xe0\x12\x8b\x0f\x97\t}\xfd\x9e\xec\x90\xde\x9f\xd6\xbd\xcf\xbf\xcd\xe0\xcf\xf2\xd0\xea\xcc\xa8\xc6\x0f\xc5\xf3\xca\x14\xd6Z\xdcb\xe0L\xe7w\xf1\x88\xfe\x0c\n\xf2\x12\xd9\x18?\x19g\x15]\x14\x8b\x18\x00 
\xd7#\xf1\x1e\xa9\x16\x0c\x0fd\x07Q\xff\x1a\xf7s\xf0\x95\xed\x96\xec\xc7\xe9\xcb\xe6\xf7\xe1\x1a\xdc"\xd6\xe3\xd2)\xd6f\xdfd\xe8\xf1\xee\xb0\xf0\x02\xf2\x90\xf6\x98\xfa\t\x02q\x08\xe0\x0e*\x13\x1a\x15\xb9\x17\xca\x180\x19\xe1\x12\xd1\x0b\x00\ta\x03\xa0\xfd\x96\xfe\xaa\x14;>\xa9Y\x81PC.9\x17\x03\x1d\xa44\xfcL\x88[\x9ba\xfdV\xc8=e\'5\x1c\xa3\x17\xc3\x0bk\xfb\x1f\xf7\xfd\x01\xc4\x0bi\x01\xff\xe2\xf2\xc1\x95\xaf=\xb1\xee\xc1`\xdaN\xed\xfb\xf0\xd4\xe3\xba\xd4\x9f\xd2*\xdc\xa5\xeb\xbb\xf8\xab\x06)\x16\x00$[)\x81"8\x12`\x03\xa3\xffp\x03\x93\x0e`\x17[\x10L\x01\x07\xed\x89\xdaP\xd4\xca\xd3\xfe\xd2\xa7\xd0\xe3\xd0\x85\xd7\xd5\xe0X\xe4I\xe1\xd8\xdc}\xd9\x1d\xdf\xb9\xf1\xc5\x08\xd5\x19W\x1c;\x13?\x0b\xbf\t\x8a\x0f\xb3\x17\xe3\x1a\xf4\x18\x88\x13\xee\x0c\xcf\x05T\xfe\xdf\xf6\xde\xed\x16\xe6\xfa\xe1Z\xe4\xea\xe9Y\xe9\xcf\xe2G\xd9\x07\xd4\xb2\xd5\xc6\xdb`\xe5\x81\xee\xdc\xf4L\xf8\x81\xf9\xf9\xfc@\x01H\x03\x8c\x05=\t~\x11\xff\x1a\x90\x1f\xa8\x1d\xa5\x14\x85\x08\x92\xfej\xfa\x00\xfe\xf7\x04\xc0\x03\xb2\xf5p\xe4\xf1\xe6\xa0\x0c\xda>\xceS\x03=\xbb\x1b\xa7\x14\xaa,\xd4M\xef^\xbdbs^qN]=\xba0j\'\xcd\x1bT\x05\xb6\xf6j\xfc\x8a\t\xf5\x07\xfa\xee\xf8\xcc\xf6\xb7\'\xb5\xaf\xbf\xcd\xcfa\xde\xbc\xe4k\xe2h\xdbe\xda\xbc\xe3X\xf0\x17\xf9B\xfe\xe6\x08\x96\x1a\xf2*\x06,\x0f!j\x11\xa9\x03V\x03\x02\x08U\r1\x0e0\x04=\xf9|\xebR\xdd\xc5\xd38\xcb\x93\xcc\x1b\xd4b\xdb\xab\xe3\xcd\xe7\x0b\xe7,\xe2J\xde\xa2\xe3\x91\xf3g\x05w\x11q\x17e\x17g\x12\x8f\r\'\x0b,\x0b\xa6\x0c\x87\r\xca\x0c@\x0bW\x08\xa0\xff\x16\xf1a\xe2\xd0\xda\xf6\xdb)\xe2\xd4\xe8\x1c\xeb\xc0\xe7\xb7\xe0\x8f\xd8\xf9\xd6\xe8\xdd\xbe\xe9\xfd\xf4C\xfc\xba\xfex\x02!\x05\xe6\x03\xad\x04\xa4\x05\xdf\t+\x11X\x14\xcf\x14\x9c\x11$\t8\xff\x01\xf9\x18\xf8\x16\xf9\n\xf9"\xf2\x92\xe3\x8b\xd78\xdb\xf1\xfd\xdb1\x95RtO\xfc8\xcf,~8SRYb\xe4h^o\xbcq\xb3i@Xb=S\x1c\\\xfd\x80\xe9\xb8\xeaY\xfag\x01f\xefJ\xcf\xd9\xb2Q\xa9\xce\xae\xaa\xb8d\xc6j\xd7{\xe5\xce\xed\xd7\xf2\xb1\xf6\xe6\xf8!\xf6\xf5\xf5\x93\x04\xfb\x1f\x047w=\xf6-\xfd\x15\x1a\x06b\xfd_\xff\x94\xff\xef\xf8\x1e\xf4\xaf\xeb\x94\xe3\xa4\xdc\x19\xd13\xc6\x18\xbf\xe4\xc0\xae\xcd\'\xdeY\xec\xab\xf2Y\xf1\xa1\xee\x9d\xefr\xf7\xa9\x03\x04\x11\xef\x1b\xa7\x1f\x9e\x1d\x9c\x19\xad\x14\xe6\x0er\n@\t\xf3\x08\x84\x07\xf4\x02\x1e\xf9Z\xee\x89\xe5r\xe0\x1e\xdf\xb2\xde\x90\xe0D\xe1@\xe1\xc4\xe0 \xe0\x8d\xe2\xb5\xe7~\xed\xa2\xf4\x82\xfc\xd4\x03~\x08D\x07\x03\x04\xb7\x03\xe4\x07\x8c\x0f\xb5\x14\xba\x14I\x10r\x079\xfe\x16\xf8\xd2\xf5\xc5\xf4\x9f\xf1\xfb\xeb\xc4\xe9\x8f\xec\xa0\xe9\xdf\xdb\x07\xce\x96\xd4\xc4\xfcE2\x95T\x00Z\x94I\xa1:\x85=1M{c\x14uxyRp\x98\\\xf5E\xc50n\x1a\xf2\xfeB\xe7\x1a\xdf\x85\xe3\xcf\xe9V\xe9\x03\xdbC\xc5<\xb2F\xab\\\xb6\x11\xce]\xe7\x1b\xf8\xb9\xfc\x95\xfba\xfb\xea\x00\xe3\x08\x15\r\xc2\x12\xa6\x1b\xe2%\x11,2(\x10\x1c\xdb\x08\x84\xf3\x1d\xe5\xc7\xe1m\xe6W\xed{\xed\xeb\xe2\xb7\xd49\xc8)\xc4(\xc8\xfb\xce-\xd7\x00\xe0%\xeb9\xf6L\xff\x9e\x03\xbe\x02\xc1\x00,\x00\xcc\x05\xce\x13\xcc \x9f#\xed\x1do\x14\xc2\r6\x0b \x08]\x033\xfe\xc5\xfa\xce\xf6\xe6\xf0v\xe9!\xe3#\xde\xa9\xd8>\xd6\xb7\xd8h\xdf\xb4\xe6\xd1\xe8\xd2\xe7\xa1\xe8*\xee\xb4\xf6\xcf\xfd\x07\x04j\t\xac\rU\x11n\x11\xf7\x0e 
\r\x9c\n\xd0\x08\xb1\x08D\x07\'\x03\x80\xfc]\xf5|\xf0;\xee@\xea\xbe\xe4\xc4\xe2\xd7\xe1\xd3\xdd\x82\xdd8\xec\x93\x10\x98;qR\xefQ\x82FYB\x89K\x0c\\Fk\xddsQu\xafl\xddW\xb8;\xb6\x1d\xfd\x03Q\xf2<\xe8\xd6\xe7]\xeb\xb9\xe8O\xddG\xcdF\xbd\xe0\xb5\xea\xb9\xf3\xc6Z\xda\xe1\xee<\xff\xc9\x08K\t\xd8\x04\x9e\x00\x9c\x00]\x06\xcf\x10H\x1e\xb8\'M(p\x1bu\x07\x0c\xf57\xe7\x82\xe1\xf5\xdf|\xe1\xa3\xe5Y\xe8\x89\xe8\xe6\xe1(\xd8\xee\xcf^\xcbY\xd0\xf5\xdb]\xeb\x1e\xfaJ\x00&\xff\xd4\xfc\xf1\xfb\x99\xfd\xd0\x00j\x04r\x0c\x9a\x17\xb4\x1e\x80\x1b\t\x10\x1e\x03w\xfa\x88\xf8\x95\xfaW\xfd\x92\xff\x9c\xff\x98\xf9\xa2\xee\x9f\xe3\xf3\xdc\xd0\xdbX\xde-\xe4\xca\xeda\xf7N\xfc\x9f\xf9\xb6\xf16\xec\x9f\xedP\xf5&\x01\xb6\x0bS\x12\xed\x13[\x0f\xc9\x07\xf6\xff\xac\xfb\x9c\xfc\xfb\xfe4\x01>\x01\xd2\xfe\xa2\xfb\x01\xf7t\xf2s\xec6\xe7\xff\xe5s\xe8B\xeew\xeb\xf5\xe6\xa2\xf3\xe4\x145>\xa1T>R\xbcG\x9eC$K\'V\xc3^\xc3c&d\x94`\xa2Q\x8f8\x11\x1b\x98\xff6\xea\xf3\xdaE\xd7z\xde\'\xe7\xde\xe6(\xdb\xb9\xca\xf3\xbfx\xbf\xd0\xc7i\xd7\xb5\xeb\xe8\x00!\x11l\x16\\\x13\xf6\x0b\x10\x06v\x04\x8f\x06\x02\x10\xab\x1a\x9d!\xf1\x1e\xea\x0f\xfe\xfdl\xed\x85\xe3\xc1\xe0\x91\xde\x16\xdfS\xe1\xab\xe3\x11\xe4`\xe0\x14\xd9\x81\xd1P\xd0\x84\xd6\xa9\xe2U\xef\x86\xf8\x96\xfdZ\xfe*\xfer\xff\x92\x03\x1c\t\x91\x0e\x85\x13\xb9\x15\x05\x14\xdf\x0e\xad\x07\x7f\xff\x1e\xf9\xfe\xf6\xe8\xf7\xab\xfb\x97\xfe\xbc\xfb\x08\xf4\xc5\xe9\xbf\xe1\xb8\xde\x01\xe1\xc7\xe7\xf1\xeeI\xf5S\xf9\x94\xf9\xda\xf8\xd4\xf6f\xf4\xbc\xf4\x82\xf7;\xff\xb4\t\xfb\x10\x86\x13\xc4\x0eM\x05*\xfeM\xfa\xa0\xfa\x0c\xfd\xb9\xfe%\xff|\xfc\xc0\xf8p\xf2\xdf\xeb\xcc\xe7\xdf\xe6\xe0\xed\x08\xf8\x1b\xfd\x8c\xf8\x80\xee\xbf\xecV\xff\xfe%9M,_\xd4Y\x03L7G\x90NyU\x1dU\xe2OkIqF~>\x82*\xcc\x10\x0b\xf5\xf4\xde\xce\xd3\x0e\xd3\x9a\xdb\xd9\xe5B\xe9\x01\xe3{\xd7b\xd0\t\xd3\xc7\xdd\x92\xeb\xaa\xf8Y\x03\x8f\x0co\x13s\x15l\x12\x89\x0b\xf3\x03O\xffJ\x01\x1f\t\xfd\x120\x16X\x0f\xbc\x00\xda\xedU\xe2n\xde[\xdf\xbe\xe4.\xe8)\xea)\xe9\xb3\xe3\xb6\xdeR\xdb<\xda>\xdc\xd7\xdfS\xe6\x9e\xf0\x1e\xfc\xf5\x05Q\n\x9f\x07\xd4\x01\x0f\xffy\x02J\n\x90\x11\xf4\x15\x1a\x15\x1b\x0f\x02\x07\x16\xfe\xfe\xf6\x02\xf3~\xf0\xb9\xef\xc3\xefk\xf0W\xf0\xab\xee\xff\xea\xcb\xe6\xfa\xe5.\xe8\x98\xee\xff\xf5\x16\xfdS\x02\xa1\x02\x1f\x01\x07\xfe^\xfbm\xfcT\xff\x15\x05\x19\x0bm\x0e\xa0\x0e\r\t\x08\x00W\xf7\xf6\xf0\x0f\xef\xca\xf0H\xf4\\\xf8\xb4\xfa\xe3\xfa\xff\xf7\xc1\xf2~\xed\xf1\xeb}\xf0\xb5\xf5\xc6\xf9\x10\xff\xfe\t\x8f!\xa7<\xaeP\x8bY\x97SKI\x9e@\xec<\xdeB\x8aK\xa1Q\xc4M\x19<\xb7#.\x0b\xf3\xf7\xd0\xea\x15\xe0\x0c\xd9\xc7\xd8\x86\xde\x8a\xe5\xed\xe8H\xe5+\xdeV\xd8\xa2\xd6\xf0\xda\xe7\xe5\xfd\xf55\x08\xd1\x15\xd1\x19,\x15;\rR\x08\xdf\x07\xd4\x08\xff\x08"\t[\nk\x0c\x10\x0bR\x03\n\xf7e\xe9\xef\xde[\xd8\'\xd6\x80\xd9\'\xe0\\\xe7\xce\xe9\xdc\xe4\x7f\xdd\x03\xd9\xe8\xdac\xe1\x97\xe8\xe7\xef\x98\xf8\xef\x02=\x0c\xcd\x10>\x0fQ\n|\x05\x1a\x03V\x03M\x06\x93\x0b/\x10\xa4\x10\x84\n)\xffQ\xf3\xfc\xebf\xea7\xed\xf3\xf1\x94\xf4\x84\xf5\xda\xf4\xec\xf1\xac\xee\x82\xeb\x81\xe9P\xeb\x06\xf0\x1b\xf7\x91\xff[\x05\xc9\x07\x1d\x06\x90\x00\x9a\xfc\xcd\xfb\x0f\x00\xb3\x07\xf6\r^\x11\x8a\x0eT\x07Y\xfe\xca\xf4\xbc\xf0\xc4\xf0\x9a\xf37\xf7\xbc\xf6\xbd\xf5\x8f\xf4%\xf2\xea\xee(\xec\x86\xec\xed\xee\xa6\xf1I\xf4}\xfc\xaf\x11z/\x1eMp^\xd4\\nP\xb1D\xc2?\xafB\x95H6M{N\xf7F\x905\xb8\x1d\xfe\x05\xbd\xf3Y\xe6\x17\xdc;\xd5\xd0\xd3\xc6\xd8\x1c\xe0\xd4\xe4\xdc\xe3Q\xde\xa5\xd8.\xd68\xd9\xb5\xe2\x92\xf2G\x03\x00\x0fh\x121\x0f\xfe\x0b\xe9\x0b\xe7\x0c\xd3\x0c\xe6\n1\t\xbc\n\xb7\r\xaf\x0el\x0bF\x03\xf2\xf8\xed\xed\xf7\xe3\x7f\xdd\xec\xdc\x05\xe2\xea\xe8?\xec)\xea@\xe3[\xdc\x98\xd9\x82\xda\xac\xd
f\xf5\xe6@\xef\xa4\xf7\xd8\xfd\x82\x01\xf3\x03\x0c\x06\x89\x085\n"\t\xae\x06\xc9\x05\x86\x07\x9f\x0b\xda\x0e\xbd\r\x0f\t\x80\x01\xaa\xf9\xaa\xf3V\xef\xef\xeeu\xf1u\xf4I\xf6O\xf5\x8c\xf2\xb6\xf0z\xeff\xf0\xfb\xf2\x85\xf60\xfc^\x02$\x07\xca\t\xe4\x08E\x06\xc7\x04\xba\x04\xf4\x06\x1e\t\xa8\t\x17\x08!\x04z\xfe>\xf8\x07\xf4\xe2\xf1]\xf1\xaf\xf2\x0c\xf3\xe7\xf1b\xef\x9b\xea\xdc\xe9!\xeb\xae\xeb&\xea\xc6\xe6\x8c\xed\xa1\x026$\xf6G&]\x99`wUUE\x17<\xe4:\xccB\xa3NDW\xcdW\xbcJ|2k\x15h\xfa\xf4\xe6\xf2\xdc\x89\xd9T\xda\xf9\xdbD\xdd\xcb\xde,\xdfW\xdd~\xd8\xdb\xd1\xae\xcd\xfe\xcf}\xd9\x9d\xe8\x85\xf9\xc6\tG\x16\x1e\x1cN\x19\x0e\x0f*\x03\xe8\xfc\xf7\x00\x9c\x0c#\x19\x87\x1f\x15\x1d\xc3\x13\xc9\x05m\xf6\xab\xe8=\xdf\xbf\xdc\xe0\xdf\xb3\xe4\xe3\xe6\x17\xe4<\xdf\xe4\xdb%\xdb\xd7\xdb\x1e\xdd\x85\xde\xe0\xe0\x95\xe5\x83\xec\r\xf6\xd5\x00\x00\x0b\x9e\x11\x99\x12\x86\x0eN\x08I\x03\x94\x026\x07\xef\x0e\xba\x15\x80\x17\xec\x12&\t\xb0\xfd4\xf4\xb0\xee\xb3\xed4\xefh\xf1\n\xf3w\xf2A\xf0q\xee\x14\xee\xa9\xef\x82\xf2\xae\xf5K\xf9\xca\xfd\xe9\x02e\x08h\rY\x10\x9a\x11\xed\x10\xf3\r\x91\n\xb0\x06\x1b\x043\x03\x06\x02\xe7\xff\x7f\xfb\xb1\xf5.\xf0\xa3\xeb\t\xe9\x88\xe7\xa3\xe7\x03\xe8\xf6\xe6\xd4\xe3\x1b\xdev\xd8\xf1\xd7\xb5\xe1\xd5\xf8\xba\x18L8yOPXGUmJ\x11@\xe8=\xceEOU\x05b[dJY\x02D\x8f+\x0b\x14\xfd\xffc\xee.\xe0@\xd8\xd5\xd59\xd7\x8d\xd8O\xd8\xe9\xd6\x9f\xd4P\xd0\x0f\xca\xf8\xc4\xd5\xc6P\xd4w\xeaz\x01\xfa\x10\xe6\x16l\x16\xbc\x12\xac\x0e3\x0b\x8a\n/\x0f\x8e\x17\t [#\xce\x1e\x02\x153\t\x10\xfdZ\xf1\xc2\xe5\xdb\xdbO\xd6\xf6\xd5\x02\xd9\'\xdck\xdd#\xdd\x92\xdb\r\xd94\xd6e\xd57\xd9\x0e\xe2\xa9\xee&\xfb-\x05w\x0c\xe2\x11\xe1\x15\x06\x18\x9c\x17\x0c\x15\xe1\x11F\x10]\x11\x10\x14]\x16\xf1\x15\x0e\x12$\n\xed\xfe\xc4\xf2\xda\xe8\x8f\xe4_\xe6^\xec\xb9\xf2\xf9\xf5\xaa\xf4\x95\xf0.\xec_\xea\x0b\xedD\xf4\t\xff}\t\x14\x11-\x14-\x13\x17\x11\x12\x0f\xad\x0eL\x0f\xe3\x0e\xab\r\x0f\n\xcc\x04\x00\xffx\xf8\x8b\xf3k\xef\xec\xeb"\xe8\xc6\xe28\xde#\xdb{\xda\xab\xdc\xbd\xde\xb7\xe2.\xe5\xf9\xe3\xd5\xdf.\xdaN\xde\xe9\xf0\x0f\x11&7\x01U\\dgd|Y\x00M\x01D:C\xbeLFZ/e\x7fdfT\xdd8Q\x19\r\xff\xba\xed\x84\xe3\xe1\xddT\xda\xde\xd8,\xd9\xc2\xd9\x9f\xd9g\xd8~\xd6\x01\xd5\x9a\xd3\xe6\xd2\x10\xd6\xad\xdf\x1a\xf1\x04\x06D\x17p\x1f\xfb\x1d}\x16\x0e\x0ek\x08\xcc\x06\x1a\t\x01\x0e$\x13p\x157\x12\x9c\x08\xac\xfa;\xec\x03\xe0\x83\xd7\xc1\xd2@\xd1\xca\xd2\x9e\xd6\x07\xdb\xbb\xdd\xc8\xdd!\xdc{\xda\xdb\xda\xed\xdd\xff\xe3\x0c\xed\xfe\xf7A\x03\x82\x0c\x80\x12\x95\x15P\x173\x19S\x1b\x12\x1c\xfb\x1a\xa3\x18&\x16Q\x14J\x12\xeb\x0e\x9f\t\xf5\x02?\xfb\xcb\xf2\xc5\xea7\xe5*\xe4,\xe7L\xec\x89\xf0P\xf2\xc2\xf1\xbd\xf0\x0b\xf1:\xf3(\xf8\xb3\xff\xd5\x08]\x11\xf9\x16[\x18\x11\x16\xea\x11P\x0e\xf5\x0b\x86\n\xf1\x08\xe6\x05\xec\x01P\xfc\xfa\xf5^\xef\x04\xe9\x14\xe5\x91\xe2J\xe0\xbb\xddC\xda\x1d\xd9=\xd9\x9b\xda\xed\xdb\xbb\xdcl\xdeB\xde\x8e\xdc\xb6\xdc\xb7\xe5+\xfd\xb7\x1fSBSZ\xf5b\xb4`\x1dY\x02R\xf1NIQ\xa6Y\xa5b\x1bfb^NK:1d\x17\xb5\x02\xfc\xf3\x17\xea\xce\xe1\xfa\xdad\xd6O\xd4\xd3\xd4\x19\xd6H\xd7\xc6\xd7\x02\xd7}\xd5\x9d\xd4\x86\xd7\xc2\xe0o\xf0\xb5\x02\xda\x11l\x19\xfe\x18\x86\x13\x8b\rm\n\x9b\ne\x0c \x0eM\x0eC\x0cU\x07b\xff\x85\xf5\xcb\xeb\x80\xe3_\xdd\x1b\xd9&\xd6\xcf\xd4J\xd5\xa5\xd7O\xda\x8e\xdc\xea\xdd\x19\xdf\xc0\xe0\x08\xe3\xf4\xe6\xae\xec~\xf4\xb4\xfd\xce\x06\xb9\x0e\x13\x15\xf8\x19&\x1eM!0"\xa0 
-\x1d\xb2\x19\xa2\x17h\x16r\x14;\x10\x89\t\xb8\x00\x80\xf6{\xec\xf5\xe4\x9b\xe2+\xe5\x8a\xea\xef\xefb\xf2I\xf2\xd1\xf1\x7f\xf2\x9c\xf5\xd8\xfa\xb7\x01\xe9\t\xaf\x10[\x15\xd0\x166\x15[\x13/\x11\xc3\x0f\xf9\r"\n\xb0\x05\xd1\xff$\xfa\xd8\xf4\x92\xeec\xe8+\xe2k\xdd\x12\xda2\xd7t\xd5\x18\xd4\xcb\xd3\x14\xd5?\xd7\x7f\xdc\xe1\xe2\xd9\xe8\xa1\xed\xfa\xef\xce\xf23\xf6#\xfa\x80\x01\xb5\x0f\x95\'SF\xc2`\xe7o\xaep\x1eh\x8b^\x80V2S\xe7R?SqR\xcbK`>\xb9*\xf3\x12!\xfc*\xe8R\xd9\xab\xcf\xfc\xc9\xab\xc8"\xcbV\xd0A\xd6\x03\xda\xb3\xda\x1b\xdav\xda\xf0\xdeJ\xe7\x19\xf2p\xfd\xed\x07p\x10l\x16\x16\x19n\x18\x91\x15\xf2\x11~\x0e\xca\t\xbc\x02\xee\xf9\xc8\xf2\xc6\xef\xf3\xef\xdb\xefX\xec\xb5\xe5B\xde\xf8\xd7\xad\xd38\xd1S\xd1\xa0\xd4\xd7\xda\xe2\xe1\x86\xe7%\xeb\x1d\xee\xa1\xf1\xa8\xf54\xfa\xfe\xfe\xe2\x04\x9e\x0c\x0b\x16Z\x1f\x11&N(Y&?!\xfc\x19\x87\x11\x02\tf\x02\x1e\xff\x13\xfe\xd0\xfc\xf7\xf8\x0f\xf3\xb6\xec\x7f\xe7\x99\xe4\x8d\xe4\x00\xe8:\xee\xb4\xf6\xe9\xff"\x07\x96\x0b\xa6\ro\x0e\xfb\x0e\xc2\x0f\xf4\x10}\x12\xb5\x13\xa1\x14\xff\x13.\x11\xa4\x0b\xc3\x03\xaa\xfb\xb3\xf3R\xed\xfa\xe8\xf1\xe5\xec\xe4\xf4\xe3\xe2\xe13\xde\xca\xd8\xcd\xd4\xb9\xd2\xe3\xd3\x19\xd8n\xdd(\xe4\xce\xea\xbb\xf0\xaf\xf5\x94\xf8%\xfb\xeb\xfdX\x01\x03\x06G\t\x03\n\xa6\x06\x90\x01N\x01g\x0c\xda#uA\xf1Y\xe9e\x03d\xffXNL5C9@(BvF\x87H\xe8C\xc76\xbd#{\x10\x87\x00\x85\xf2\xad\xe3e\xd45\xc9e\xc6\xb1\xcc\xa9\xd7\xa9\xe1W\xe7\xa4\xe7a\xe4(\xdfa\xdb\xb8\xdc\xbb\xe5<\xf5\xec\x05b\x11%\x15r\x13\x81\x10v\x0e\x9d\x0bj\x06\x8a\xffz\xf9V\xf6\x17\xf6u\xf7O\xf9e\xf97\xf6\xf2\xeex\xe4\x95\xda\x9b\xd4\x1f\xd5\x9e\xdb#\xe5\x07\xee\xa6\xf3\xde\xf4\xce\xf2\xe4\xef\x8c\xee\x1b\xf0\xd7\xf3\xd7\xf8\xe8\xfdv\x03\xd3\t\x1b\x10h\x15\xde\x17.\x17\xc4\x13T\x0e\xfb\x08\x06\x05\n\x04\x9b\x05B\x07\xcc\x07Q\x05[\x00\xb0\xfa$\xf5\xd4\xf1\xd3\xefY\xf0f\xf3c\xf7\xb1\xfc_\x01\x89\x05\x03\t\'\x0b\x9f\x0c\xcf\x0cD\x0cH\x0c\x9f\x0c-\r\xc5\x0c?\n\t\x06\xcb\x00\x1c\xfb\xd9\xf5\xad\xf0\xe5\xeb\x1a\xe8\x81\xe5\x11\xe41\xe3\xa0\xe2\x93\xe21\xe3\x9b\xe4\x16\xe6\xeb\xe7\xb3\xea&\xee/\xf3\xc5\xf7Z\xfb\t\xfe\xac\xff(\x01X\x02\xd7\x03\xdf\x06j\n\x08\r\xf9\r\x84\x0c\x18\nj\x05\xbd\xfe\xc5\xf8a\xfa\xea\x08?$;B\x85V7Z\xb8NF?\xa64y2\xea5\xba9|;X9S2\x05\'\xdb\x16{\x05\r\xf4a\xe3\xe9\xd4t\xc9?\xc6\xe9\xcb=\xd8\x0f\xe5@\xeb-\xe99\xe2R\xdb`\xdae\xe0\xf5\xeb\xa4\xfaD\t\x89\x15\x9c\x1d\xb7 \x06\x1f\xa3\x1a\xbb\x14.\x0e4\x07\xe2\x003\xfd\xd3\xfc\xb8\xfek\xff\xa6\xfbL\xf2\x1d\xe6\x0c\xdb\xce\xd3\x19\xd1\x8b\xd2\x0f\xd7|\xdd\xae\xe3i\xe8\x05\xebu\xec\x88\xedx\xee*\xef\xc9\xef%\xf2~\xf7>\x00\'\n\xee\x11/\x15+\x14\xd0\x10\xaa\r\xf4\x0b\xe3\x0b\x10\rD\x0ey\x0e\xd1\x0c\xa7\t\x17\x06\x9d\x02\\\xffL\xfb\xa3\xf6\xd2\xf2F\xf1\xc7\xf2\xa7\xf6s\xfb$\x00v\x03\xcd\x04\x0b\x05\x01\x05\xe8\x05=\x08\x9f\n\xa3\x0c}\r\xcc\x0c\xc1\n\x06\x07\x15\x02J\xfc\xfc\xf6\xa9\xf2\x89\xef\xe6\xed\x8e\xec\xa9\xeb7\xeb\x10\xeb)\xeb"\xeb\xe9\xea\xfd\xeb,\xee\xff\xf0\xff\xf3\x96\xf6,\xf9\xfb\xfbd\xfd*\xfeD\xff\x14\x01\x1a\x04\x06\x06\x83\x06\xbc\x05L\x03\xa3\x00\x16\xfeP\xfbR\xfa\xee\xf6b\xf0\xb5\xea\x9d\xe9\xfc\xf48\n\x0f 
?0<5\xfc2a0\x151o5\xf79X=\xf9=\xa3=\x15;\x8a4o,Q!\x9a\x13\x10\x04%\xf4%\xea\xb1\xe8z\xed_\xf4\xe0\xf6\xfd\xf3\x0b\xee\x11\xe8T\xe5!\xe6H\xe9\xc7\xed`\xf2\xb5\xf7D\xfeb\x05\xd2\x0b\xa0\x0e\x16\r\xa8\x08\x05\x04\xb5\x01\x1c\x02\xcc\x03\xd1\x04\xbe\x03\xe5\xffi\xfat\xf4\xe4\xee\xef\xe9Z\xe4\x1e\xde6\xd9^\xd7<\xd9D\xdd\xc8\xe0;\xe2\xf5\xe1q\xe1\xbe\xe2>\xe6t\xeb\xa3\xf1\xb8\xf7\xcd\xfc\xbf\x00\xbc\x03\xe6\x06\xb9\n\xd0\rb\x0f\xf8\x0eq\x0eP\x0f_\x11<\x13\xdf\x12\x12\x10+\x0c\x0e\x08\xcb\x04Y\x01_\xfeA\xfc\x9a\xfa&\xfa\x12\xfa\x01\xfb3\xfd`\xff&\x01\xf1\x01i\x02\x13\x04\xb8\x06\xb1\t5\x0cH\rc\ry\x0c[\nE\x07\x7f\x03\xb5\xff\x8e\xfc\xe7\xf9\xd2\xf7\xc6\xf6\xc4\xf5\x91\xf4\x98\xf2\xc7\xef\xa1\xed\xfd\xebq\xeb\xbe\xeb\x1f\xec,\xed\x83\xef,\xf2\xe1\xf4N\xf6w\xf6\xa3\xf6W\xf7\xae\xf8A\xfa\xbc\xfb`\xfc\xf6\xfb\x13\xfa\xd8\xf6\xfe\xf3\xcf\xf31\xf6u\xfa8\xfe\x8e\xff\xc0\xff\xdf\xfeT\xfeI\xfe\x1b\xfe\xe5\xfd!\xfd2\xfd\x84\x00\xc4\t%\x18\x07(!4O:\x9e;\xf4:\xbc;\xdc=\xe0@rC\xb7DeD\xe9@\x84:\n1s%\x1f\x19\xfc\x0b(\x01\xb7\xf9\xaa\xf5G\xf4\xab\xf1t\xed\xf5\xe72\xe2\xa3\xdd\x1d\xdb\xd4\xda)\xdc\xae\xdeo\xe2\x0b\xe7e\xec\xe1\xf1>\xf6\xaa\xf9\x87\xfbk\xfcY\xfd\x8d\xfe\xd6\xff(\x01\xfb\x00l\xff\xb3\xfc-\xf9\x82\xf5\xa8\xf1\x8c\xed\x08\xea\xac\xe7U\xe6\xa5\xe5\x8d\xe4\x0f\xe3\\\xe1^\xe0z\xe0\xc5\xe1\xb7\xe3\xd9\xe5P\xe8t\xebJ\xef>\xf4U\xfaP\x01\xdc\x08e\x0f\xed\x14\xe1\x18\x87\x1b\xc8\x1c\x01\x1c\xd6\x19\x9c\x16\x80\x13p\x10U\r$\x0b\x87\x08i\x05\xb4\x01\x95\xfd\x8f\xfa\x07\xf9\xbe\xf8W\xf9\x93\xfa\xc7\xfb\xd3\xfc\xba\xfd\x01\xfe;\xfe=\xfe\xa6\xfdo\xfd|\xfdD\xfe\xf8\xff\x00\x01\xa1\x01\xfe\x00\xcd\xff\xcd\xfd\xef\xfb.\xfb\xfd\xf9\x97\xf8\x07\xf7s\xf5z\xf4\xb7\xf3a\xf2\x86\xf1\xb3\xf19\xf0\x8e\xed\xcd\xec\x01\xee"\xf1k\xf46\xf6\xde\xf7-\xfal\xfc\xf9\xfd!\x00#\x02\xb9\x02\x0f\x03^\x057\x08\x18\x0c\xb5\x10\xbd\x12\xff\x12\xbd\x11:\x0f\xfb\r\x86\r\x07\r\x85\rE\r\xcd\x0b\x15\n;\x08k\x06\x9c\x05<\x05\xcd\x04\x15\x04\xdd\x02g\x02K\x03\xf4\x05\xab\t\xdc\r\xff\x11I\x16r\x19\x1f\x1b\x0c\x1c\x81\x1c\xe6\x1c\xba\x1d5\x1f\xb6 
\xdf!\xd6\x1f\x05\x1cS\x17:\x11\xc0\x0cc\t\xcd\x06\x02\x05o\x02\x90\xfe\xb9\xf9w\xf5\x98\xf1\xa0\xedO\xea\x0f\xe8\x06\xe7\xb7\xe6V\xe6Q\xe5\xab\xe4\x9f\xe4\xd7\xe4l\xe5V\xe6_\xe7\x89\xe9K\xec\xc4\xeeh\xf1(\xf3\x94\xf4;\xf55\xf5\x03\xf6\x95\xf7\xdd\xf8\x96\xf9\x90\xfa\x0f\xfb\xe0\xfb\xfe\xfcq\xfd\xf0\xfd\x06\xfe\xca\xfd\xad\xfdH\xfe\x81\xff\x99\x00\x16\x01\xdb\x00.\x00\x1e\x00Y\x01\xb0\x01|\x03@\x05\xec\x04C\x05{\x06\xec\x05]\x06\x19\x06\xda\x04\n\x05\x82\x05\xbb\x05>\x04\x92\x02\x8e\x02\r\x02\xfa\xff\xbb\xfep\xfd)\xfc\x0f\xfcR\xfb\xb9\xf8&\xf8\xdd\xfa*\xfb$\xf9\xf3\xf8,\xfa\x15\xfd\x00\xfe4\xfe\xe0\xfb\x8c\xfa/\xfc\xd5\xffA\x01Q\xffx\xfdp\xfd\x1c\xfe\x92\xfd\x93\xfeh\xfc\x92\xfa\xa8\xfb\xa2\xf9!\xfa\xaf\xfcQ\xfbN\xfd\x8d\xfe\x10\xfc\x0f\xfd)\xfd\x81\xfc\x12\xff[\x02\xc0\x04\xc2\x04G\x045\x05\xab\x07\xd6\x07\xa6\x06\xc3\x07\xc3\x07\x96\t\xb5\t\xcc\nH\x0b\xf4\x0b\xd7\r\'\r\t\x10\xca\x0c:\n\xbe\x0b\x0c\n\x0f\n\x8c\nM\x08\xee\x07\xf7\t\xc3\x07\x08\t\x7f\x07#\x02C\x02\x85\x02\xfa\x03!\x059\x08t\x04\x16\x01j\xfe\xac\xfe\x87\x02\xa9\x01\'\xff\xa7\xfci\xfb\xe7\xfb`\xff_\xfb>\xfc\x8b\xfb;\xfaa\xf9G\xf51\xf8\x14\xfa\x89\xfb\x13\xfa\xf7\xf8\xc1\xf9\xf1\xf9\x8d\xf9(\xfaS\xfa\xb6\xfa8\xfap\xfb\x96\xfd\x94\xfe\xed\xff5\xfe\xa8\xfc\xfc\xfc[\xfe;\x00\xe2\x00\x05\x01)\x02w\x01\xb9\x00\xbf\xff\x8b\x00\x1b\x01D\x02\x1b\x02Z\x04\xa2\x03\xe2\x02V\x04\xf7\x03*\x05C\x07\xe1\x04\x13\x01o\x06D\x07\x93\x01\xd9\x01l\x05\xbc\x03\x87\xfd\xe9\x00p\xfe\xd4\xfaV\xfe?\xfe\xe3\xfb\xb8\xfa3\xfd\xf5\xf75\xf6\xef\xf5\xb7\xf7\x8e\xf6\xfd\xf5x\xfb2\xf6(\xf3\x94\xf6B\xf64\xf5~\xf55\xf9\xc6\xf7B\xf6W\xfb\xfb\xf9\xc1\xfaZ\xfb\'\xfa\xda\xf7\xd2\xf7:\xf72\xfat\xfb\xa6\xfa\x0f\xf9\x1a\xfd\xb3\xfaI\xfa/\x00\xf4\xfd\xda\xff\xe9\xfd\xb5\x01V\x04z\x06\xdd\x05\x16\x06\xc9\x05\xfa\x07\x85\x0b\x1f\nq\x08\xd7\t\xf7\r\x9f\x0cL\n\x83\t\xe7\n&\x0c\xba\x06}\x04>\n\x8c\n\x15\x07\xc0\x068\x08&\x04\x06\x03\x95\x05\x1a\x05\xee\x04\xdc\x02\x84\x01\x06\x02\xec\x06\x0c\x05\x99\x02Q\x00\xd4\x00>\x000\xff\xed\xfe+\xfa%\xfd\x1d\xfd\xeb\xfb\xb7\xfb\x97\xfc\xb4\xf9$\xfdU\xfb\x08\xfay\xfb\x01\xf8\x16\xfbr\xf9\x18\xf8\xb1\xf7\x94\xfb\x81\xfc\x1d\xf5I\xf9\xb1\xfau\xfet\xfb\x9b\xf8\xcb\xff\xaf\xfd\x92\xfeU\xffh\x00\xca\x01\xec\x02~\x00\xc7\x02t\x06\xa9\x03\x1d\x03t\x04\x9d\x02H\x02%\x06\xcb\x0c.\x06\x99\x04F\x02\x1e\xff!\x08\x91\x0cY\x06\x06\x04A\x07\xd3\x03\x15\x06~\x03N\n\xf7\x02\x83\x03&\x08\x9d\x06\xae\x03R\xfd\xfa\x06W\x00\xdb\xfd\xdd\xfe\xb2\x00\xcf\xfd\x06\xfdT\xfe\xd0\xf8+\xfa\x15\xfb\xa7\xf6\x0c\xf7\xeb\xf8\xd8\xf7\xf5\xf5@\xf7\x12\xf9\x0f\xf6%\xf9\xa0\xf5\xf7\xf8b\xfbd\xf8\xe5\xfc)\x00f\xfd\x9f\xfd6\x00v\xfc\x92\x01\xc0\x03\xf3\xfd\xc1\xff!\xfe\xeb\xfdz\x02\x91\x02\xca\xfe\xf1\xfe\x01\x00\xe8\xff6\x06\xa4\x06\x9e\x02~\x01\xbb\x06W\x084\x06\xb2\x03\xee\x04\xb3\x065\x04q\x07\xc3\x046\x03q\tK\x05\xe3\x01\xce\x01\xb9\x00\xc2\x03\x9c\x03g\xff\x90\xffp\xff\xba\x02M\x01\x0c\x01\xbe\x02\xcb\xfeE\xfd\x8d\x00N\x00T\xfc\xe6\xfd\xc7\xfd\x14\xfe\xf8\xff~\xfe\x8e\xfet\xfdP\xfeX\xfa\x13\xf9\xa6\xfdZ\xfeB\xfe\xc2\xf87\xfb`\xfaE\x00\x85\xf8\xc2\xf6]\xfb\xdd\xf8\x14\xfc\xba\xf9\xec\xfa\xa0\xfd\xea\xfa|\xf9\x99\xff\xc9\xfc`\xfc\xa1\xf3\x19\xfd\t\x00\xac\x03\x85\x04.\xff\xbf\xfft\xfe\xe9\x06q\x00\xbc\x030\x06$\x06\xa9\x04\x9c\x07\xb6\tg\x05.\x05N\x03\xd8\x00\x92\x05\xa6\x04\xa5\x02\x87\x07\x80\x04\xcf\x02\xf6\xfe`\x04\xcd\x05\x0f\x00\xfc\x03\xc3\x00\xeb\xff\x8b\x04\xb1\x08&\x00\x03\xfdX\xfd#\xfe\xaa\x01\xe1\xfd\x19\x03\xc6\x00\xe4\xfc\xda\xfb\xdf\xf2\xf8\xf8=\xfd:\xfa\x96\xfdt\xf9 
\xf9!\xf8p\xf8)\xf8\xa7\xf8\xa8\xf8\xf5\xf9`\xfc\x94\xfd\xd6\xfb\xd5\xfb\x81\xfc\x96\xfe\x8f\x02z\x01\xf0\x00*\x00\x12\x05\x89\xfa@\x02#\x02\x12\x01\xb8\x02\x97\x02O\x02}\xff\xcd\x01\x82\xf8>\x038\x02v\x03H\xffQ\x00\x8a\x04\x99\x04A\x02#\x01m\x042\x03s\x03\xeb\x03\x1e\x08\t\x05\xed\x02_\x01S\x06\x0c\x07\x9f\x04\x7f\x07F\x04\xf9\xff.\x03H\x06\x85\x05J\x06\xc2\x01\x16\xfe\xac\xfe4\x01\xb2\x01\x10\x00\xf1\xff8\xff\xb2\xfe\xee\xfd\x83\xfbO\xfb\'\xfb:\xfd\x83\xfc\x8b\xf9\x1d\xfbN\xffe\xfc\x13\xf9\x96\xfc<\xfb\xb3\xf9_\xf9\x82\xf9I\xfcH\x014\xfbm\xfa\xad\xfa\xcf\xfd\x9a\xfeK\xfb\xa4\xfe\x82\xfd.\xfe \xfb7\x00\x1c\x02\x00\x02\xd4\xfeE\xff\x91\x03`\xffr\x03R\x02/\x03\x1e\x06\xa4\x02\xb0\x05\x04\x08\xf5\x05\x0f\x005\x05\x9e\x08\x91\x05w\x06i\x05\x03\x03"\x07%\t\xf6\x05R\x04(\x02v\x00\x07\x02*\x03\xb9\x02\xf6\x01\x14\xfd\xf2\x00+\x00\\\xffT\xff\xd0\xfa\xd7\xf9!\xfc\xea\xfd\xa7\xfe\x93\xfc\xa4\xf9L\xfd\x88\x00P\xfa1\xfc\xf2\x02a\xf9|\xfa\xba\xfb\x95\xfa\xd8\xfdI\xfe\xf0\xfd\xbd\xfa5\xfaj\xfe\xe0\xfe\n\xfb\xfa\xf9\x9d\xfb\xbe\xfd\x9c\xff:\x02r\xfcd\xfc\xd7\x00\x16\xff\xc8\xff@\x01\xf6\xfe\t\x01\xdb\x01\x0c\x03\xde\x02?\x00\xc2\x04v\xfe(\x02e\x05\xdb\x01\xf7\xfe\x92\x03\x98\x030\x02U\x05\x01\x04\xa7\x02u\x02\xfc\x01\xca\x03\xd9\x07\xa6\x01\x82\x03\xce\x02\x05\x036\xff.\x04W\x02R\x01\n\x05M\x01Q\x03\xeb\xfeW\x03\xb9\x00\xbf\x01\x9c\x00u\xfdW\xff\xa5\x03a\x00\xab\xfd\xf5\xfb\xa1\xfb\xc6\xfdl\xfc\xf0\xfb\x82\xfcG\x00X\xfb\xae\xfaF\xfa\xa9\xfb\'\xfa!\xfb|\xfc\xe7\xfa\xe0\xfa\xc3\xfca\xfd\xb1\xfbE\xfb\xb1\xf9\x88\xfd\x00\xfe\xa4\xfd\xbf\x00\xaa\xff\xf1\xfd\x82\xff\x12\x01\xe6\xfd\xd9\x01`\x04`\xfe\x89\xffg\x01D\x04=\x02!\x05E\x05\xfa\x02Q\xff\x7f\xffN\x06\xc4\x07\xc7\x08W\x012\x04\xad\x00\xf1\x03\x11\x04H\x03\xc2\x02\x85\x02\x06\x03\xae\xfe\x00\x07\xb3\x00\x1e\xfc\xa9\xfd\x12\x01r\x02G\x02\x91\xfe\xf8\xfc\xa4\xfd\xea\xfc\xf1\x01\x95\xff\xf6\xfc\x17\xfd\x8d\xfcd\xff\xdc\xfcC\xfdS\xfc\xe3\xfc\xf2\xfb\x97\xfc\xb8\xfcG\xfe~\xfe\x16\xfd\xe4\x00s\xf8\xb2\xfab\xff\x16\xffE\x00c\x00!\xffR\xfa\xc4\xfb\xd3\x00_\xfe/\xffT\xfe.\xfd\x93\xffW\x00\xac\xfb2\xfd\x94\x01\x84\xfc\xab\x01}\x01\x9a\x01\xe4\xff>\x01\x8b\xff.\x03`\x03\xeb\x00\x8f\x03\xfb\x04^\x046\x03\x13\x05\xcf\x02\xd2\x03\x04\x02\xdf\x06U\x03\x93\x04\xa2\x01a\x01\xb5\x04\xc4\x04{\x02\xe8\x02#\x03\xd6\xfd2\x00 
?\xf8\xce\xf6\x1a\xf6\'\xf5\xeb\xf6\xac\xf9\xda\xf8\x87\xf3\x84\xee\x14\xefk\xf3\xc2\xf6\x02\xf7\xd5\xf4\\\xf1\xe8\xeeW\xf0\xf5\xf5\x82\xfa\xeb\xf9S\xf70\xf6B\xf6\x1b\xf8\xf7\xfc\x9f\x01\x87\x01\xbb\xfd2\xfd\xd8\x00t\x03\x91\x04#\x06e\x07!\x06!\x05\x9b\x07\xa0\t\x16\x08\xd0\x068\x07\x02\x08S\x07\xdc\x06\xd1\x05\x91\x01\xab\xfd\x0c\xff\xf8\x01\xb6\xff\x85\xfb\xf3\xf8D\xf5\xc6\xf0|\xf1\xaa\xf6\x03\xf7\x83\xf0\x02\xebk\xe9O\xeb"\xeew\xf0\x89\xf0\xf3\xed\xcd\xeb\xfc\xeb\xdb\xed\'\xf2\'\xf6c\xf6\x1d\xf4\xf9\xf2\x05\xf5g\xf8\x19\xfcT\xfe\xcc\xfel\xfd\t\xfc\x9c\xfe\xf2\x01\xb3\x03]\x05\xe7\x07\xd2\x08i\x05J\x01@\x02\xc2\x08\xa7\x0e\xb8\x0fh\x0b\x13\x06>\x04^\x06\x7f\t\x8a\x0f*\x14\x9c\x112\x0bk\t\x87\r\xe6\x0f\x98\x14\x12 \xd3,\xe6(\xa4\x18\x7f\x0f?\x19\x9c,\xe26.8\xd42}(\xce\x1b\x9e\x18o%K2\x890f!\x8d\x13q\x0cc\x08\xed\x08W\n\xae\x07\x1e\xfe\xee\xf1\xdb\xe8\xb8\xe3\xd1\xe1\xa8\xdfo\xdcT\xd8\xe8\xd5=\xd4\xa2\xce\x06\xc9D\xc8\xbc\xcd\x93\xd3\xd4\xd6\x03\xd8C\xd7\xcf\xd4\x87\xd4\xfa\xdb+\xe6F\xee\xc0\xf1N\xf1\xe5\xf0C\xf1t\xf5\xb9\xfe\xe3\x06\x8a\n{\t\x1a\x07\xf0\x06\x80\t/\rC\x11?\x13\x9f\x12\xea\x10\xd4\x0e\x9f\rx\r"\x0e\xb7\r\x06\r9\x0b%\t\xf6\x06\x8a\x03\xf1\x00\xf3\xff\x89\xff:\xff\xb1\xfd\xb7\xfb\xeb\xf8 \xf5-\xf4\x19\xf5\x91\xf7\xee\xf7\xba\xf6N\xf5B\xf3\x0f\xf2j\xf3\xfb\xf6r\xfa\'\xfb\x8a\xf9\xb3\xf7A\xf7N\xf8\xc8\xfa\xbb\xfd\x83\xff\xf4\xfe:\xfc\x1e\xfa\xa0\xfa\xaf\xfc|\xfe\xc1\xfe\x9f\xfd\xd8\xfbC\xf9\xce\xf7\x9a\xf7R\xf9\x9a\xf9\xdf\xf7c\xf5\x83\xf3\x10\xf3|\xf2\x12\xf3\x87\xf5d\xf8 \xfa\xed\xf9Z\xf9\x92\xfa\xbd\xfd\xe7\x04;\x11C#\x93+\x03"\xec\x12R\x16}1\xc4I\xc4PQLKG\x8a@F8\x87<\x99M\x9eY\xd1O\t;E.\xe8)\xe2\'\x91"\x9c\x1dC\x17v\n\xce\xfa9\xeeH\xe7N\xe2\x8f\xdb;\xd3\xbe\xcd\xc6\xcb\xc1\xc6\xe3\xbd<\xb6\xa0\xb57\xbb\x01\xc1\x8e\xc4\x0f\xc7\xe9\xc6;\xc5\x92\xc6*\xce\xf6\xda\xf7\xe5f\xec~\xef\xbe\xf0[\xf2y\xf8\xbd\x01@\n\x0c\x0f\x97\x11\t\x13\xa4\x12\xff\x11\xaa\x14\xa9\x18\xdb\x17\xa1\x13L\x12R\x13\x1e\x11\xc3\n\xcc\x05\'\x05p\x03\xea\xff&\xfd\xca\xfbe\xf9\xe3\xf3_\xef:\xef1\xf1\xe7\xf2)\xf2\xc8\xef\x9c\xee\xe2\xee&\xf1\xf6\xf3\x7f\xf7\x9a\xfa\x81\xfc\x88\xfc\xfb\xfd\xd2\x01g\x05-\x07{\x08\xb1\nC\r\x17\rE\x0b\x04\x0b\xf2\x0b2\x0c\x1b\x0b\x18\t\xc1\x06\xda\x03\n\x00H\xfe1\xfdE\xfb\xbb\xf6\xdc\xf1\xb9\xed]\xeb\x07\xea3\xe9S\xe7\xb9\xe3\xdf\xe1\xae\xdfO\xde0\xdd\xf0\xdf\xc1\xe6\x0b\xe9\xde\xe7(\xed$\xf63\xf7:\xf1\xae\xfd\xc0#_>\xae1^\x18\xf6\x1b\x847\xbdJ\xe7O\xeb\\zl\x04ffJ\xfd:3IO[nZrL&Ex?\xc1+\xbd\x12\x05\t0\r\x80\n\xc2\xf9\x15\xea\x06\xe5.\xde_\xcb\xa1\xb9\x95\xb4\x89\xb9\x93\xbev\xbd \xbc\xe2\xb8l\xb2\xb2\xae\x95\xb3[\xc0V\xcf\xb7\xdb\x83\xe1h\xe2$\xe2?\xe6G\xf0\x8f\xfb\xb9\x08\x87\x15Z\x1ak\x17\x91\x12\\\x14<\x1a\xfb\x1d\x9b\x1f\xc1!f!_\x1b\xa7\x12\x1c\r\xa4\n\xc4\x06\xd0\x00<\xfcB\xfbj\xf9\xec\xf3\xf6\xea\x14\xe2\x1d\xdeT\xde\xc5\xe0\x93\xe42\xe7\xe2\xe6\x7f\xe2_\xdd\xbd\xde\xcb\xe6\x92\xf1\xca\xf8\xc6\xfb\xb1\xfc{\xfd`\xff(\x04\xe8\n\x17\x12\x8d\x15n\x155\x15<\x16\x94\x178\x17\xb1\x14\x08\x13\xed\x12\xc9\x12\xc0\x10h\x0c\xce\x06p\x01\x8d\xfc\x8e\xf9\x95\xf8\xa3\xf7\xce\xf5\x8a\xef@\xe94\xe4\xe1\xe1\xf6\xe0\xba\xdfS\xe1F\xe10\xe0\x12\xdf\xa2\xe0\xbd\xe4\xa3\xe5\xa9\xe6\x04\xe8\xa6\xe8\xde\xeb\xcd\xfav\x1d96\xdf/\xc7\x14\xfe\x08\xa2\x1e\xe3>}Z\x9amJs_bqC\xf56\xc9GHb*k|a\tQ\x80=\t,w\x1e\x8e\x16&\x13:\x0c\xee\xff4\xf1r\xe4Y\xdbG\xd1\x08\xbf3\xacR\xa5z\xab\xa3\xb7]\xbb\xa3\xb6y\xad\x9d\xa5\x80\xa4\xbd\xaeA\xc3O\xd9\xc6\xe7\xe7\xe8\xcf\xe5\xf7\xe5\x05\xedH\xfb5\x0e\xce V+o(\xdf 
\x8c\x1dJ#t+\x9d0\x990\xab,\x01&\xfa\x1c\xb2\x15M\x11=\r-\x06V\xfc\xaf\xf5\x0f\xf3\x83\xf0\x02\xeb\x96\xe2\x80\xdaF\xd4\xcf\xd1\x90\xd4I\xdaV\xdf|\xe03\xde\x93\xdbj\xdc$\xe2\xa8\xec\xd6\xf6\xf5\xfdj\x01s\x03\xbf\x04t\x07E\x0c\x9b\x12\xb0\x17\x15\x1a\xc2\x1a\xe5\x1b\xd4\x1b\x86\x19\xc4\x15\xcf\x12I\x11/\x10]\x0eb\x0b\x14\x07\x0f\x00\x19\xf8\xe5\xf0\x02\xed\xb2\xebS\xeb\x94\xe9\xb9\xe51\xe10\xdd\xab\xda\xfc\xd8\xe1\xda\xbb\xddL\xe1\xa0\xe3\xa0\xe4H\xe7\xe6\xea\x19\xef\xd2\xf3\x15\xf8N\xfb\xa0\xfaU\xf8\x12\xfd\x9b\x13\xa08\x95S\x80M\x99,2\x16\x96&tM.l\x92w\\w\xb5n\xdcW\x97>\xfe8iIXYMU\x8fA\xf3,3\x1fP\x11l\x00G\xefJ\xe3P\xdce\xd6\xe8\xce\x95\xc8r\xc1\xa1\xb2\x8a\x9e?\x8f\xc6\x90y\xa0\x18\xb2?\xbc\xbc\xbc4\xb7\xd6\xb1\x9f\xb3r\xc0\xb0\xd6\xb4\xee\x8d\x00\xe0\x07\xe4\x07\x83\x07\x96\x0cN\x17\xf5$X/\xca3\xe23\xc83\x022\x9f-s(?%\xe7 k\x19\x99\x12u\x10\x8f\x0e\xd7\x06\r\xfa\xd1\xed\x1e\xe6\x96\xe1\x1d\xe1^\xe3\x83\xe4\x11\xe1)\xdb\t\xd7k\xd6\xd9\xd8\xc7\xdf\x85\xe8\\\xeeQ\xf0\x98\xf2-\xf7\xc1\xfbe\xff\'\x03\x12\nY\x0f[\x12m\x14\x8b\x17s\x1a\x95\x1a\xf3\x17\xcf\x15\xb4\x14\xcf\x14#\x15\n\x144\x108\n\xf2\x02(\xfc(\xf7\xae\xf3\x8e\xf2\x82\xf0k\xec}\xe5`\xdeg\xda{\xd9@\xda\x8d\xda\xdb\xdbv\xdcn\xdc\xd8\xdc\xed\xde\x1d\xe3J\xe7v\xe9X\xeb\x9e\xeeh\xf4\x90\xfc~\x04\xba\x08\xc1\x08\xc8\x06\xbb\x05M\x06\xa4\r\xcd&\x80M\x95f|^\xed@\x11/]7vK\x8d^|q\xff\x7f\xbc{\x88b\x88D_5\xcc5\x9a7\xba2\xdd\'\xb3\x1bZ\x10)\x04\xaf\xf4V\xe2\x90\xcf)\xbe\x8c\xaf\xc5\xa5\xa1\xa6\xa4\xb1\xc9\xbb.\xb9O\xa9\xbf\x98\xa6\x91\x05\x97\xbd\xa5\xeb\xba\xc8\xd2\x05\xe5\x13\xeb\xb4\xe6\xe7\xe5\xc1\xf0@\x01\x08\r\xed\x14z\x1e\xc2(\xbe.p1\x0e4@6z2\x10\'\xee\x19D\x11\x9e\x10\xe9\x15\xd6\x1a%\x18,\x0c\x89\xfc\xdb\xeeV\xe3m\xdbI\xdbt\xe2\x91\xe9\xe5\xe8\xba\xe4\xff\xe3\xcd\xe5l\xe5E\xe2\xbb\xe1\xf2\xe6\x8c\xee\x06\xf7\xb7\x016\x0c(\x12i\x11T\x0c3\x08\xef\x08\xab\x0e\x0f\x18i\x1f\xb4!\x8e\x1f\xf0\x1aI\x15a\x10\x12\r\xc5\n\x05\x08\x0e\x04\xc2\x00\x1a\xff\x07\xfd\xb8\xf80\xf2\xe0\xe9A\xe1\xb8\xda7\xd9\x9f\xdc;\xe1\xd8\xe2\xe7\xdf\x93\xdb\xf9\xd6\xa7\xd4\x91\xd7\xa2\xdd\x90\xe4\x01\xea?\xed\xb2\xefS\xf1\x8a\xf21\xf6\xf5\xfb\xc1\x01\x9c\x07!\x0c\xbd\x0f$\x11c\x10\xaf\x0f\x89\x13v \x036OJ\xdcQoK\x9eA\\>\xe0A>G\xdcMgW\x04_\xb0\\`O|@\x816\xb0/\x8f%Q\x18\x9a\x0c\xdd\x03t\xfc\xf1\xf4\xb3\xed\xe5\xe4A\xd9\xd8\xca\xce\xbc\x9e\xb1_\xab\x10\xac\x00\xb34\xbb\x8e\xbf\x9d\xbe\xab\xbb\x85\xba\x02\xbd~\xc4Z\xd0I\xde\xb7\xea\xef\xf4\xeb\xfcQ\x04\xa9\x0bN\x13\x82\x19v\x1d\xab\x1ew\x1e\xc6\x1e) 
\xa3"0%\xf5%}"\x86\x1a\x95\x0f\xd8\x04\xaf\xfc\x16\xf8\xcf\xf7\xde\xf8\x00\xf8]\xf3\x17\xecf\xe5\xd9\xe0}\xde"\xde\xab\xde\xa8\xe0H\xe3\xc8\xe6E\xeb\x82\xf0\xfc\xf5\x07\xfa\xbc\xfb>\xfc"\xfd\xdb\x00\x17\x07\x83\x0eC\x15+\x19p\x1a?\x19\x14\x18\xb8\x16\xe1\x15=\x16\xb5\x16\x02\x16i\x13\x01\x10?\r\x93\n\x16\x07z\x02\xd6\xfcL\xf6\xd2\xef?\xeb1\xe9\xf0\xe8;\xe8\xc9\xe5&\xe2\xac\xdd\x19\xda\x92\xd8p\xd9*\xdci\xdf\x88\xe2$\xe5\xf8\xe6#\xe8\xcb\xe9;\xed$\xf3\xd0\xf9F\x00\n\x05\x98\x07\xaa\t\x99\x0cP\x11\x93\x16\\\x1a\xd9\x1a\xf9\x19\xdf\x19(\x1e\xb7\'\x9c4\x16@\xa7E\x8fD\n?\xb78e4-4w7\x86;\x9e<\x109\xd31\xb2(\xbd\x1f\xd6\x17+\x10\x0b\x07\xf6\xfb3\xf1!\xe9\x02\xe4X\xe1\xb2\xdf\xb3\xdd4\xd9\xb5\xd1\xa9\xc9\xf5\xc34\xc2\x9d\xc4\x18\xca\xef\xd0\x9e\xd6\xa3\xda\x97\xde\x03\xe4=\xeb\x1f\xf2\xa7\xf7g\xfb\\\xfeq\x01\xa5\x05,\x0b&\x11\x0f\x16\xbf\x18\x86\x18\x88\x15\xd5\x10k\x0c.\t\x0e\x07\x04\x05M\x02*\xff\xe5\xfb\xfe\xf8_\xf6\xca\xf3\t\xf1&\xeem\xeb\xfd\xe8\x0f\xe7\xa2\xe6`\xe8\xbc\xeb\xed\xee\xcb\xf0\xe8\xf1J\xf3\xbd\xf5\xf4\xf8\xee\xfc\xf7\x00\x82\x04D\x07\xc1\t\x95\x0cZ\x0f\xed\x11\x8d\x14\x9a\x16&\x17\xff\x15\xa1\x14B\x14W\x14\xad\x13=\x12\xdd\x10\xed\x0e\xdf\x0b\xe5\x07\x91\x03\x9b\xff\xb6\xfb\xdb\xf7h\xf4%\xf1\xe9\xed6\xeb\x12\xe9_\xe7\xf2\xe5\xf1\xe4\xd8\xe4\x13\xe5l\xe4\xc2\xe2\xac\xe2]\xe5\xc0\xe8Q\xeb\t\xed\xe6\xef\\\xf3\xbb\xf6\\\xf9A\xfc\x1f\x00q\x03Y\x05&\x06\xdd\x07/\x0b\xb5\x0f\xf5\x12G\x14X\x14\xd9\x14\'\x17\xe4\x1b\xf0!\n(\xce,\xa6.\xc1-"+\x1d(\x11&\'%\xff$\x05%\xb3#\x7f \xcc\x1b\\\x16\x89\x11Y\r\xd8\x08\xc1\x03/\xfeo\xf9?\xf6\x04\xf4W\xf2N\xf1\x93\xf0\xdf\xee\xa6\xeb\xff\xe7X\xe5\x9a\xe4\x84\xe5\x1f\xe8\xa8\xeb\xd4\xee\x7f\xf0\x0f\xf1\xcf\xf1\xc5\xf3f\xf6\x15\xf9`\xfb\xe7\xfc\\\xfd\xe8\xfc\x14\xfcO\xfb\x13\xfb0\xfb\x95\xfbM\xfb\xd5\xf9y\xf7\x02\xf5\xce\xf2\n\xf1e\xf0\x9b\xf0V\xf1\xd0\xf1!\xf2N\xf2\xb5\xf2f\xf3\xac\xf4\\\xf6\x06\xf8\x8a\xf9\xda\xfa\x1d\xfcz\xfd\x15\xff,\x01}\x03^\x05]\x06\xf5\x06b\x07\x17\x08\x90\x08J\tq\n\xac\x0bV\x0c\x1d\x0c\xa6\x0b\x19\x0b2\n_\t\x08\t!\t\xd5\x08z\x07\t\x06\xbf\x04\xa1\x03i\x02Q\x01C\x00\x17\xff\xca\xfd\x91\xfcK\xfbO\xf9\xa2\xf65\xf4\xad\xf2q\xf1\'\xf0}\xefg\xf0\xaf\xf1\xc2\xf1Z\xf0\xc3\xee[\xedO\xec\xb8\xecD\xee\xb8\xf0\xfb\xf3\x8a\xf7>\xfa\xa0\xfbq\xfc\x01\xfe\xc5\x00\x18\x04\xb0\x06c\x08}\n\xd9\r\xfd\x11{\x16\xf9\x1a\xd9\x1e\xf5 \x11!\x0c 
\x8f\x1e\x1d\x1d\xa7\x1b\x93\x1a;\x1a^\x19\xeb\x16!\x13n\x0f\x0f\r4\x0b\x86\t\x1f\x08\x05\x07\x86\x05\n\x03/\x00\x83\xfd\x88\xfb8\xfa\x0c\xfa\x15\xfbu\xfc\xe6\xfc\xc5\xfb\xc1\xf9\xe9\xf7\xde\xf6\xd9\xf6\xde\xf7\xf1\xf9\xd2\xfc\xa5\xff3\x01\xd1\x00\xdb\xfe\xfd\xfb5\xf9T\xf7\x0c\xf7\t\xf8:\xf9\xa9\xf9\xcc\xf8\xc8\xf6\xf8\xf3\x03\xf1\x17\xef\xc3\xee\x1d\xf0\x0f\xf2\x10\xf4\xb1\xf5\x88\xf6R\xf6C\xf5V\xf4\x0b\xf4\xa4\xf44\xf6@\xf8/\xfa\x93\xfb\x1b\xfcO\xfc\xc3\xfc\x85\xfd\x08\xff\xf8\x00\xe3\x02\x14\x04$\x04\xfb\x026\x01\xe7\xff\xd8\xff\xf0\x00Q\x02\x07\x03\xb3\x02\x8e\x01\xe0\xffl\xfe\xc4\xfd-\xfe\x94\xffY\x01\x11\x03\xb0\x045\x05|\x04\xee\x021\x01@\x00U\x00\r\x01g\x02h\x03-\x03*\x02\x03\x01/\x00s\xff\xc8\xfe\x8f\xfe\xf5\xfe\x88\xff]\x00\xdf\x01!\x04\x06\x07\xe3\t\xa6\x0c\xef\x0ev\x10a\x117\x12\xfe\x120\x13\xd9\x12U\x12F\x12\xa8\x12\x13\x13\xa6\x12\xf0\x10$\x0e\xa4\n\xe2\x06\xf0\x02\xfe\xfem\xfbM\xf8\xb4\xf5\x8d\xf3\xd4\xf0\xc3\xed$\xebN\xe9U\xe8\x02\xe8\x17\xe8\xf8\xe8_\xea^\xec\x80\xee\xb4\xf0\x00\xf3\xda\xf5\x94\xf9\x7f\xfd\x0f\x01\x9d\x030\x05w\x06\x0f\x08\x1e\nv\x0c\xd8\x0ea\x11\xe7\x13\xd3\x15,\x16\x9c\x14,\x11\x9f\x0c\xe8\x07\xb9\x03|\x00>\xfe\xf3\xfc%\xfc\x08\xfb\x18\xf9\xf0\xf5\xe9\xf1\x01\xee\xf7\xeaF\xe91\xe9\x8a\xea\xcd\xecx\xef\x84\xf1\x02\xf3-\xf4\x18\xf57\xf6\x96\xf7c\xf9\xa5\xfb\xe1\xfd\xf4\xffh\x01K\x02\xcc\x02%\x03\x91\x03\xfb\x03\xfb\x03:\x03\xa5\x01a\xff\xfe\xfc\xc0\xfa\x13\xf9_\xf8q\xf8\xfa\xf81\xf9\xaf\xf8,\xf7)\xf5\x10\xf3}\xf1\'\xf1k\xf2\n\xf5Q\xf8v\xfb\xca\xfd\xf4\xfeX\xff-\x00\xcf\x02\x0c\x08\x9d\x0f\xb4\x18,"\xe4*\xab1t5\xe45f4\x802T1\x061\x041\xc80S/\x15,\xb3&\x97\x1fe\x17\xcc\x0e\xbd\x06\x83\xff\xf0\xf8\xbf\xf2\xee\xec\xf3\xe7\xe6\xe3\xe1\xe0\x7f\xdeP\xdc\x06\xda!\xd8\xcd\xd6T\xd6\xbc\xd6K\xd8\xaa\xdb\xc9\xe0\xe3\xe6\xe6\xec\xec\xf1\xa8\xf5g\xf8\x8c\xfaB\xfc\xfe\xfd\x12\x00\xd3\x02W\x06\xcd\t\x82\x0c\xbf\r\x80\r\xb6\x0b\xd3\x08G\x05\xa3\x01\xc9\xfe\x08\xfdA\xfc\xee\xfbA\xfb\x9d\xf9\xd8\xf6P\xf3\x83\xefl\xec\xe0\xea\x1d\xeb$\xed\xb5\xef\xa2\xf1"\xf2\xc9\xf1\xa8\xf1%\xf3\xb8\xf6\xc5\xfb\x01\x01"\x05\x98\x07q\x08\xb9\x08B\t2\x0bG\x0e\xc5\x11\xd9\x14$\x16s\x15\xf7\x12?\x0f\xbb\x0b:\t\x92\x07\x92\x06-\x05@\x03\x04\x01\xfd\xfd@\xfaA\xf6?\xf2\xfa\xee\x81\xec\x00\xeb\xfe\xea\xf4\xeb:\xed\xbf\xed\xfe\xecG\xebs\xe9@\xe9\xcb\xeb0\xf1.\xf7o\xfb\x1f\xfd&\xfc\xd1\xfan\xfai\xfc\xc9\x00\xe7\x05p\n\xbf\x0c\xc0\x0c\xac\x0bK\x0c1\x12Q\x1e7.\x91\xf5Q\xf0+\xec\x14\xe9\xd6\xe6c\xe6\x95\xe8\xd0\xec\t\xf2\x91\xf64\xfa\xa8\xfbj\xfaM\xf7\xb3\xf4\xc5\xf4o\xf8\xaa\xfd"\x02b\x02\x16\xfe\xb3\xf6P\xee\xf8\xe7\xc9\xe4w\xe5\x88\xe8%\xebc\xeb(\xe9\xf9\xe4\x9a\xe0\x10\xde\xda\xdf\xf9\xe5\xf7\xed\xdd\xf4\x05\xf9\xef\xfa\x82\xfbT\xfd\x18\x04\x07\x15\xb8/\x89M\xafc\xa2j\xbacrW\xf1P\x05U2a2o*xLu\xbdd\xddH%)\x84\x0e`\xfdQ\xf4\xc9\xef\x9d\xebp\xe4,\xd9\x19\xcaJ\xb9*\xaaB\xa0\x19\x9d\xb6\xa1\x19\xad\xd2\xbd\xb4\xd07\xdfX\xe5Z\xe4\xbe\xe1Q\xe6\xc8\xf4\xe7\x0b\x06&\xe9:5E\xcaB:7\r)\xe3\x1f@\x1ew"\xea%|"\x0f\x17\x1d\x06#\xf4\xe1\xe2\xe4\xd3\xd9\xc80\xc3a\xc1_\xc06\xbf\xb2\xbe\x7f\xbf\xdd\xc0r\xc1\xa6\xc1\xda\xc5\xbe\xd0\x91\xe2\xe0\xf5\xb6\x04-\x0e4\x12G\x14a\x18,\x1f\x88)@4\xda:\xac:\xa23\xc5*Q#\xa3\x1e}\x1bU\x17 
\x12\xd7\n\x9d\x01\x8b\xf9\xe6\xf2+\xefr\xed\xa3\xeb\x14\xe9\xa9\xe6\xb1\xe6\xb6\xea\x8e\xf0K\xf5\x8c\xf8d\xfa\x8b\xfb\xc5\xfc\\\xfeg\x02<\x066\x08\xbe\x06i\x02k\xfd\xa5\xf9\x9e\xf7\xb7\xf5\xcf\xf2\x7f\xee\xac\xe8?\xe3\xb3\xdf\xe6\xdd\x88\xde\x92\xdf\xe0\xdf\x18\xe0\xf8\xdf\x08\xe0\xec\xe2\x08\xe7\xb8\xed\xe2\xf4J\xf9\x8d\xfdT\x01\xaa\x06\xa5\x0bu\x0f\xae\x14\xa1\x1b\x89%\xb04JJ\xa8`\x9cg\x15\\\x01I\x8a>}D1Q\r^pd_\\\x8eDP#@\x07V\xfa\x9c\xf8\x9b\xfa\xb5\xf8?\xef[\xe1\xbc\xd2*\xc6z\xbc\xbe\xb8o\xba\x94\xc0\x85\xc6\xc6\xcc\x85\xd6,\xe2\xbd\xe9\xe3\xe9\xee\xe8\x00\xeeM\xfc\x1a\x0e\x98\x1d\x81(L.7-\x85%\xb2\x1c|\x19>\x1d\xe5 \x97\x1f\xf5\x16W\n\xfd\xfb\x13\xee>\xe3\x7f\xdb\x81\xd5w\xcf\x9c\xc8\xc4\xc3\x08\xc2\xd5\xc2\xa3\xc3<\xc3\xaf\xc3\x16\xc5\xb7\xcaa\xd6\x01\xe6\x99\xf3\x12\xfb\xe9\xfe\xf3\x01\xed\x06\xe4\x0f\x01\x1cM(\xdc.\x1c/\n,\xe0(\xb8&#&\x00&\xb2#\xc1\x1e\xdc\x184\x13\x9e\r\x04\x07q\xff\xa5\xf9]\xf4\x15\xf2`\xf2\x04\xf4\x07\xf4&\xf1\x85\xec\xcf\xeau\xed\x87\xf1\xc3\xf65\xfa\xea\xfb2\xfc\x99\xfa\xd2\xf9L\xfa\xf0\xfa\x1d\xfd\xf1\xfd\x11\xfeP\xfc\xd5\xf8\xb3\xf5\xff\xf0O\xed\xd6\xea3\xeb\xd0\xec\xc1\xedg\xec8\xe9\xba\xe6\xeb\xe4\xe2\xe5\xbc\xe9 \xed\x8f\xf3\x95\xf7\xe6\xf8\\\xf9Q\xf4\x8c\xf3\xd2\xf7\xc3\xff\xf6\x0b.\x12\x9d\x11\x9e\x0c\x9f\x0e\xeb \xc9=\xaeP\x9bM:A\xf79\x89=IH\xfaS\xd6_5c\xd8RK:I(b#<&:!\xb2\x15c\x08e\xfd}\xf4\xa8\xeb\xc9\xe08\xd7\x17\xd1\x89\xccW\xccO\xcf"\xd5I\xd9_\xd4\xe6\xcb\xa4\xcaX\xd6>\xe9J\xf7S\xfd\xab\xfe8\xfe\x83\xfd\x89\x015\x0br\x17`\x1e(\x1d\r\x17)\x10$\x0b\x95\x08"\x07\xe2\x03\xb7\xfd\x16\xf5\xcd\xed\x16\xe7\x1c\xe1\x03\xd9\x94\xd1\x95\xcc\x86\xcb\xfc\xcc\x9a\xcf1\xd2\xfd\xd2\xc2\xd0\xac\xcf\x91\xd5\x89\xe0,\xef\xa7\xfa}\xff\x10\x00\x8c\x02\xad\n\xa0\x16s\x1fp& +\x9c)\x06&{%d)\\,G)\xec"\x99\x1d\xff\x18s\x15N\x11F\x0c\xaa\x05\x04\x00U\xfb\x0f\xf9\x90\xf7\x84\xf5;\xf3\xd5\xef\x00\xee-\xee\x08\xef\xb0\xefr\xefL\xf0\x98\xf1\xfe\xf2\x90\xf3\xba\xf33\xf5H\xf5\x00\xf5\x17\xf4)\xf4\xb0\xf5\x02\xf5V\xf2\x01\xef\xbc\xed\xa9\xed\x13\xee&\xedA\xea\x9a\xe7_\xe4\xb5\xe5|\xea\x9f\xed\x12\xf0\x00\xed\xbe\xeb\x0e\xecp\xed\xea\xf0\xfa\xf1t\xf3\x97\xf3\xd7\xf9\xc8\n\x13!\xcc0\xda+\x08!<"z4\xf4L\\]\xc3f\x04g6[kJ\x08D[MaV\xa5R\'Ce1i"4\x13\x1c\x07/\xffG\xf8\x0c\xf0D\xe4\x91\xd9X\xd1c\xca\x97\xc2\xd2\xbb\xb2\xba_\xc0\xff\xc9\xf8\xcf\x92\xd0j\xcf\xe1\xd1z\xda+\xe7S\xf4\x1e\x01?\n\xf1\rF\x0e\xf9\x0e\xf2\x13=\x1c\x05"\x06#\xd1\x1f\x9c\x191\x12)\t6\x03\xc5\xfe\xf3\xfa>\xf5\x0f\xed-\xe2\xaa\xd6\xe3\xcdD\xca\xf9\xca\xf0\xcc\x7f\xcf\x99\xce\xe7\xc9\xf6\xc6\x9d\xcaD\xd5\x10\xe1\xaf\xeb\xc8\xf5N\xfb*\xff\xa3\x02p\x0b\xdd\x15\x03\x1f\xa6\'X-\x840\x11/a-I-\xba,\xd3,(,$*^&Z\x1e\xf1\x15\xc8\r\xa7\x08\xb4\x04.\x01h\xfdU\xf8=\xf2Y\xebl\xe7I\xe6\x15\xe7\xd7\xe7\xdc\xe6\x00\xe7\x8a\xe6+\xe5K\xe5\xf0\xe5\xc6\xe9?\xed\x10\xee&\xee\xc4\xee\x85\xf0R\xf1\xa1\xf0\xbf\xf0$\xf2M\xf3Q\xf3/\xf3\x85\xf3b\xf2Y\xef\xca\xef3\xf1\x1a\xf3\xa5\xf2\xe6\xec\xc8\xeb\xaa\xec0\xf2l\xf6z\xf7]\xfb\xc2\x02\xfa\x0b\x84\x12\\\x1b,+\x84<_DR@^=\x87D\xd6P&[sa;c\x8c]!O)>]6\x076\xd85\xfc/\xac"\x8c\x13\x03\x03\xa7\xf2\x05\xe6\xfd\xdel\xdb\x0f\xd7\xca\xd0\xef\xc8\xfd\xc2\xd1\xbe\x03\xbb\xb5\xba\xd2\xbe\xd5\xc7\x98\xd3\xfe\xd9\xc9\xdc\x1c\xde\x0f\xe2W\xeb\x1f\xf7`\x05T\x11y\x18\xfa\x17\xa9\x14\xd1\x13\xd4\x16\xaf\x1dZ"\xc3"\x9d\x1d\xe3\x12\xb0\x07c\xfe\xeb\xf8\xd2\xf6\xc6\xf4u\xef\xc5\xe6\xa8\xddy\xd4\x19\xcd\xa8\xc8(\xc9(\xce\xae\xd1\xef\xd3\xb6\xd5z\xd6!\xd7\x06\xda\xf9\xe3\r\xf3=\x02~\t\t\n\xdd\x08\xea\x0c\'\x18\xd6#K,\xd4/\x8c.\xca*\x81\'\xc9\'a+\xc5+\x0b*d%\xb0\x1fO\x17\xc1\x0e\x0f\x08\xa4\x04\xb9\x02\xe4\xffV\xfbs\xf3\xbc\xea\
xbe\xe2\xe1\xdf\xbd\xe0\x0e\xe4.\xe6\xea\xe4\x89\xe0\xd5\xdb\x0b\xdbn\xdf\xda\xe5s\xeb\x9b\xed\x12\xed\xb2\xeb\\\xeau\xed\r\xf1\x0b\xf7\x8a\xfbY\xfc\xbb\xfay\xf8\r\xf6M\xf6\x1c\xfa\x81\xfe=\x04\xfc\xff\xd4\xf7\x0e\xf2e\xf1\x7f\xf8u\xfd\xe5\xfe\x02\xfc\x0b\xfdC\t/\x1a\x81#\xc9\x1d\xa7\x18\xc8\x1f\x1d/\xd4A:M\xbcQVM[A\xe8<\xffB!M6P2H[9\xdc,\x86#x\x1c\xcc\x17q\x10\\\x07\xfc\xfa8\xed\xf1\xe2\xa2\xdb\xa2\xd6\\\xd1\xe5\xca\xa0\xc63\xc5/\xc6!\xc5\x95\xc3\xf3\xc4\xe5\xca\x93\xd4\x7f\xdc\x03\xe3\xa7\xe7@\xeb\xda\xef\xd2\xf7\xde\x02u\r\x04\x13M\x13%\x12\x15\x12\x10\x15_\x18b\x19p\x17K\x13U\r\x11\x07\xb1\x004\xfdE\xfa^\xf5\xdd\xee\xc2\xe8\xa7\xe3\xad\xdd\xb0\xd9j\xd8}\xd9\xe6\xd9\xd9\xd8\x1b\xd9&\xdb\xec\xdc\xfb\xe0L\xe6\x95\xeeA\xf6s\xfa#\xfeM\x02\x98\x08\xa1\x0f\xe3\x15K\x1b|\x1f\xd0!/#\x9e#\x92$\xb7%\xf7%\x1e%\xcb"\x80\x1f{\x1b\xcf\x16Q\x13A\x0f*\x0b&\x05\x95\xfe]\xfa\xdd\xf5;\xf24\xedY\xe9\xb2\xe6G\xe4\xe5\xe3$\xe3=\xe3N\xe1\xb9\xe1\x83\xe3\x07\xe6\xcc\xe8J\xe9o\xea\xb0\xeb\xd7\xee\xfd\xf2?\xf6F\xf7\x8c\xf8\xf3\xf9\xe3\xfa\xed\xfc\xa6\xffj\x018\x01\xa7\xfe\xde\xff\x11\x01c\x01\x8d\x01_\x01\xcc\x02l\xffT\xff\x05\x01\xee\x08[\x14\x92\x1b\xc2\x1d \x17\x8f\x14Y\x1bs*\xf98\x17=\xf8:\x813I/H0p67>!=\xee5\xf3)| \xd6\x1b\xd9\x19\xe5\x19\x97\x14\xd1\x0b\xa6\x00a\xf6@\xef\xd5\xe9n\xe8\'\xe7\xa3\xe4B\xdfQ\xd9.\xd5\xe4\xd3\xbc\xd5\xdf\xd9\x1a\xdf\xac\xe2U\xe3\x9c\xe2\x98\xe2\x13\xe6G\xec\x00\xf4\x1e\xfaK\xfc\x0e\xfc%\xfa\x15\xfa9\xfc\x16\x01\xaa\x05[\x06J\x02\x8d\xfd\xeb\xfa$\xfa\xd7\xfa1\xfb\xb5\xfa\x8d\xf6\x9a\xf1U\xf0\x7f\xf0\xd6\xef\xeb\xec\x88\xebt\xeeM\xf0!\xf1\x9f\xef@\xee$\xee\x1a\xf1c\xf7\xee\xfc\xbe\xfe\x0e\xfd\r\xfc\xd6\xfdQ\x02\xc9\x08^\x0e\x8f\x10\xa6\x0e\x1c\x0c\xe4\x0ct\x10\xbb\x14\xb1\x16\xed\x16\x04\x15\xa1\x11\xc3\x0e\xb4\r\x19\x0e\x05\x0e\xe9\x0b\xd0\x08`\x04\x1c\x00\xe3\xfc\xd6\xfa\x86\xf9\xa3\xf7\xdb\xf5\x85\xf3\xd6\xef(\xedE\xecl\xec\xe2\xed\'\xed\x12\xed\xf0\xeb\xf6\xea\x17\xeb|\xebN\xed$\xef\xae\xf0c\xf2\xe3\xf2b\xf2\xd2\xf1f\xf1b\xf6\xe9\xfb\x82\xff\x91\xfe\xaa\xf9\xc4\xf9\xe5\xfc\x0b\x04\xaf\x08\xff\x06\xd6\x02/\x01e\x07 \x10.\x15\xfd\x13\xc2\x117\x13y\x1b\xb0%\xcf*N)\xda%\x90\'N-[3\x8b675\x851\x0c-\x9c*\xa7*\x0e*\r([#\xbb\x1c\xaa\x15\x01\x10\xb2\r\xd9\n\xee\x05\x08\xffr\xf8+\xf4\xe5\xef\x0c\xed\xe2\xe9\x0b\xe7T\xe3\xdc\xdf\x15\xdf\x8a\xdf\xa2\xe0\x94\xdf\xd4\xde&\xdf\xf5\xe0\xbf\xe3\x81\xe6\xe6\xe8\xdd\xe9\xd0\xe9\x01\xea\xcd\xeb2\xefj\xf2\x02\xf4c\xf3\x0b\xf2\xe3\xf1\x0f\xf2V\xf3\xa1\xf4\xb0\xf4\x0e\xf4\xe5\xf2\xaa\xf2\xdf\xf2\x0f\xf2S\xf1\r\xf2\xab\xf3+\xf51\xf5P\xf5\x0e\xf6\xd2\xf6\xfd\xf8\t\xfb\x90\xfd\x9b\xff\xdb\x00i\x02*\x04\x8e\x05\x00\x08,\nl\x0c\'\x0e\xc0\x0e\x82\x0f\xf6\x0f\x8b\x10\xe5\x10.\x12\x86\x12m\x12Q\x11u\x0fU\x0eU\ra\x0c\x93\x0b\xe1\t\x82\x06\x88\x04h\x03X\x02\xf7\x00\xa0\xfe\xb8\xfb\xa9\xf9\x9f\xf8\xe5\xf8\x8f\xf7\xd4\xf5\xd6\xf5\xc6\xf5W\xf5\xf2\xf3B\xf0\t\xef\xc7\xf1u\xf5\xfc\xf6\x06\xf57\xf2\x00\xf18\xf1T\xf3\xf7\xf5R\xf6c\xf4\x08\xf4\x1c\xf8\x92\xfd\xb6\xfe\xe4\xf91\xf7\xb4\xf7\xba\xfa#\x00K\x04#\x06S\x05\xf6\x05\xd6\x06x\x07E\x07 \x06\xb7\n\x1b\x14\xbc\x17\xa0\x132\x0b~\t\x83\x11\xc7\x1a3 s\x1b\x08\x11\xc5\x0bx\x10\xbc\x1eO*\r(\xf7\x1b\xa0\x0f\x96\r,\x16\xf3\x1f\x16%\x8d"9\x1a\xb5\x11\x0f\x0e\x8a\x0f\xc8\x11\xdc\x12\x02\x11\x88\x0bK\x06d\x01y\xff_\xff\xcc\xfd\x0e\xfa\xc9\xf5\n\xf3 
\xf1\xbf\xee\xef\xea\x92\xe8X\xe7\xcc\xe6\xdb\xe60\xe4\xb8\xe0\xab\xde\xc9\xde\xe1\xe0\x90\xe2c\xe3Z\xe3\x8b\xe2\x88\xe0\x95\xdf\xcf\xe2O\xe8-\xec\xff\xec\x96\xed\x12\xed\\\xee8\xefE\xf0\x05\xf6]\xfc\xa2\xff\xe8\xfe\x8e\xfc\x92\xfd\xe4\x00\x90\x013\x03\xb9\x07\xec\x0bJ\x0c*\nS\x07\xcc\x06\xfa\nm\x0f\xef\x11\xf2\x0e\xc6\n_\n\xa0\x0b\xa4\x0eH\x0f\x82\r\xf7\x07\xb2\x08\xf9\n\x1b\x0b\xeb\x06&\x04\x01\x05Q\x05\xc9\x03\xde\x02h\x01\xbb\x00@\x01_\xfe<\xfb\x89\xf5\xb0\xf9H\x00\xab\x01\x11\xf9\x16\xf2u\xf1 \xf3\x0e\xfb\xf5\xfe\xa5\xfbD\xf0N\xe78\xf0\x7f\xfa\x19\xfc|\xf9\xce\xf8\x9a\xf3^\xec&\xf1R\xfaU\xf5\x83\xf8\xc3\x01\xbd\xfe\xe1\xf6\n\xef\xc8\xf5\xa9\xfe\xdf\x08\xad\t8\x01B\xf7\xf7\xf9\x0e\x0bC\x0e\xd0\x05\x82\x01f\x03\x1c\x0f\xa8\x1e\x9f\x13s\xfc\x1a\xf7\xe6\x05\x9e\x1c\\#I\x1d1\x0eC\xfa\x06\x00y\r\x7f\x12\xc9\x15\x98\x1aW\x11y\x0b\xeb\n\xb5\x01R\x03{\t\r\x16y\x19\x18\x0f\x1c\x08>\x00\x08\x01\xe2\n\x1a\n\xa3\nr\rC\r\xa4\x07\x11\xfd\x0b\xf9\xc5\xff9\x01a\x05\xea\x06\x89\xfb\x95\xf8\xf5\xf6\x9d\xf4L\xf1\x13\xef\xc1\xf1N\xf5d\xfb\xb0\xf3\xb1\xea\xe8\xddJ\xe3\xd5\xf1|\xf5F\xf8\x9a\xef\xaf\xe7\xfa\xe33\xe8;\xf3\xf1\xf8&\xfe\xf7\x00\xad\xf2\xd1\xe9X\xed:\xfbE\t&\n\xc4\x06\x8f\xfel\xfd\xa3\x00\xc4\xfd?\n\x8b\x15\xcb\r\x8b\x06\xe2\x04=\x0f\x15\x11_\x01g\x02\xe0\x0e/\x14\x8d\x17\x10\x10B\xfe\xd7\xf7\x8e\x04\x1b\x15&\x10\x07\nW\x06m\xff<\xfc9\xff\xfd\x03\xe2\x02\xb1\xfcA\xfb\x0c\xf6u\xfa\xe9\x03\xd4\xfd\x12\xef8\xe5#\xec\x82\xf7\xa9\x02c\xf9\x8c\xf0Z\xe4U\xe1\x12\xfb\xeb\x06\x17\xfdH\xf2q\xe8\x8f\xe1\xef\xf2\xda\x05m\x08\xa2\x07\xa2\x06\x12\xef\x10\xdd@\xf3*\x0f\xce\x0f\x98\x0b\xdb\x02\xd6\xf6z\xf6\x85\xfe\xe5\x10F\x0ek\x04"\x02\x10\x05-\x04r\x03\xe6\r\x0f\x13}\x07@\xfa\xeb\xf93\t7\x16\xde\x15\xf1\x07\xf6\xfeU\xf7\x1f\xf4[\x13\xb3\x1c\xae\x15\xa7\x00G\xf1\xa8\xf8\xe1\x04\xe5\x183\x19L\t\xeb\xfa\xaf\xf4\x9f\x01`\x07%\n\x99\x0fO\t\xbb\x082\x042\xf4Z\xf3y\x082\x12Q\x0b\xf7\xfa\xd2\xf6\x87\xfb&\xfc\xe8\x05"\x07\xb3\xec\xae\xf2M\x03z\xf8\xf3\xfa\r\xf5\xc7\xf1m\xf8m\xf9\x9d\xf3\x7f\xedR\xeeq\xf0w\x02\x92\xfd\xec\xf0l\xeb,\xef[\x00\x8e\xf9\x1f\xf4~\xf8\xe0\xf1)\xfc$\x10n\x0b\x90\xf3v\xef\'\xfeS\t\xb6\x11\xc9\x06\x8e\xfc\xf9\x072\x11\xb0\x18H\x04\x07\xf3\x80\x00\x8e\x12\xc9#\x92\x14\xa3\xf3\x87\xfb\xd3\x14\x83\x0cp\x0c\xe7\t\xa3\xfc\xbd\x00\xb5\x05\xaf\x0e\xad\x03\x17\xfbJ\xf8\xb7\x08;\x0f\xd9\x005\xe8\xe0\xe1\xf7\x04\xa7\x12\xe4\xff\xfe\xfaW\xe8\'\xe5\xc9\x03\xbb\xf9/\xf6\x8b\xf5\xa2\xf2\xcb\xf3\xaf\xf7\x0b\xfez\xf9\xa1\xf5\x0f\xf6\x10\xf0|\xe2J\xff\x1b\x17h\x16\xbc\xf5\xc5\xd0\xa8\xdc\xde\x07\xf4$\x15\x12\x8b\t%\xf4?\xdb\xd1\xe6\xa6\x06\xd7/\xe4#\xa3\xff\x89\xdb\xf8\xd36\x11\xae;\n W\xf0\x18\xdb\x16\xf3k\x1dr*\x93\x18E\xfb\xff\xe1L\xeb\x7f\x1a\xd7&P\x12\t\x02\x87\xf2\x05\xfb)\x05\\\r\x9c\x1b3\x06\x0c\xf2K\xf9\xec\xfcd\x15\xe3\x12n\x01\x06\xfc\xc3\xf1c\xfb\x12\x03N\x07\x1b\x06\x1b\x04\xdf\x05\xf1\xf8\x8b\xf5\x99\xe1\xef\xe9\'%\xd8\'\xaf\xf6\xc7\xe0\x93\xe3\xcf\xef\xdc\x07\x9e\x04}\x01}\x04\x04\xebA\xf6_\xf8\xc3\xef3\xf0\x00\xfe\x8c\t\x10\x046\xf2}\xe1\xe4\xef\xb1\x08\xa0\x07\xf7\xfc \xfb\xe4\xee@\x06h\nc\x03\'\xf0\xd6\xf1p\x0f>\x11y\x10\xd9\x08_\x00c\xfd\xcb\x02\xe4\x07H\x07g\x08\x93!\xd7\x1d\xcd\x02L\xeb\x16\xedm\n\xfa\x1eb&\xe1\r\xc0\xfbO\xf5u\xf1k\xfa\xce\x08\xbf\x16\x1d\tx\x02\xbb\x03\xf5\x00\xc7\xf6\x0b\xe7\x7f\xec\xa2\xfaC\x08\x16\x19d\x0b\xc4\xf5\xe7\xe7\x93\xd1\xf6\xe9 
\x0b\xbb\tG\x05\xfe\xf3\t\xfb\x10\xfd\x9b\xf5z\xe4x\xda\xd6\xfc4\x0f@\x1a\xb9\x07\xd7\xe4$\xe3/\xed\x0e\x08\xab\x061\xf6\x08\x07\xe4\x0cI\xf9\x1b\xf3\xca\xf9\x17\xfb\xf5\t\x0b\x0c\xb2\n\x05\x104\xfd4\xf8\xab\xfb\xf4\xfb\xac\x0c>\x19S\rv\x17\r\x0b\xae\xf2\xbb\xff<\xf8\x0b\x05\xe1\x16\xf7\x1d\xdb\x18\x9b\x03\x8f\xfc\xa5\xf29\xf4\xde\x0b\x96\x12\x02\x13\xdd\rr\x08\xfb\xf8\xf4\xe69\xfe\xd1\x05\x04\x00\xda\x0f#\t\xa6\xf4\xb5\xf2\xc3\xf8]\x04\x81\xee\xd5\xf2i\r\xb8\xf8\xff\xf9\xb8\x00J\nF\xdd\xcb\xcf\x10\x00X\x17v\x1b\x0b\xf0\x9c\xe5\x96\xe1Q\xea\x15\xfe\'\x0f\xc8\x16\xc1\x05o\xd5w\xd3\xb0\xfa\xb6\x0e\xc5\x1c\xe2\x11\x12\xf3\x1a\xd9\xa4\xee\x0b\x02\x14\x0c\x99\x1c5\x10\xef\xe8\xf8\xdb;\t\x0f$v\x16\xb6\xfb*\xf0\xfc\xf8\xe0\t\x19\x1c\xfb\x11~\xf6v\xfd2\x0b\xff\t\xf5\x0f\x03\xffW\xff\'\x0e\xdb\x10\xd4\x02\x00\xf6\x99\xfe\'\x08K\x0f-\x06\x89\x10\x81\x05\xa9\xe7\x86\xf9\xe4\xfee\xfe;\x17|\x11|\x03!\xee\xbb\xd7g\xf6\xd7\x154\x15*\x01?\xe8-\xec%\xfa\x97\x05\xfa\xf5\xaf\xf4\x95\xf9Z\xff\x9b\xf6$\xf7\xa5\x03}\xf0\x1e\xf8G\xf4\xe7\xefH\xfan\x0e?\nq\xf2f\xf4\x15\xe0K\xf1[$g\n8\xf7s\xffi\xef\x01\xf3\x94\nk\x12\x00\x00W\xfb\xae\xfcp\x03q\x11\xfc\x0c"\xfb\xa9\xf0}\xf04\x14\xa7)Q\x1c\x1d\xf4K\xebQ\xee\xcb\xefw"\x83@\xcf\x16^\xddF\xed\x07\xff>\x04\xb3\x16\x12\x1dV\x02\xb7\xea\xfb\x0b!\x10{\xfaH\t1\x06\xc0\xdc\x7f\xf5s \xc2!\xd8\xf9\xc8\xe7\x03\xf2\xc5\xe2\xcb\n@\x07\xea\xf8J\x10\x0e\x00\xb1\xf54\xd0\x02\xe4\x81\x0c9\x18R\x13\xc7\xebN\xdf\xf2\xe07\xf6\xb1\x079\x1b\xe8\xff\x98\xf1O\xf4\xe9\xe3a\xeeB\x0b\xa5\x130\x12\xb3\xfc\xf9\xda\xbf\xf3\xd2\x03\xf9\r\xe3\x0e=\x14\x1e\xf5t\xec~\xef\xd6\xff\xbd5T\x19\x90\xfb|\xdff\xd9\x17\r\x18*\xf07\x83\x14P\xd8\xbc\xc8\xcc\xef\xaf/\x1d"\x85\x12\xf1\x17\xd6\xe8l\xe8\r\xe8\xcd\xfd\xf80[\x19\xad\x081\xf4]\xdf\xb0\xf2g\x0c1!d\x0b\n\xed\xa6\xf1\xaf\xfb\xd7\xff\xbc\x13t\xf8d\xe2\x90\xfd\xd2\x12V\x04\x96\xe1\x01\xfaN\x17h\xf0\xb3\xe8\xdd\xf9U\xfe\xf9\xfdR\xf1!\x17P%\xcf\xdf\xca\xb7B\xee\xf4\x0f\xcd)\x01-`\xf1\xe9\xd3u\xd98\xf7\x89\t\x93\x10\n\x1b8\x14f\xe6\xcf\xe3d\xed\x06\x04p\'\x14\x13\x1c\xf7H\xdb_\xeb\xf2!\xd6+J\x1e_\xe9\xba\xc6H\xf2\x04\x11\xb8;\xb95\xdb\xffa\xc7\x9c\xd5\xdb\x13\xdb-\xb3#n\xfaa\xf6\xa8\xfc\x0f\x07\x84\xfaV\x00\xea\x15\xde\x02\x8f\xf4\xf8\xffY\xf8l\x0f\xbc\x0f\xb3\xf8\xba\xe6\x9e\xed*&\x14\xfc.\xe4\xcf\x0c\x14\x02\xc8\xf1\xdb\xf5\x84\xe8y\x13e\x0b\x8b\xe5\x89\x11\x05\xf9\xd9\xe9\xbe\xf5\xeb\xf2\xfa\xfd3\x19\xe7\n\xec\xf4x\xec\x00\xf4\xeb\x00a\xf2\xd4\xf3\xb0\x14\xc5\x1e\xff\x07\xb2\xe3V\xdb\xa9\xf1[\x13l*\x97\xf2`\xec@\xf8\x1d\x06\xf97\xe5\x01\x00\xc5\xea\xd4\xe4\t\xc0=\x08A\x1f\xf09\xcb\r\xf0\xbc\xff\xf2\x04\x18\x1f\xd2\r@\x08H\t\xad\xe9\xeb\xf3u\x0b\x1c\x0f;\x01\x98\x0e&\xfb\x80\xffh\xf5\xff\xff\x89!\xe3\xfd\x99\xf8\x04\xf2\x07\xe5B\x0b\x1c!\xf0\x1c\'\xf87\xbf\xd0\xe5\x06\r\xac#\x91\x1c\x10\xee\x07\xecA\xe7\x1f\xec\xa7\x12\xcd\x07\xac\xfa+\x06\xd0\xe2\xa2\xe9f#\xdd\n\xcd\xfaW\x05\xba\xcb\xb5\xdc\xb8 m# \x10\xf4\xfd\'\xe2\x08\xf4E\t\xb3\xf6\x9e\x05\x9d\x05\xc0\x02\x14\nI\x03!\x06\xa3\x00\x03\xe3\xa7\xf8)\x19Z\x16\xca\xf92\xf0\xfe\x02\x9f\tb\x06\xd2\xff.\xf8\xbe\xfb\x91\xfao\x0f\x17\x0eD\x03(\x07M\xea\xd3\x08p\xf0\xd1\xef+%\r\x01\xd4\x00\xbe\x19n\xfec\xe0\x87\xe2\xe3\xff\xa1\x1b\xb2!_\x065\xef 
\xf0[\xe6\xbf\xf1r\x1b#\x1b\xb8\xff\x80\xfdC\xe2\xa5\xe8\xed\x04H\x12A\x10\xa6\xf6\xb5\xf3t\xf5v\xf4\xec\x05[\x03e\t\xa8\x14\xa2\xd8\x1f\xdd\xbb\x1ct\x0fZ\x00*\x0f#\xea\x19\xe5w\xf8\xc2\x10\xa0!\xbd\x02#\xfa\x16\xed8\xe2\xdc\x03\x84$p\x06f\xff8\x04\'\xf5\x1c\xff\x82\xf1\xd5\xf7^\x14\xaf\x19\xd7\x19j\xe4\xe4\xd7\x18\xfb\x8d#\xd7\x13\x91\t\xc3\xee\x96\xdfI\x0c\xf3\x17z\x10\xa8\xf3y\xe7\xec\xf2\xa3\x1c\\\x02\x85\xfb\x9b\x17\x82\xf7H\xeb\x94\xfa\x90\xff\x00\xf6\xef\xfb\xbc"\xa6!\xe4\xe0f\xe0\x1b\xee\xc5\x03\xf5\x11.\nJ\x01\x10\xfe>\xfa\x85\xfb-\xfc\x00\xe2w\x00\xa8%\x8c\n\xe1\xe2\x84\xf4r\x0bR\n\xb1\x08\xc4\xeac\xd8\xe9\x02\xb2&)\'p\x07\xa6\xc7J\xd0\x83\x18\xc4&\r\x01:\xfe\x0c\xfay\x03\xbd\xfe"\xf6\xc8\x04\x9d\xf2\x85\x00\x0b\x1e\xfb\xf9h\xf4w\x1e3\t\xdf\xd4\x10\xe7\xf5\t\xda\x1ae,\x9c\x04Y\xe0\xce\xd7\xfa\xfdZ3(\x1a\x0b\xe1\xee\xe7\x94\x03A\x12\x0f\x10}\x00\xbe\x04\x90\xe9\xc2\xf4o\x0c|\xf9\xe4\xfb\x8c\x14\xfa\x06\x85\xfe\x01\xef(\xf2\n\xff\xe1\xfd\xdf\x15\x0c\xf9\xd1\xef\xd1\xee\xe1\n\x051\x90\xe3P\xd0\xea\x0c\x9f\x17\x9a\x05$\xf65\xef\xed\xf2E\x12\xe6\x1a\xac\xf3\x9b\xf0\xa4\xf9\x89\x0b\xdc\x08B\xdf\x81\x04%#"\xfb7\xf6@\xfd\xf7\xeel\x07\xd9\x04\xef\t\xc1\x1aL\xe8\xba\xdb2\x04\xb7\x18\x98\x16\x87\xff}\xe3%\xec\x18\t\x16\x1cz\xff4\xf4\xbe\x04\xcb\xf7!\xfaw\x0b\x1e\x02r\xff\x8a\xf7v\xff?\r\x7f\xf7\x01\xfc\xc2\r\xdf\t)\xfc"\xe6\xfb\xee\xc1\x193\x1a\x9c\xffA\xfc\xc9\xebJ\xde\x8d\x16\x8b4^\xfd\xc9\xde\xf9\xe2^\xfdU\x1e\xbe\x13-\x05\x0e\t\x9f\xdf\xc3\xc7\x0f\x0f\xe5/P\x17\xb6\x01\xee\xdb\xdb\xe8g\xfb\x1d\x02\xdb\x15:\x1c\x14\xf1!\xdd\xc5\xf6\xb9\x11\xff\x1e\xae\xefo\xe2\x1a\xf8\xbf\x12\x81\n9\xf6~\x0f\xa7\x05\xf1\xfa\x92\xdd\x95\xe4\x0c e&\xa4\x0e\t\xf0\xc8\xea\x91\xea\x8f\xf9\x90\x1aQ \x81\x04\xc9\xe6\xd9\xea\xf1\x01v\nD\x14d\x17\xf4\xee\xed\xdf\x18\xec<\r 2?\x0e\xcf\xf1\x00\xea\'\xdf\x04\xf5\xf0\x1d\\/\xee\x0b\xa6\xdf\x89\xd5L\xfe\x1c\x18k\x0b+\x0b-\xf2"\xfb\xcc\xfd \xef\xdb\x13\xe2\x03 
\xf4&\xff\x98\x04\x19\xfas\xec\x98\x0e\x8c#\x01\xf0\x1b\xd4B\xff\x05\x14\xaa\nQ\x05\x7f\xf9\xa4\xfe\xb1\xec\xcf\xfa0\x13\xaa\xf4K\xfdd\x12\xb0\x12\xf2\xfa\x88\xd9\xfc\xe4\xf5\x0f\xc5*\x9a\x15\xc1\xfd\x0c\xe0g\xd9{\xfd\xa3\x1d~*\xee\xfdE\xea\xf2\xf0\x13\xec\xb1\n\x94\x1e\x92\t\x1c\xf9.\xfa\x1a\xdey\xf7\xb3"\xf4\x1c!\n3\xe6q\xdar\xf3M\x13\xf7\x1a\xf2%\xfa\x00<\xcc^\xe8\xf8\xfdU\x10\x19&\x91\r\xc4\xfd\xda\xe3\xac\xe1\xf2\xfa\x8b\t<"\xe4\x16\t\x00~\xd8\xa0\xe3\xfd\x06\x83\x17\xfd\x1b\xc0\xf1\xc2\xdb\xa2\xfb\xe5\x172\x14\xee\xfc\x7f\xf4X\xea(\xf2\x07\x07>\x04\x9e\x0b\xb3\x18\x1d\x00\xeb\xe4n\xf3#\xf3\x97\x02\xd9\x0b\xe2\r\xd8\x11Y\xf9\xbd\xf1\xe2\xf2\xf4\xf7\xfe\x08\x9a\x10\xf5\x02\xb6\x03i\xf7W\xf0\xab\xfc\x1c\to\x13\xd4\rD\xf5d\xe4+\xfb\xec\x07\x9f\x06\x82\r\x13\x08\x06\xf7\x03\xeb\xc6\xf5\x89\x19m\x1d\x7f\xfc\x99\xdd\x11\xeb\xa4\xfe\xa1\x1e\'\x1d\x8d\x07\xd9\xf6e\xde+\xef\xda\x04\xd6\x0b\xa5\x11\xc4\x12c\xf1\xab\xedN\xef\xcf\xf8\xd6\x167\x14(\xfaA\xe6\x88\xf4\xc5\x0e\xc0\x02\x10\xf9\x05\x13\xb3\xfd\xad\xe2Z\xef\x8d\t\xb8\x11\x94\x12X\n\xb3\xef\xb9\xd8e\xf2]\x1a\xfe\x13^\x04K\x01z\xf6\x8a\xed\xa5\x00\xa8\x06k\nD\x0b\x15\xf6`\xed\x98\x07J\x06+\rY\xff\x8e\xeb\x11\x0c\xd2\xfe\xf6\xf6"\x10\x9a\x06j\xefa\xfd\x8c\t\xe2\x05\xc0\x04v\xfc,\xf9^\xfaT\xfb\x8a\x108\n\x91\xf7v\x06\x87\xf9E\xf9\x98\x03b\xfb\x81\x02\xa0\x0b\x0c\x00\xea\xf2n\x01\xc3\x0f\x01\x04\xae\xe8\xc7\xea\xc8\x05>\x19\xe9\x0e\x18\x00\xa8\xea;\xe3\xdd\x04\xe2\x10\x1e\x0b\x9e\x02c\xef%\xf0\xd3\x08\xb4\n\xad\x03\x16\xf6\xb8\xfe\x14\xf4\xc5\xec\xad\x15\x10\x1a\xca\x08L\xee\xdf\xde\xc1\xf3i\n#\x17*\x1d\xb7\x00\x8e\xdb?\xee\xda\x04\x88\r$\x0f\xe7\x06`\xf3O\xf2s\xfd)\r\xb3\x19\xa3\xf9\xcf\xe24\xf2\x17\x05\xc6\x16\x17\x14(\x08\xbb\xf4\x82\xe2\xe9\xf2\xe6\x04\xc3\x13\x8f\x11\xcd\x08~\xf9\xd7\xea6\xf6\x98\x06\xc3\x0b\xc4\x0eD\xfd\xf1\xee\xc0\xfb[\x07Q\x06\xfb\x08\xa1\x06\x9c\xf2\xfb\xefu\xff\xd9\x05<\x08\x1f\x0b\xfd\x04=\xf9\xed\xe8i\xf5\xc7\x0c\xbf\x0c\x13\x05\x7f\xfb\x93\xf7<\xf6+\x00\x0c\x04\xed\xfe\x07\xfe\xfa\xf95\x00\x9c\x01\x04\xfe\x10\x06.\xfd\xeb\xf6\xa3\xf9\x94\xf6\x17\x03\xb4\x0f/\x0bn\xfc\x0b\xf1\xeb\xf1\x80\x05x\x11\xbe\x01v\xf8\x1c\xfe\x16\x02\xe5\x04\xb0\x04`\x05\xa3\xfd\x01\xf4D\xf7"\x0bF\t@\x08n\x07g\xf9\xb1\xf7\x7f\xf3+\xfc+\x08\x8b\x0e\xd5\x10\x98\xf8\x03\xf4\xcf\xfe\x1e\xf6\xf2\xff\x07\x13\xa9\x03\x1a\xf8\xc3\xf6\xac\xfe\x92\r\xb2\x02\xdf\x02&\x01G\xe86\xef\xe4\x0cE\x17B\x0b\xf5\xfc\xfc\xf0~\xf2\xe4\xfa)\x02\xbe\x0f\xb6\x02\xba\xf7\x8d\xfb*\x004\x07,\xfdx\xf2,\xfd\x8c\x04\xb8\x00\xc1\xfef\xfes\xff4\xff\xbe\xfcq\x00,\x01\xb8\x013\xfd.\xfb\r\x02\xc1\x02\xe6\x00\xbb\x00\x8f\x01"\xfe\xbb\xfd\x82\xfbG\x01\x91\x06P\x02\xec\xfc\x13\xfb\x0c\x05\x9a\x03}\x00\x10\x00\xc7\xffa\xfbB\x00\xdb\t>\x03A\xff\xf6\xffH\xff4\xfd\x11\xff\x91\x03\xc5\x06\xd7\x02\xce\xfd\xf0\xfcj\xfc\xf2\x01D\t#\x04w\xf9\x13\xf9\xe7\x03H\x04\xe9\xfb\x08\xfe\x18\x02\xbd\xff8\xfa\x97\x003\x01\xe9\xffe\x01\xf3\xfc\xb9\xfc\xd5\xf9p\xf7\'\x05o\x0e\xb4\x02\xd5\xf6\x13\xf5e\xfc`\x00R\x06\x05\t/\xff\xfb\xf7,\xf8\x82\xfe\xb9\nU\x05\xec\xfbx\xfcx\xfc2\xfc\xa2\x01\xf0\tT\x03M\xfd\x96\xf7\xbf\xfc\x9b\x04\xdf\x02\xfe\x03\xaa\x00\x06\xffr\x01\xf4\xfd\x93\xfb\x8f\x022\x04\xe9\x00\xcc\x01\x97\x02\xe9\xfc1\xfc\xce\x02\xd9\x03k\xff\xe4\xfb6\x01\xa7\x00~\x00H\x02e\xfe{\x02\xb9\xff\xc0\xf9m\x00\xf5\x02\xcf\x00\xf0\xff\xdb\xfe\x81\xfeA\x019\x01\x00\x00\x13\xfe\xc8\xfa\x8c\x00\xab\x02\xd7\xfd\xe9\x00*\x02\xad\xfe\x87\xfdU\xfc\xfe\xfd/\x01\x04\xff\xf0\xfd\xc2\x02\xaa\x00\x9e\xfda\xffy\xff\x15\xfe,\xfe6\x01\xbc\xff\xd1\xfd\x9c\x01Z\x03w\xff\xc3\x00e\xff\xf1\xfa\x08\x017\x02\xa9\xff+\x00,\xfe
 \xffk\x03\x00\x03\xcd\xff\xd6\xfa\xc7\xf8Q\x00\x03\x05\t\x04&\x00\xac\xfd\xaf\xfbP\xfdw\x03&\x02\xb5\xfe\xf1\xfb\x97\xfe\xd1\x03\x99\x05\x89\x01\x81\xfb\xdd\xfc\x95\xff\xb6\x00T\x03$\x02\xb6\x02[\x04\x0e\xfd\xc3\xf9\xab\xfdz\x02\xd1\x06\x82\x04F\xfd\xad\xf8\xcf\xfb\xed\x01\xdc\x06\xf1\x04\x1a\xfd&\xf7_\xf7\xe7\xff\xcc\x07\xaf\x06\x8c\xff-\xf8\x87\xf6`\xfc\xb0\x05r\x06Q\x00\x8c\xf8;\xf4\xfa\xfas\x02\xa3\x05 \x003\xf9\x1c\xf7\xb5\xf8\xd9\xff\x7f\x07\xd9\x05\xfe\xfd\xdb\xfbQ\x01B\x08\x16\x0e\xd3\r\xa6\t_\x07r\x08\xd3\x0e\xeb\x15\x01\x15\xcb\x108\x0c\xf4\n\xc1\x0e\xaf\x11r\x0f?\x07\x0b\x03$\x04\xbf\x04^\x04\xa5\xffV\xf9\xc0\xf5l\xf3\xc6\xf3\xb9\xf5\xf3\xf3\xa4\xf0\xa2\xeeV\xee\xa9\xee\xc0\xef\xaf\xf1\xaf\xf3s\xf3\xc5\xf3\x8d\xf6^\xf9h\xfd\xc2\xfe\xb6\xfd+\xfe\xf0\xfe\x8a\x02\xdb\x08\x87\n\x96\x05\xf2\x00\x87\x01\xc3\x06\xde\x08#\x07d\x04\x9a\xfd\xd5\xf9\x8c\xfd\x7f\x05v\x06\xc7\xfb~\xf1\x87\xef\xcc\xf6i\xfe\x08\xff\x06\xf9\xd6\xf0`\xf0\xec\xf4\xbd\xf9\x83\xfe\xf8\xfa\x84\xf6\x93\xf6\xa3\xfcb\x00\xc5\xfd\xab\xfd\xd0\xfdy\xfe\xc2\xff\xc8\xff\x92\x00\xfb\x02\xa6\x02z\x04\xa1\xffy\xfa\xc4\xfb=\xff\xe7\x05\x02\x06\xe0\x00\xda\xfc*\xfc]\xfcp\xff\xf4\xff\xf5\x00_\x01\x00\x00/\xff\xca\xff\xd8\x00\xc3\x00\xde\xffd\xfd\xc7\xfc(\x00\n\x046\x04K\x03U\x01u\xff\xba\x01\xf9\x02\x87\x02f\x05\x8a\x0b\xd7\x11\xc1\x14\xae\x11\xa0\r\xf5\x0e_\x14Q\x1a\x11\x1e\xf0\x1dP\x1c\x1c\x1c\x86\x1c\xca\x1b\xaf\x17\x95\x12M\x10@\x0f\x1f\rA\n\xe5\x06\xbc\x02\x80\xfe7\xf9b\xf3/\xefm\xee\x82\xeeX\xed\xe0\xeb\xd9\xea\x97\xea\xe1\xea/\xec\x07\xed\x8a\xebY\xea\t\xed\xab\xf3\xf0\xf8j\xfaK\xf8\x1e\xf6a\xf7\x1a\xfa\x06\xfdh\xfeM\xfd\x0f\xfb\x1e\xfbl\xfd\x91\xfe\x07\xfd\xfd\xf9"\xf8>\xf7\x90\xf7\xb9\xf8$\xf9\xab\xf8\n\xf7w\xf6\xbd\xf7\xd9\xf8\x01\xf9\xac\xf8\xfb\xf81\xfa\xa6\xfb\x90\xfd\xa2\xff\xb6\xff\xd7\xfe\xc7\xff\xe6\x00\xf0\x01\x17\x03\xe9\x03\x81\x04S\x045\x04\'\x05\xf2\x05\xcf\x05\xef\x04T\x04>\x05-\x06X\x06r\x06\x1c\x06L\x05\x80\x058\x06\xc1\x06_\x07\xb8\x06<\x06l\x07\x0e\x08\xd8\x07\x91\x07\xcf\x06B\x06%\x06\x1c\x06\x11\x06\x15\x06h\x04.\x03\xf3\x02\xbd\x014\x01\x7f\x00\x0b\xff\xed\xfd\x1c\xfe\xd8\xfd\xb9\xfd\xc0\xfd\xbf\xfd\x8d\xfc[\xfc\x9f\xfcF\xfd\x03\xfe\xe6\xfd\x1f\xff\x06\x00\xdf\xffV\xff\xa2\xff\xe6\xff8\xff#\xff\xd4\xff\xcf\xff\\\xff\xbb\xfeO\xfem\xfd\xbd\xfc\x7f\xfc\xb5\xfb\x8e\xfb:\xfb\xe1\xfa\r\xfb\x88\xfa-\xfa\xb1\xf9\xfc\xf8]\xf9\x89\xf9\xc6\xf9\x97\xf9\xa3\xf9>\xfaE\xfa_\xfa\x04\xfas\xfa\x05\xfb\x90\xfb!\xfc/\xfc\xd3\xfc*\xfd\xd3\xfcA\xfd4\xfe\x82\xfe]\xfe\xb2\xfe\x0b\xff9\xff\x9b\xff\xa8\xff,\x00E\x00G\x00\xda\xff\xc0\xff\xe1\xff\x0c\x00\xcb\xff\xb8\xff\xa4\xff\xf6\xfeh\xff\xcf\xff\x92\x00\xf5\xff\xb0\xff:\x01\'\x04\x8b\x07<\n\x85\x0b}\x0c4\x0f\xc0\x12\xf9\x16<\x1bj\x1f\x03 \xaf\x1e\xdd\x1e\xce!4$\xbb"\x15 \xa9\x1dk\x1bt\x18\xe4\x13\x88\x10\xd9\r\x82\t\x07\x04\\\xfe&\xfb\xc3\xf9Q\xf6\xe6\xf1\x87\xee\xb9\xec\xd3\xea\x80\xe9Q\xe9\xc3\xe9\x1d\xea\xe6\xe8\xc1\xe7\x97\xe8L\xea\x85\xeb 
\xec\xb3\xec\xbd\xee6\xf0F\xf0\xeb\xf0\xec\xf1`\xf3<\xf4\xd1\xf4;\xf6\xc4\xf7\xbb\xf7|\xf7^\xf9\x07\xfby\xfb&\xfb\x84\xfb\x84\xfc\x9b\xfci\xfc\xfe\xfd\xf4\xfeB\xfe\x89\xfdM\xfd\xdb\xfdF\xfe\xc2\xfd\xb5\xfd\x88\xfe\x80\xfe\xf7\xfd\xff\xfd\xe0\xfep\xffK\xff\x14\xff\x19\x00\x05\x01m\x012\x02+\x03\xd1\x03\x07\x04h\x04]\x05\xb6\x06\xbc\x07a\x08\xd8\x08W\t\xee\t\xc1\nb\x0b\xaf\x0b\xdc\x0b\x98\x0b\xb4\x0b\xd4\x0bW\x0b\xde\n\n\n\xb5\x08\xa4\x07\xc5\x06\x8b\x058\x04n\x02N\x00\x10\xffI\xfe+\xfd\x0b\xfcV\xfa\xa9\xf8)\xf8\x9d\xf7Z\xf7\xb5\xf7\xb5\xf7i\xf7\x9a\xf7>\xf8\xf3\xf8\xdd\xf9\xa4\xfa[\xfb!\xfd\x97\xfe\xd3\xff\x1c\x01j\x028\x03\xa9\x03\xab\x04\xdc\x05\xc3\x06\xb6\x06&\x06$\x06M\x06\x8b\x05.\x04q\x03\xc7\x02\x1e\x01I\xff\xfe\xfe\xdc\xfe\x97\xfd\x9b\xfb\x98\xfa0\xfal\xf9\xb6\xf8\xd2\xf8k\xf9u\xf9\xb2\xf8H\xf8&\xf9\x81\xf9y\xf9\xaf\xf9 \xfa\xd7\xfay\xfbw\xfb\xbc\xfb\xa1\xfc\x84\xfc1\xfcl\xfc\x9c\xfd4\xfe\xe6\xfd\xe7\xfd\x07\xfe\x16\xfe\xe8\xfd\x87\xfdT\xfd\xa4\xfd\x7f\xfd\x85\xfd\x03\xfd\xab\xfc\x85\xfc\xef\xfb\x07\xfc\x81\xfc\xcb\xfc\xe3\xfc$\xfd\r\xfd\x85\xfdN\xfd\xf5\xfdA\xff\x17\x00\x08\x01\xff\x01\x8c\x02\x99\x03/\x05\xe4\x05\xbc\x06\x04\t\xde\x0c\x81\x10z\x12.\x14\x83\x17L\x1aY\x1b_\x1cu\x1f\xb0#\x1b%W#\t"\x87"A!\x18\x1d\xde\x19\x90\x19\xff\x16\x99\x0f\x9b\x08\x90\x06Z\x05i\x00\x8f\xf9\x80\xf5\x88\xf3\xaf\xef|\xea\x98\xe8\x0e\xeac\xea \xe7v\xe4F\xe5&\xe7\x1f\xe7c\xe6\xcf\xe7\'\xea\xd6\xea\r\xeb\xac\xecD\xefH\xf0\xc3\xef\xf4\xf0\x8e\xf3\x02\xf5\x87\xf5k\xf6\xdf\xf7\xca\xf8O\xf9\xc8\xfaj\xfc\xa2\xfc\xf9\xfb\xa3\xfc\xef\xfd\x7f\xfeP\xfe\xcc\xfeF\xffL\xfe\xf8\xfd\'\xff\xab\xff\xcc\xfe\xcd\xfd$\xfe6\xff\x82\xfe\x8f\xfe\xff\xffS\x00g\xffE\xffn\x00\xe9\x01\xdd\x01\xfb\x01d\x03\x16\x04z\x044\x05\x81\x06\xcb\x07\x04\x08\xf2\x07\x00\t)\n\x8b\n\xb0\n\xfb\n[\x0b?\x0b\xde\n\xd2\n\xe3\nG\nH\t\x8b\x08\x0e\x08\xa1\x07z\x06[\x05d\x04H\x03\xbd\x01\x91\x00\xf3\xff\xf5\xfeY\xfd\x18\xfc\x82\xfb\xb4\xfal\xf9\xaf\xf8[\xf8\x99\xf7p\xf7e\xf7\xcc\xf6\xa7\xf65\xf7\xab\xf7K\xf8j\xf9\xef\xf9\x82\xfa\x88\xfb\x83\xfc\xe6\xfdI\xffm\x00\x89\x01\x95\x02=\x03\x9d\x03M\x04\x1a\x05\x12\x05\xeb\x04\xb4\x04\xa0\x04a\x04s\x03\x8d\x02\xf1\x01c\x01X\x006\xff\x06\xff\x8c\xfe\x8e\xfd\xd2\xfc\xc9\xfc\xb5\xfcJ\xfc\xf1\xfb\xf7\xfb"\xfcZ\xfc\x88\xfc\xfc\xfcr\xfdu\xfd9\xfd\x8f\xfd6\xfe\x82\xfe\xdb\xfe.\xff,\xff\x0f\xff\x0f\xff1\xff\x9b\xff\x08\x00w\xff"\xffW\xff?\xff\xd3\xfe\xaa\xfe+\xfe\xca\xfdD\xfd\x8c\xfc0\xfc\x02\xfcx\xfb\xc7\xfae\xfa{\xfaK\xfaM\xf96\xf9\xdf\xf9o\xfab\xfaj\xfa\xcb\xfa\xef\xfa\x0b\xfbh\xfb\xf2\xfb\x8c\xfc/\xfd\x95\xfdI\xfe\x1a\xff\x1c\x00\xb1\x00\x8c\x01\x99\x03@\x06\xee\x07\x02\nf\x0e6\x13\x9c\x15Z\x16i\x18Y\x1dL!\xf4!V"\xb2$Y&\x16$\xef \xb4 \xa5 \xff\x1b6\x15\x13\x12\x04\x11\xb6\x0c\x14\x05_\xffj\xfd3\xfa\xb3\xf3\xdf\xee?\xeeh\xed\xfd\xe8\xed\xe4)\xe5j\xe7a\xe6\xab\xe3~\xe4\xe6\xe7\xf6\xe8\x16\xe8A\xe9\xfc\xec2\xef\xd2\xee\xc0\xef\xb6\xf2\xbd\xf4\xd3\xf4C\xf5M\xf7\xbe\xf8\xe6\xf8e\xf9\xb7\xfah\xfbK\xfbw\xfb 
\xfc\xb3\xfc\x0e\xfdo\xfdy\xfd\x1a\xfd\x1e\xfd\xde\xfd4\xfe\x98\xfdS\xfd\xe1\xfd\xf6\xfd\x17\xfdB\xfd\xab\xfe1\xffN\xfeJ\xfe\xf2\xff\x0b\x01\xc8\x00J\x01\x18\x03?\x048\x04\xf6\x04\xff\x06{\x08u\x08\xdc\x08\x83\n\xb0\x0b\xd4\x0b\xff\x0b\x07\r\xbe\r^\r#\r\xa0\r\xd4\r\x0e\r\x01\x0c\x83\x0b[\x0bh\n\xe3\x08\xc0\x07\xcb\x06;\x05D\x03\xae\x01w\x00\xfa\xfe\xf9\xfcU\xfb8\xfa\x08\xf9\x89\xf7T\xf6\xcd\xf5[\xf5\x80\xf4%\xf4\x81\xf4\xc5\xf4\xcf\xf46\xf5\t\xf6\xe1\xf6\xd0\xf7\xa1\xf8~\xf9\x9e\xfa\xc2\xfb\xf1\xfc>\xfe\xd4\xff\xc4\x00\x84\x01\xba\x02\xa0\x03\xa7\x04\x8a\x05e\x06H\x07\x97\x07\x85\x07\x8a\x07\x03\x08\xe4\x07<\x07\xbf\x062\x06\xbf\x05\xd1\x04\xc2\x03\xd2\x02\xfc\x01\x1f\x01+\x00p\xff\t\xff4\xfej\xfd\x08\xfd\x00\xfd\xd3\xfcn\xfc<\xfcV\xfcx\xfcz\xfc\xb7\xfc-\xfdv\xfdw\xfdm\xfd\xd0\xfd_\xfer\xfe\x81\xfe\xc7\xfe\xe7\xfe\xe7\xfe\xa4\xfe\x89\xfe\xa2\xfet\xfe\xa5\xfd\xf0\xfc\xb9\xfco\xfc\xa8\xfb\xdb\xfa4\xfa\x9e\xf9\xe1\xf8(\xf8\xe5\xf7\xc3\xf7o\xf7.\xf7K\xf7\xb1\xf7\xe7\xf7\x16\xf8\x9d\xf8\x81\xf9s\xfa\x1d\xfb\xeb\xfb\xd0\xfco\xfd=\xfe,\xff\xe2\xff\xb9\x00{\x01\xeb\x01Y\x02\x94\x02\xf6\x02\xab\x03\x1e\x04x\x04"\x05\xf9\x05,\x07\x99\x08\x1c\n\xf2\x0b{\r\x0b\x0f\xda\x11S\x15\x99\x17\x81\x18\x15\x1a\xaf\x1c\x0f\x1e\xbf\x1d6\x1e\xc3\x1f\x15\x1fp\x1b\xa1\x18\xfa\x17#\x16\x04\x11\x02\x0c\x8d\t\x86\x06\x80\x00\xa0\xfaV\xf8\xb5\xf6\xec\xf1X\xec\x10\xea\x02\xea\xf7\xe7\xe9\xe4d\xe4\x1f\xe6G\xe6\xbd\xe4\x04\xe5\xbb\xe7\xdd\xe9\x02\xea\x82\xea\xfc\xec\xa4\xef\xdf\xf0k\xf1\x19\xf3u\xf5\xd6\xf6B\xf7O\xf8|\xfaO\xfcX\xfc6\xfc\xca\xfd\xf8\xff\x84\x00\xe0\xffy\x00\x08\x02)\x02\x0b\x01M\x01\t\x035\x03s\x01\xa8\x00\xe5\x01\x87\x029\x01:\x00\xfe\x00\x92\x01\x83\x00\xbf\xff\xcc\x00\xfd\x01g\x01{\x00.\x01\x89\x02\xc3\x02F\x02\x02\x03\x8a\x04\xf3\x04\xb9\x04n\x05\xd3\x06o\x07\x14\x07T\x07h\x08\xe3\x08\x8b\x08p\x08\xef\x08\x17\tl\x08\xf3\x07\xe2\x07m\x075\x06\x05\x05r\x04\xd9\x03\xa1\x02S\x01J\x001\xff\xc7\xfd\x85\xfc\xbb\xfb\r\xfb\x15\xfa\x1a\xf9\xa3\xf8T\xf8\xf1\xf7\xbe\xf7\xf0\xf7.\xf81\xf8\x86\xf8?\xf9\xfe\xf9\xac\xfar\xfbT\xfc!\xfd\xd8\xfd\xa8\xfe|\xff1\x00\xac\x00H\x01\xde\x016\x02^\x02\x91\x02\x07\x03[\x03g\x03\x87\x03\x9d\x03\xaa\x03n\x03n\x03@\x04N\x05\\\x05\xd7\x040\x05+\x06f\x06\xef\x05T\x06U\x07-\x07\xcf\x05I\x05\x06\x06\xad\x05\xb4\x03\\\x02p\x02\x06\x02"\x00\xa9\xfe\xa0\xfe0\xfex\xfc.\xfb[\xfb\x83\xfb\x91\xfa~\xf9\x95\xf9\xdf\xf9\x84\xf9\x1e\xf9W\xf9\x8f\xf9)\xf9\xc8\xf8\xfe\xf8\x81\xf9\xb1\xf9\x91\xf9\xa9\xf9\xe3\xf9:\xfa\xa4\xfa\x08\xfbl\xfb\xe0\xfbT\xfc\xb9\xfc/\xfd\xe8\xfd\x85\xfe\xc2\xfe\xf2\xfeN\xff\xc8\xff\x05\x00#\x00T\x00|\x00\x89\x00\xa1\x00\xd1\x00\xec\x00\xf4\x00\xd9\x00\xcf\x00\xff\x00#\x01%\x01&\x01\xf1\x00\xc7\x00\xbf\x00\x8f\x00i\x00N\x00(\x00\xf8\xff\xe1\xff!\x00Y\x00E\x000\x00\xc3\x00\xe7\x01*\x03\x99\x04c\x06Y\x08\x0c\nM\x0b\xea\x0cZ\x0f\xcd\x11>\x13$\x14C\x154\x16\x13\x16S\x15\xeb\x14h\x14\x81\x12\xa4\x0fF\rR\x0bl\x08\x93\x04&\x01r\xfeh\xfb\xe4\xf7\x17\xf53\xf3\'\xf1\xdb\xeeD\xed\xe0\xec\x9a\xec\x00\xec\xeb\xeb\xa3\xec~\xed-\xeeR\xef\xff\xf0p\xf2\x87\xf3\xb7\xf4R\xf6\xfd\xf7<\xf9^\xfa\x92\xfb|\xfc_\xfdY\xfe~\xffK\x00\x95\x00\xe8\x00[\x01\xb8\x01\xc9\x01\xe8\x014\x02\x1e\x02\xa9\x01L\x01/\x01\xde\x00\x08\x00G\xff\xf1\xfe\x9b\xfe\x0f\xfer\xfd\x10\xfd\xa7\xfc\x00\xfc\xb4\xfb\xe3\xfb\x10\xfc\xf5\xfb\xd4\xfb\x1f\xfc\x81\xfc\xb2\xfc\x08\xfd\xaa\xfdX\xfe\xdb\xfeT\xff\'\x00\x13\x01\xbb\x01\\\x02&\x03\x0b\x04\xec\x04\xb3\x05w\x061\x07\xb9\x07,\x08\x90\x08\xe5\x08\r\t\x03\t\xe4\x08\x9e\x08&\x08y\x07\xa3\x06\xa8\x05\xa3\x04\x9f\x03\x9b\x02}\x01T\x00+\xff\x12\xfe\x13\xfd;\xfc\x83\xfb\xe8\xfaa\xfa\x15
\xfa\x04\xfa\x10\xfa:\xfa\xa0\xfa0\xfb\xce\xfbh\xfc\x01\xfd\xcb\xfd\x97\xfec\xffW\x007\x01\xea\x01^\x02\xa7\x02\t\x03t\x03\xb8\x03\xd8\x03\xdb\x03\xc9\x03\x8e\x03/\x03\xc3\x02H\x02\xa2\x01\xf8\x00f\x00\xee\xffz\xff\xe1\xfe(\xfe\x7f\xfd\xf7\xfc\xa3\xfcg\xfcC\xfcN\xfcL\xfcH\xfc\\\xfc\x9f\xfc\x13\xfd|\xfd\xdd\xfdv\xfe3\xff\xc7\xffH\x00\xe6\x00\x97\x01#\x02\x96\x02(\x03\xc0\x03*\x04a\x04\x90\x04\xd5\x04\xf8\x04\xd3\x04\xa9\x04\xa0\x04\x87\x047\x04\xf0\x03\xa3\x03/\x03\x9b\x02\x1c\x02\xbc\x01[\x01\r\x01\xa2\x00\x0f\x00\xa5\xffb\xff\x1a\xff\xb6\xfeg\xfeT\xfeh\xfeN\xfe\x10\xfe\xd2\xfd\xb3\xfd\xc1\xfd\xc9\xfd\xc6\xfd\xb6\xfd\xb5\xfd\xb1\xfd\xc5\xfd\xe2\xfd\xf2\xfd\xd6\xfd\xa9\xfd\xde\xfdU\xfe\xca\xfe\xf0\xfe\x1f\xff\x9d\xff\'\x00\x87\x00J\x01\xe6\x01\xbe\x00C\xff\x97\x01<\x07\x14\t\xbc\x03~\xfei\xffn\x03\xf0\x04\x80\x04\xed\x03\xe0\x00H\xfbJ\xf9V\xfd)\x00#\xfc\x88\xf6#\xf6\xdc\xf8 \xf9\x9e\xf6\xed\xf4\xc5\xf4\xb8\xf5\xb3\xf7\x0b\xfa\xc8\xfa\xdf\xf8*\xf7\xae\xf8\x08\xfd\x01\x01\x9c\x01\xd3\xff\xe3\xfeW\x006\x03\x01\x05X\x05\x0e\x05q\x04\x16\x04B\x04\xb0\x04\x14\x04\xe8\x01!\x00\x14\x00\xf6\x00\x7f\x00t\xfe\x0b\xfc\x9a\xfa\xc7\xf9\x10\xf9i\xf9V\xfca\x00\x0e\x01\xa1\xfd<\xfb\xe4\xfc\xd1\x01:\x08\t\x0f$\x13c\x0f^\x08\r\x08=\x10\xe0\x17M\x18\xda\x15\x90\x14\x97\x11A\x0c\x08\nk\x0cr\r\xe6\t\xed\x06\xaf\x05\x90\x01-\xf9\xed\xf3\x92\xf6\xfa\xfad\xfb\xe8\xf7X\xf3r\xee\xfd\xea\xf3\xech\xf3\xec\xf7\\\xf7j\xf4\x84\xf2j\xf2\x1e\xf4\xa4\xf8\x0c\xfe\xe0\x00\x88\x00e\xff+\xff\xc8\xff\xba\x01,\x05l\x08$\t\xce\x07\xc7\x05\x81\x03\x0e\x02\xd5\x02\xa9\x05\x05\x07\xda\x04O\x00\x10\xfc\xb9\xf9\xc7\xf9\xfe\xfbG\xfdc\xfb\x89\xf7\xb9\xf4/\xf4\x8d\xf4u\xf5\xee\xf6\x03\xf8\xdb\xf7\xe2\xf6\xa8\xf6\x85\xf7,\xf9\x84\xfb"\xfe\xcb\xff\xde\xff\x8d\xff9\x00^\x02\xb5\x04^\x06P\x07\xa4\x07y\x07\x18\x072\x07\xc2\x07\x8b\x08\xcf\x08j\x08Z\x07\xaf\x051\x04v\x03\x93\x03\xb6\x03\x0f\x03\xa8\x01\x17\x00\xe0\xfe?\xfe\x1a\xfes\xfe\xcd\xfe\xa0\xfe\xf1\xfd\\\xfdi\xfd\xc0\xfdC\xfe\x0c\xff\x06\x00\x93\x00z\x00<\x00d\x00\xdf\x00\x96\x01X\x02\xf0\x02\x02\x03]\x02l\x01\x02\x01M\x01\x02\x02O\x02\xdc\x01\xd7\x00\x94\xff\x08\xffn\xff\x0e\x00h\xff<\xfe\xab\xfe\x15\x00y\xff\xbf\xfcG\xfbB\xfd\x1a\x00\xca\x00\x1f\xff\xa9\xfc<\xfb1\xfc\t\xff\xa4\x00n\xffn\xfd\xf8\xfc\x84\xfd\xb6\xfd \xfe 
\xff\x92\xff-\xff\xd8\xfe\xd7\xfe\xb2\xfe\xb2\xfe\xd7\xffN\x01\xa4\x01\x16\x01\xb0\x00\xc9\x00\n\x01\x9c\x01\xac\x02\x92\x03\x89\x03\xfb\x02\x8d\x02{\x02\xf2\x02\xd7\x03\x8e\x04F\x04l\x03\xc0\x02\x9c\x02\xac\x02\xf3\x02&\x03\xb5\x02\xae\x01\xce\x00\x93\x00\x8e\x00,\x00\xbe\xffe\xff\xf9\xfei\xfe\x00\xfe\xcb\xfdz\xfdI\xfd~\xfd\xc2\xfdX\xfd\xaf\xfc\xe9\xfc\xb1\xfdv\xfes\xfe\x1a\xfe\x05\xfek\xfeb\xff*\x00;\x00\xe8\xff\x05\x00d\x00\xa5\x00\xb8\x00\x15\x01Q\x01\x01\x01\xc1\x00\xa7\x00\x86\x00*\x00\xf4\xff+\x00;\x00\xc5\xff\xfe\xfei\xfe3\xfe7\xfeZ\xfeh\xfe\x13\xfe}\xfdH\xfdq\xfd\xc8\xfd\n\xfe]\xfe\xc3\xfe\xfb\xfe\x03\xff,\xff\x90\xff#\x00\xb4\x00\x11\x01/\x01\x0e\x01%\x01s\x01\xc7\x01\x08\x02\x16\x02\x02\x02\xaa\x01U\x01K\x01T\x01Q\x01-\x01\xec\x00\x85\x00\xf8\xff\xb9\xff\xeb\xff\x0f\x00\xe5\xff\xa2\xff}\xffc\xff5\xffK\xff\x85\xff\xbc\xff\xcc\xff\xbf\xff\xb3\xff\x9a\xff\xa0\xff\xca\xff\x0f\x003\x00$\x00\xf6\xff\xc6\xff\xba\xff\xc2\xff\xcb\xff\xb3\xff\x97\xffz\xffP\xff.\xff\x15\xff\x13\xff&\xff%\xff:\xff;\xffL\xff_\xff\x97\xff\xe6\xff\x1c\x004\x00F\x00o\x00\xcc\x00\x17\x01)\x01\x19\x01\x0f\x01\x19\x014\x01U\x01r\x01J\x01\xec\x00\xb5\x00\xbd\x00\xcf\x00\x9d\x00U\x00\x01\x00\xb7\xffy\xffh\xffk\xff<\xff\xec\xfe\xaf\xfe\x9a\xfe\x9b\xfe\xab\xfe\xaf\xfe\xc9\xfe\xd2\xfe\xf5\xfe-\xff\\\xff\x83\xff\xbf\xff,\x00\x90\x00\xcb\x00\xe8\x00\r\x01I\x01\x8a\x01\xba\x01\xc8\x01\xa5\x01m\x01j\x01q\x01E\x01\xe2\x00|\x005\x00\xe0\xff\xac\xff\xa5\xffh\xff\xf2\xfeK\xfe\x0f\xfe&\xfe?\xfe\x1a\xfe\x01\xfe5\xfe\xb0\xfen\xfe!\xfeN\xfe&\xfe3\xfe\xc3\xff@\x04\x12\x05\xf6\xff\x86\xfbt\xfe\xae\x05\xb9\x07\xd4\x05\xa6\x03\x88\x01\xb8\xfe\x17\x00\x82\x06\x12\t\xb5\x03$\xfe\x16\xff\xa1\x01\xcc\x00W\xff>\x00S\x00\xd5\xfd\xc6\xfc\x04\xfe\xd4\xfdh\xfb\x17\xfb*\xfe\xbc\xff\x87\xfd\xe9\xfa%\xfb\xf2\xfc0\xfeN\xff&\x00\x8b\xff\x84\xfd/\xfdh\xff\xba\x019\x02\x84\x01\x14\x01\xb1\x00H\x00\xf1\x00I\x02\xbf\x02\xa0\x01\x86\x00\x89\x00o\x00\xbd\xff)\xff\'\xff>\xff\xc5\xfe+\xfeQ\xfds\xfc\x0e\xfc\x94\xfc`\xfdC\xfdz\xfc\x8e\xfb\x07\xfbP\xfb\\\xfcP\xfd,\xfd?\xfc\xbc\xfb\x0c\xfc\x93\xfc\xc4\xfc\x1c\xfd\xa6\xfe\xa4\x00w\x01O\x00W\xff\xc6\x00\xb2\x04\x83\x08\xcc\t\xe9\x08\x9a\x075\x08\xca\n\xd0\r[\x0f\x9a\x0e\xf4\x0c\xa2\x0b0\x0b*\x0b\xa8\n\x97\t\x06\x08\xf5\x05n\x03\x98\x00d\xfe\\\xfd\xdf\xfc\xfe\xfb\xee\xf9:\xf7F\xf5\xda\xf4\xbd\xf5\xe9\xf6y\xf7$\xf7g\xf6j\xf6\xf6\xf7g\xfav\xfc\xb8\xfd_\xfe\xdf\xfe*\xff\xfb\xff\xa0\x01/\x03\xc1\x031\x03m\x02\xba\x01)\x01\x0c\x01\xfe\x00\xa0\x00p\xff\xd6\xfde\xfcU\xfb\xf1\xfa\xfd\xfa\xe9\xfaO\xfas\xf9\xcb\xf8\xac\xf8\r\xf9\x1e\xfa6\xfb\xc6\xfb\xd9\xfb\xe5\xfb\xa3\xfc\xff\xfd\xa8\xff\x0c\x01\x94\x01\x99\x01\xde\x01\xbf\x02\xb0\x03]\x04\xdf\x044\x05\x08\x05\xae\x04\x84\x04\xab\x04\xc9\x04\xa5\x04\x85\x04\x17\x04t\x03\xe1\x02\x88\x02p\x02_\x024\x02\xc8\x01\x1a\x01\x93\x00l\x00\x81\x00\x85\x00]\x00\r\x00\xb1\xffq\xff[\xffO\xff\x13\xff\xec\xfe\xf7\xfe\xe9\xfe\x99\xfe=\xfe\x10\xfe\x0e\xfe\x1a\xfe6\xfeM\xfe=\xfe\x1e\xfe-\xfe\x80\xfe\xe2\xfe\x15\xff6\xffm\xff\xcb\xff"\x00]\x00|\x00\xa7\x00\xce\x00\xf6\x00\x1d\x015\x01/\x01\x0b\x01\xe4\x00\xda\x00\xdc\x00\xac\x00\\\x00\x1a\x00\x02\x00\xea\xff\xc5\xff\x93\xffi\xff0\xff$\xff9\xff^\xff`\xff5\xffB\xff}\xff\xad\xff\xcf\xff\x02\x00.\x00\\\x00\x80\x00\xca\x00\xfb\x00\xf3\x00\xf1\x00\x14\x01c\x01s\x01a\x01S\x01>\x011\x01)\x013\x01#\x01\xd3\x00\x8f\x00\x99\x00\xa1\x00{\x00)\x00\x07\x00\xff\xff\xd6\xff\xb5\xff\xbb\xff\xab\xff\x89\xff6\xff\x0c\xff1\xffW\xff\x05\xff\xdb\xfe`\xff\x11\x00\x97\xff\x04\xff\xb3\xffT\xff<\xfe\x9e\xff\xd0\x04\xd9\x05B\xff\x9e\xfa\xaf\xfe\x81\x05\xe8\x05\x80\x03O
\x02\x08\x00c\xfco\xfe\xcf\x05,\x074\x00J\xfb-\xfeM\x01\x0c\x00\xd8\xfe\xea\xffo\xff\x03\xfdU\xfd\x84\xff\xfc\xfeR\xfc\xa6\xfc\x87\xff9\x00\xef\xfd_\xfc \xfd%\xfe\xfb\xfe\x07\x00H\x00\xa3\xfe\xfd\xfc\xcd\xfd\xdb\xff\xb0\x00\\\x00/\x00\xc5\xff\xdb\xfe\xc6\xfeB\x00B\x01\xd4\x00\xec\xff\xdc\xff\n\x00\xd2\xff\xde\xffQ\x00P\x00\xf4\xff\xef\xff(\x00\xc4\xff\xea\xfe\xb7\xfeq\xff=\x00\x0f\x00J\xff\x8d\xfe\'\xfe\xab\xfe\xf0\xff\xab\x00\xdd\xffx\xfeG\xfer\xff.\x00\x0f\x00\x83\xff\x07\xff\xb8\xfe\xf9\xfe\xb3\xff\xb1\xff|\xfe\x9e\xfd\x12\xfe\xdb\xfe#\xff\x15\xff\x86\xff\x89\xff\x18\xff\x02\x00W\x02\xdc\x04X\x05\xf9\x04M\x054\x06C\x08\xab\n\x8f\x0c\xf6\x0bs\t\x8b\x08\xf5\t\x8f\x0b\xfb\n\x94\x08\x00\x06\x1f\x04\xc7\x02\x02\x02\xa3\x00:\xfe\x91\xfb\xfc\xf9|\xf9V\xf8_\xf6\xf5\xf4\xef\xf4\xcf\xf5`\xf6p\xf6T\xf6n\xf6w\xf7\xe7\xf9z\xfc\xcf\xfd\xdf\xfd-\xfe\xac\xff\x9f\x014\x03\x08\x04\x01\x04V\x03\xc7\x02\x17\x03~\x03\xd8\x02\x10\x01\xae\xff\x16\xffD\xfe\xbd\xfc\\\xfb\x8c\xfa\x92\xf9\x9c\xf8[\xf8\x96\xf8&\xf8P\xf7\x88\xf7\xa5\xf8\xc2\xf9\x8e\xfa\x9c\xfb\xa2\xfcc\xfd~\xfeX\x00\x14\x02\xd5\x02g\x03\x8d\x04\xae\x055\x06i\x06\xf3\x067\x07\x14\x07#\x07\\\x07\xf4\x06\n\x06V\x05\x14\x05\x98\x04\x03\x04\x94\x03\x00\x03\x03\x02\x11\x01\x9f\x00\x8d\x00g\x00\t\x00\x92\xff\x08\xff\xc0\xfe\xdf\xfe+\xffn\xffP\xff\x03\xff\xd5\xfe\x04\xffW\xffk\xffy\xff\x92\xff|\xffK\xff\x16\xff\x02\xff\xe2\xfe\xc5\xfe\xc1\xfe\xbf\xfe\x90\xfe2\xfe\xcf\xfd\xab\xfd\xc4\xfd\xdc\xfd\xc9\xfd\xf5\xfd\x82\xfe\x98\xfe\xe0\xfd\x92\xfd\xae\xfe\x14\x00p\x00n\x00\x96\x00L\x00\x01\x00\x8d\x01\xdc\x03\xe3\x03\x03\x02\xbd\x01e\x03\xb7\x03\x89\x02\xaa\x02\x03\x04\xb3\x03\xec\x01R\x01\xc2\x010\x01$\x00\x93\x00w\x01\x83\x00U\xfe\xc6\xfd\xe9\xfec\xff\xbb\xfe\x8c\xfe\x03\xff\xb8\xfe\xdc\xfd-\xfea\xff\xa7\xff\xf8\xfe \xff\xc5\xff\x95\xff\xf0\xfeG\xff#\x00\x00\x00i\xff\x98\xff\xbe\xff\x02\xff\x85\xfe7\xff\x92\xff\x8c\xfe\xd6\xfd\x7f\xfe\xa5\xfe\x97\xfd\xf7\xfc\xaf\xfd\xe4\xfdW\xfd~\xfd\xd6\xfd#\xfdY\xfc8\xfdc\xfe\xe9\xfd\x1c\xfdf\xfd\xd0\xfdU\xfd\x9c\xfd^\xfeO\xfeN\xfdl\xfdU\xfeY\xfem\xfd\xe5\xfcT\xfdY\xfd\xf1\xfc\x90\xfcc\xfc\x07\xfc\xcb\xfb\xcb\xfb@\xfcn\xfc*\xfdB\xfe\xdf\xffj\x02\x0f\x04\x13\x05\xb4\x053\t\xa1\x0e\xaf\x12,\x14\xd3\x13K\x14n\x16\xe4\x19\x86\x1c\xe8\x1b\'\x19\xaf\x161\x15\x01\x14\x12\x12\xed\x0ec\n\xc7\x05\x9b\x02\x8a\xffd\xfb\xb6\xf6<\xf3\r\xf1\xf9\xee\x01\xed\xd7\xea\xec\xe8\x1b\xe8\x01\xe9\x9a\xea\xb6\xeb\xbb\xec,\xee\x02\xf0{\xf2\xd1\xf5\x0b\xf9\x07\xfb\xdb\xfc\xc2\xff\xff\x02_\x04H\x04M\x05\x8f\x07\xd0\x08\x12\x08,\x07\xaf\x060\x05\x04\x03{\x02\x1c\x03V\x01\xeb\xfc&\xfa\x8e\xfa\xa0\xfa\x81\xf8\xa3\xf6\x84\xf6\x1b\xf6\x85\xf4\xc7\xf4"\xf7\xe1\xf7\x84\xf6R\xf6\xd7\xf9\xba\xfc\x9a\xfcF\xfd\xeb\xff\x1d\x02\xf6\x01L\x03G\x07\xb0\x08\x14\x08\xb3\x08y\n\x11\n\xcc\x08/\x0b\xc8\x0c\x15\n\x13\x07D\x07\xf1\x07\xf1\x05Q\x043\x04\x01\x028\xff\x9e\xfe?\xff\xb3\xfd\x0f\xfbQ\xfax\xfa\xa7\xf94\xf9\x80\xf9!\xf9\xd6\xf7\xab\xf7C\xf9e\xfa\x0c\xfa\xba\xf9w\xfaW\xfb\xb1\xfbT\xfc\x95\xfd<\xfe\xbf\xfd\xee\xfd3\xff\xfc\xff)\xffo\xfe\xea\xfeb\xffB\xff\xe5\xfe.\xfe\x0e\xfd6\xfc\xa5\xfc-\xfd\xae\xfc\x96\xfbG\xfa?\xf9I\xf9w\xfbN\xfc9\xfa\xe9\xf7C\xf8\x13\xfa3\xfa\x04\xfb\x08\xfc\xd4\xfb`\xfa\x14\xfc\x1b\x01\xfd\x02\xbc\x01*\x02-\x07\xd4\x0b\x9a\x0e\xeb\x12<\x17\xab\x17u\x15\xf6\x18\xc7"\xa3(\xe5%\xb6 \x82 \xf5"\xa6"f \xf3\x1d\x1f\x1a\xd6\x13\xfa\x0e\xfb\x0c.\ta\x01\x9d\xf9\x85\xf6\xd0\xf5\x89\xf23\xed\xd8\xe7\xac\xe4\xcc\xe3\xa6\xe4\t\xe6 
fe\xa0\xfa\xc0\xf7z\xf4;\xf3\x9a\xf6E\xfc\xa0\xffJ\xfe\xbb\xfa;\xf7\x04\xf8\xeb\xf9\x0b\xfd\xb9\x01\xf0\x02\xb8\x03\'\x046\x03\xff\x00j\xfdQ\xfc\xfd\xfd7\x01\x84\x03\x17\x02\x11\x00\xa9\xfc=\xfa\xc8\xf8h\xf9\xe2\xfa\xdb\xfb\xc0\xfcC\xfd\xd4\xfc\x9e\xfa\x9d\xf8\xde\xf7\xd0\xf8\x80\xfbv\xfdv\xfe\xdb\xfft\xfe\xa6\xfcp\xfc"\xfd\x85\xff(\x00\xb2\x012\x04\xbd\x03\xc0\x02\xa8\x00\x9c\xfe\xe5\xfe<\xff\x9e\x00\x1b\x02\x85\x01:\xfff\xfc\xf1\xfa\x0b\xfb\xef\xfak\xfa*\xfc\x0f\xfeT\xff\xf9\xfe\xd0\xfc\xb1\xfb&\xfb\xa0\xfc\xfb\x00\xe3\x01\xcf\x00\x87\xfet\xfc\x9f\xfd\xeb\xfbF\xfc\xab\xfe\x12\x01\xb7\x00I\xff\x1f\xfe\x16\xfb\xc2\xf8\x94\x01\xb9\x18\xa7%\x81\x19\xd5\x06f\x0e\x7f$\x83(G\x1f\x80!\xc91y4\xb7*N(\xdd&~\x18\xf3\x08\xb9\x10E$\xaa!\xe4\t\x89\xf9\xe7\xfa\xf3\xf5s\xe8>\xe1\xb5\xe5\x9b\xe6i\xdfe\xe0h\xe6\xfb\xde8\xcc\xfe\xc6R\xd8\x0b\xe9\xd5\xe8\xe7\xe3a\xe9\xb3\xf1\xa1\xf1\xb8\xee\xa5\xf2\xd8\xfa\xce\xfc\xec\xfe\xf4\x08q\x13\x86\x0fh\x00\xa0\xfd\x11\x08\x88\r\x8b\x06:\x02\xff\x06\x16\t\xad\x013\xfa\xa2\xf9\xf5\xf6(\xf0\xfe\xf0G\xf9\xc6\xfa\x8d\xf1\xfd\xea\xe2\xee\xe1\xf2\x9b\xee\xbc\xec^\xf3\xf3\xf8\x8c\xf8\xe9\xf8\x81\xfe\xf0\xff\xee\xf9\xa9\xf8,\x02B\x0b\xf8\n)\tq\x0b!\x0e\x80\x0c\xab\x0bk\x0ep\x10q\x0f_\x0f\xa0\x12\x8f\x14\x91\x10\xb7\t1\x07\x1c\to\n\xc5\t\x11\x08\xb3\x06\xbd\x03\xd3\xff\xdf\xfd\xdb\xfdZ\xfcm\xf9\xdd\xf8\xd7\xfa\xc0\xfb\x14\xfa\xab\xf6w\xf5\xd0\xf6F\xf7]\xf8\xf6\xfa\xdc\xfcY\xfd\x84\xfc[\xfd\xf7\xff\xb3\x00\x14\x00\xee\x00c\x04\x00\x07\r\x07\xcf\x06\xbc\x06\xd8\x05=\x04\x19\x04x\x05k\x06;\x05k\x039\x02Y\x01\xd6\xff\x04\xfe\x90\xfd\x10\xfe\x08\xfe\xc0\xfc\xc7\xfcD\xfc[\xfb\x89\xfa&\xfa\xf2\xfbG\xfd\xdc\xfd!\xfe\x82\xfe\xd6\xfe\x9a\xfeo\xff\x14\x02\xfb\x04s\x03*\x02G\x04\x1e\t\x13\x0b\xca\x07\x17\x07B\x07\xe7\x07|\x07K\x08\xd1\x08\x11\x07\xe5\x04 \x04\xb5\x02g\xff\x9f\xfdU\xfc\xa7\xfc\x9e\xfc\xae\xfa]\xf9y\xf7\xe2\xf5\x04\xf5\xa2\xf4\xe9\xf5&\xf6\xec\xf5\x82\xf6\xdc\xf6\xeb\xf6"\xf6\xb4\xf6g\xf8v\xfaA\xfbc\xfc\xef\xfd\xea\xfe\x9f\xff\xce\xff*\x01 \x031\x04\x12\x05\xfc\x05\xaf\x06\xb7\x06\xf4\x05L\x06?\x07\xb0\x06\x00\x06\xef\x05\xef\x05\xf8\x041\x03}\x02\xf5\x01r\x006\xffK\xfe\t\xfeW\xfd\x9d\xfb7\xfa\x99\xf9E\xf9\x81\xf9r\xf9\xe8\xf8j\xf9\xf8\xf9V\xfau\xfa[\xfa\x81\xfa\x9d\xfa\xc9\xfa\xe4\xfb\x02\xfc 
\xfb\xe8\xfa\xe2\xfa\xd6\xfa8\xf9\x1d\xf8(\xfa\xe1\xfc\xcb\xfcK\xfb@\xfc_\x02\x87\x07\x8f\t\x13\x0e\xba\x15\xfc\x18\x9f\x14\xf9\x14m\x1f6(_&\x9f"\x8d&]*l%9\x1d\xdf\x1a\xa1\x1a\xde\x166\x11\xa7\x0f\xdc\x0c\xd6\x03\xa3\xf9k\xf4\xf0\xf1\x81\xed\x91\xe8Y\xe6T\xe5m\xe3\xfc\xe1\xa8\xe1\xe7\xdf\xf9\xdc\xb7\xdd&\xe3\xe0\xe81\xec\xbe\xedF\xf0^\xf3\xbf\xf5d\xf8\xed\xfa\x97\xfdV\x00\xc2\x03u\x07\xfb\t\xa1\to\x06\t\x04\xfb\x03\xe2\x05u\x06}\x05\x10\x04.\x02\x00\x00\xb1\xfd\xc4\xfb1\xf9n\xf6\xb9\xf5\x9c\xf7\xd9\xf8\x13\xf7\x1c\xf5\xd9\xf4\x1b\xf5a\xf4\xa5\xf4\x92\xf7*\xfam\xfa\x05\xfb\xe1\xfd0\x00\x93\xff\xd8\xfe\xd3\x00\x08\x04n\x05$\x06\xf5\x07$\tR\x08I\x07\xf8\x07j\t\x84\t\xc2\x08\xf2\x08\xe0\t\x8f\t\xcc\x07y\x06\r\x061\x05M\x04-\x04R\x04S\x03O\x01\xfe\xff\x9e\xff\xc8\xfe?\xfd[\xfc]\xfc\x99\xfcn\xfc\x9d\xfb\\\xfbO\xfb\xc9\xfa\xd4\xfa\xf3\xfb\x14\xfd\x80\xfd\xb7\xfdC\xfe,\xff\xe9\xff\n\x00\xa6\x00\xdb\x01\xca\x02\xf4\x02\x12\x03\x99\x03\x96\x03<\x03\x1c\x03$\x03\xf8\x02\xe2\x02\xb3\x029\x02\xb4\x01\x0c\x01i\x00\x15\x00\xc9\xff]\xff!\xff/\xff/\xff\xfd\xfe\x9a\xfe\\\xfep\xfer\xfe\x9e\xfe\x07\xff`\xff\xad\xff\xc6\xff\xaf\xff\xb5\xff\xa2\xff\xaa\xff\x18\x00S\x00d\x00\xa8\x00\x94\x00Z\x00M\x00F\x00\x19\x00\xe5\xff\x0f\x00o\x00\xa6\x00\x84\x00\x14\x00\x01\x00>\x00.\x00,\x00\xc6\x00\xbc\x01Q\x02`\x02\xaa\x02\xe7\x02\xfa\x02Q\x03\xf2\x03m\x04?\x04E\x04\x0e\x05\x1a\x06#\x05\xd5\x02\xde\x02b\x04\x16\x04t\x01q\x00\x06\x02\x0b\x02\x13\xffW\xfd\xa2\xfe\xd6\xfe\x9d\xfb\x88\xf9\x98\xfb\xf9\xfc`\xfa\xb9\xf7\x19\xf9\x1c\xfb\xa8\xf9\x8a\xf7y\xf8\x88\xfa:\xfa\xf7\xf8F\xfa\x97\xfc\xae\xfc\x95\xfb\x8c\xfc\xf8\xfe\xdc\xffe\xff\xae\xffx\x01\xe4\x02\xef\x02\xf8\x02\xad\x03\'\x04\xbc\x03-\x03|\x03\r\x04\x88\x03E\x02\xb2\x01\xef\x01t\x01\xe9\xff\xb6\xfe\x8a\xfe0\xfe/\xfdd\xfc:\xfc\xfe\xfb:\xfb\xbf\xfa\x03\xfbe\xfbZ\xfb$\xfbo\xfb7\xfc\xc2\xfc\xfb\xfcr\xfd\x1a\xfe\xa7\xfe\xc0\xfe\xdf\xfe*\xff)\xff\xbc\xfe{\xfe\xa9\xfe\xcf\xfek\xfe\xce\xfd8\xfd\xe5\xfc\xb4\xfc\xa2\xfc&\xfd\x8d\xfe\xca\x00\xf7\x02f\x04b\x05m\x07\xb0\n\xaf\r\x13\x10\xd6\x12\xc9\x15z\x17\xd0\x17k\x18\xc9\x19Y\x1a6\x19e\x17\x15\x16\x9b\x14\xf5\x11\xb5\x0e\xe1\x0b~\tz\x06\xdd\x02\x89\xff\n\xfd\xb7\xfa\x16\xf8\xcb\xf5W\xf4v\xf3\x10\xf2w\xf0\x9c\xef}\xef\x82\xef\x0f\xef\xd2\xeex\xef?\xf0\xb1\xf04\xf1#\xf2$\xf3\xc9\xf3Z\xf4\x8d\xf5\x01\xf7:\xf8C\xf9\x1a\xfa\x14\xfb\x00\xfc\xa4\xfc2\xfd\xde\xfd\x93\xfe\xfc\xfe\xfc\xfe\xe4\xfe\xf2\xfe\xd1\xfel\xfe\x03\xfe\xcb\xfd\x8b\xfd\x18\xfdk\xfc\x00\xfc\x1a\xfc!\xfc&\xfch\xfc\xc0\xfc\xf5\xfc\xf4\xfc#\xfd\x84\xfd\xfc\xfd\x83\xfe\x0f\xff\xa1\xff\x1d\x00s\x00\xbd\x00\x07\x01F\x01\xb9\x01i\x02\x11\x03\xb0\x03X\x04\xda\x04P\x05\xba\x05Y\x06\x0e\x07\x9c\x07\x07\x08\x95\x08B\tq\t\x1a\t\xe2\x08\xd6\x08V\x08{\x07\xf7\x06\x9a\x06\x85\x05\x01\x04\xe1\x02$\x02\xdd\x00?\xff/\xfe\xa7\xfd\xbb\xfc\x8a\xfb\xfd\xfa\xf8\xfa\x8f\xfa\xdc\xf9\xd7\xf9l\xfa\x81\xfa0\xfa\x93\xfa\x85\xfb\xcd\xfb\xc5\xfb9\xfc\xf4\xfc 
\xfd\x06\xfdj\xfd\x1d\xfe[\xfe7\xfew\xfe(\xffK\xff\x11\xff]\xff\xef\xff$\x00\x1c\x00}\x00\x13\x01F\x01O\x01\x94\x01\xec\x01\xf5\x01\xde\x01\xeb\x01\r\x02\x11\x02\xd7\x01\xaa\x01\xb4\x01\x8d\x01>\x01\x18\x01\x18\x01\x16\x01\x08\x01\x1e\x01:\x01x\x01\x8a\x01\x84\x01\xb7\x01\xed\x01\xfc\x01\t\x02#\x02*\x02\x1a\x02\x08\x02\x01\x02\xd8\x01\x85\x01\x0e\x01\xb8\x00\x99\x00r\x00\x02\x00{\xff\xa2\xff\x06\x00\xdb\xff\xbb\xff\xf2\xff[\x00\x17\x00\xf1\xff|\x01\xb6\x03\xd7\x03\x84\x02\r\x03\x12\x05t\x05\n\x04\x1a\x04\x7f\x05[\x05c\x03\x8f\x02M\x03\x87\x02\xf0\xff\\\xfe\xe9\xfe\xdc\xfe\x8a\xfc~\xfa\x82\xfa\xb1\xfa\xb5\xf9\xc0\xf8\x01\xf9[\xf9\xde\xf8k\xf8Q\xf9\xc8\xfa=\xfbF\xfb\x1b\xfcs\xfds\xfe\x0b\xffi\xff.\x00\xf7\x00\x83\x01\x00\x02_\x02\xb9\x02\xb0\x02q\x02f\x02h\x02\xf9\x01\x17\x01\x81\x00\x82\x00?\x00S\xff\x81\xfe-\xfe\xbb\xfd\x0c\xfd\x82\xfcZ\xfc\x18\xfc\x96\xfb|\xfb\xe0\xfb<\xfc,\xfc\x13\xfcd\xfc\xe7\xfcA\xfd\x97\xfd"\xfe\x9b\xfe\xfc\xfeW\xff\xda\xffi\x00\xab\x00\xae\x00\xdb\x00Z\x01\xc8\x01\xd7\x01\xd5\x01\x12\x02q\x02v\x02[\x02}\x02\xa0\x02u\x02E\x02g\x02o\x02\x1a\x02\xbc\x01\x7f\x01\x14\x01a\x00\xc9\xffL\xff\xa6\xfe\xde\xfdA\xfd\xde\xfcw\xfc\r\xfc\xcd\xfb\xc8\xfb\xc7\xfb9\xfc^\xfd\x04\xff\xa8\x00R\x02N\x04\x88\x06\xa8\x08\xbd\n2\r\xd2\x0f\xca\x11\x04\x13e\x14\xe4\x15o\x16\xd8\x15b\x156\x157\x14\xc7\x11l\x0f\xc8\r\x85\x0b\xec\x07x\x049\x02\xbd\xff\xfe\xfbx\xf8{\xf6\xec\xf4S\xf2\xd0\xef\xcb\xee{\xee\x82\xed[\xecq\xecD\xedy\xedZ\xed%\xee\xbb\xef\xd8\xf0S\xf1c\xf2\x0b\xf4L\xf5\x18\xf6\x15\xf7\xa3\xf8\xe7\xf9\xb1\xfa\xb1\xfb\x04\xfd\x1d\xfe\xbb\xfeQ\xff\x07\x00\x8d\x00\xf2\x00i\x01\xd1\x01\xd6\x01\xa9\x01\xae\x01\xb0\x01Y\x01\xf0\x00\xa1\x00W\x00\xe1\xffy\xffn\xffE\xff\xd0\xfey\xfe\x8f\xfe\xd0\xfe\xab\xfe\x98\xfe\xf2\xfeX\xff\xc6\xff<\x00\xd7\x00g\x01\xd0\x01K\x02\xf7\x02\x9b\x03\xef\x03E\x04\xc0\x04F\x05\xb0\x05\x08\x06K\x06e\x06v\x06\x99\x06\xd9\x06\xf5\x06\xd4\x06\x96\x06G\x06\xe8\x05t\x05\xe5\x04\x18\x04<\x03~\x02\xbe\x01\xcc\x00\xad\xff\x95\xfe\xa5\xfd\xc3\xfc\xe0\xfb"\xfb\x92\xfa\xf3\xf9G\xf9\xd4\xf8\xb6\xf8\xa2\xf8\x84\xf8\x9c\xf8\xf3\xf8e\xf9\xd3\xf9F\xfa\xeb\xfa\x9e\xfbH\xfc\xfb\xfc\xb9\xfd\x93\xfe{\xffL\x00\x1b\x01\xd5\x01\xab\x02x\x03\x02\x04t\x04\xd7\x04C\x05\x9c\x05\xd9\x05\xfb\x05\xf5\x05\xd4\x05\x82\x05%\x05\xd9\x04s\x04\xe3\x03X\x03\xdc\x02Q\x02\xc5\x01@\x01\xb6\x00*\x00\xb3\xff{\xffZ\xff0\xff\x19\xff\n\xff\xfb\xfe\r\xff)\xffD\xffa\xff\x92\xff\xd4\xff\x0b\x00-\x00M\x00l\x00q\x00v\x00\xa7\x00\xd7\x00\xbf\x00\x95\x00\x90\x00\x91\x00~\x00A\x00\x13\x00\xf8\xff\xc4\xfft\xff\'\xff\x0f\xff\xf6\xfe\xb2\xfe|\xfe\x9a\xfe\xc5\xfe\xb7\xfe\xd2\xfe%\xff\x95\xff\xba\xff\xda\xff\x9b\x00\xa5\x01\x03\x02\xee\x01t\x02A\x03p\x03\r\x03"\x03\xa4\x03\x9b\x03\x05\x03\xa1\x02\x85\x02\r\x02\x19\x01\\\x009\x00\xe7\xff\xf5\xfe\x05\xfe\x9e\xfdW\xfd\xa6\xfc\xf4\xfb\x99\xfbg\xfb-\xfb\xd8\xfa\xd2\xfa\xf3\xfa\xe2\xfa\xc4\xfa\xdd\xfa.\xfb{\xfb\xa8\xfb\xc9\xfb\x1d\xfc\x8a\xfc\xdc\xfc\x1b\xfd>\xfdy\xfd\xd4\xfd0\xfe~\xfe\xd8\xfe\x1d\xffI\xffu\xff\xc3\xff\x1d\x00T\x00\x8b\x00\xba\x00\x05\x01x\x01\xdf\x01 
\x02V\x02\x99\x02\xeb\x02;\x03}\x03\xe1\x03\x16\x04\x1c\x04B\x04q\x04\x88\x04W\x04\x13\x04\xe7\x03\xca\x03\xaa\x03V\x03\xeb\x02s\x02\x13\x02\xbe\x01o\x01\x1d\x01\xb8\x00`\x00\x1d\x00\xf0\xff\xcf\xff\xc5\xff\xa3\xff\x8a\xff\xa1\xff\xc7\xff\xd5\xff\xdf\xff\x00\x00,\x00^\x00\x87\x00\xae\x00\xc8\x00\xca\x00\xcd\x00\xd5\x00\xe1\x00\xdc\x00\xbb\x00\x9d\x00v\x00`\x00:\x00\xf9\xff\xb9\xff}\xff;\xff\t\xff\xd4\xfe\x9c\xfex\xfeC\xfe\x12\xfe\xee\xfd\xd5\xfd\xce\xfd\xbd\xfd\xc3\xfd\xc8\xfd\xd1\xfd\xdd\xfd\x01\xfe8\xfek\xfe\xa7\xfe\xdd\xfe$\xff^\xff\x97\xff\xd4\xff\x08\x00S\x00\x8b\x00\xa0\x00\xab\x00\xa1\x00\x96\x00\x81\x00Z\x005\x00\xf4\xff\x91\xff!\xff\xca\xfey\xfe+\xfe\xe4\xfd\xb5\xfd\x8a\xfda\xfd3\xfd2\xfdo\xfd\xd4\xfd\x1f\xfeD\xfe\x89\xfe\xe2\xfe2\xff^\xff\xc3\xff6\x00p\x00\x95\x00\xbc\x00\xea\x00\x01\x01\xfe\x00\xef\x00\xff\x00\x00\x01\xf3\x00\xcb\x00\xbb\x00\xbe\x00\xa9\x00\xa0\x00\x91\x00\x90\x00\x93\x00\x98\x00\x97\x00\xa6\x00\xad\x00\xb7\x00\xc1\x00\xc1\x00\xcc\x00\xc6\x00\xb7\x00\xbc\x00\xba\x00\xa4\x00\x83\x00]\x00E\x00$\x00\xfb\xff\xd6\xff\xc1\xff\x92\xffN\xff\x14\xff\xfd\xfe\xec\xfe\xc1\xfe\xb1\xfe\xb3\xfe\xb3\xfe\xa1\xfe\x97\xfe\xb2\xfe\xd4\xfe\xe6\xfe\xfc\xfe4\xffh\xff\x8c\xff\xbd\xff\xfe\xff2\x00=\x00`\x00\x8f\x00\xba\x00\xe5\x00\xfd\x00&\x01=\x01H\x01[\x01c\x01n\x01\x85\x01\x90\x01\xa4\x01\xb7\x01\xbc\x01\xb3\x01\x84\x01~\x01\xa2\x01\xae\x01\xaf\x01\xbe\x01\xdf\x01\xec\x01\xeb\x01\xf7\x01\n\x02"\x02;\x02Q\x02c\x02s\x02t\x02w\x02q\x02]\x02<\x024\x02/\x02\x13\x02\xfc\x01\xb9\x01X\x01\xef\x00\xa5\x00^\x00 \x00\xe7\xff\x98\xffL\xff\xf0\xfe\x97\xfen\xfeG\xfe\x01\xfe\xf0\xfd\xf7\xfd\xdc\xfd\xcc\xfd\xb5\xfd\xa6\xfd\xb5\xfd\x9e\xfd\x96\xfd\xb8\xfd\xb8\xfd\xa5\xfd\x94\xfd\x92\xfd\x98\xfd\x91\xfd}\xfd\x80\xfd\x85\xfdx\xfdh\xfdO\xfdQ\xfdQ\xfd?\xfdN\xfd`\xfdv\xfd\x90\xfd\x93\xfd\xa5\xfd\xc2\xfd\xe3\xfd\xec\xfd\x05\xfe>\xfe]\xfet\xfe\x95\xfe\xdc\xfe\x15\xff\x15\xffB\xff\x92\xff\xc3\xff\xee\xff!\x00d\x00\x94\x00\xc2\x00\x02\x01M\x01z\x01\x99\x01\xca\x01\xf9\x01!\x029\x02H\x02]\x02f\x02i\x02a\x02V\x02D\x025\x02%\x02\x0e\x02\x02\x02\xf0\x01\xe1\x01\xc8\x01\xa8\x01\x8e\x01\x89\x01z\x01j\x01i\x01W\x019\x01\x1f\x01\x05\x01\xf5\x00\xe8\x00\xcd\x00\xc5\x00\xb4\x00\x8e\x00n\x00E\x00\x19\x00\x00\x00\xdc\xff\xaf\xff\x9a\xff~\xffG\xff:\xff(\xff\xf9\xfe\xe2\xfe\xcd\xfe\xbc\xfe\x9c\xfe\x8e\xfe\x8f\xfe}\xfeW\xfeD\xfe]\xfeT\xfeU\xfec\xfe]\xfea\xfeo\xfe|\xfe\x83\xfe\x9c\xfe\xab\xfe\xb8\xfe\xe2\xfe\xf5\xfe\x12\xff.\xff/\xffK\xffx\xff\x96\xff\x9a\xff\xb4\xff\xda\xff\xed\xff\xfa\xff\xee\xff\xfc\xff\x1b\x00\x10\x00\x13\x00/\x00@\x00B\x00I\x00m\x00{\x00\x96\x00\xb4\x00\xd0\x00\xf3\x00\x1a\x018\x01h\x01\x99\x01\xbe\x01\xd0\x01\xe4\x01\x02\x02\x1e\x02$\x02\x07\x02\xfe\x01\xf3\x01\xcd\x01\x9c\x01n\x01F\x01\r\x01\xba\x00w\x00A\x00\x01\x00\xab\xffU\xff)\xff\xf0\xfe\xbf\xfe\x97\xfel\xfeM\xfe+\xfe\x1c\xfe\x15\xfe\x15\xfe\x14\xfe!\xfeS\xfev\xfe\x92\xfe\xb5\xfe\xe7\xfe\x12\xffD\xff|\xff\xb2\xff\xe7\xff\x0c\x000\x00`\x00~\x00\x89\x00\xa1\x00\xb2\x00\xc7\x00\xcb\x00\xc7\x00\xce\x00\xcd\x00\xc3\x00\xb0\x00\xae\x00\xa1\x00~\x00x\x00a\x00K\x00C\x00,\x008\x007\x00!\x00$\x009\x008\x000\x004\x00=\x00Q\x00M\x00A\x00N\x00S\x007\x00-\x006\x00?\x009\x00 
\x00\x1e\x00\x17\x00\x08\x00\xfb\xff\xe3\xff\xce\xff\xb5\xff\x9b\xff\x93\xff\x8e\xff\x8a\xffz\xffe\xffW\xff^\xffa\xffa\xffe\xffs\xff\x86\xff\x88\xff\x89\xff\x92\xff\xa5\xff\xbc\xff\xcc\xff\xe2\xff\x07\x00\x10\x00\x1c\x004\x00H\x00W\x00n\x00\x8b\x00\x99\x00\xa2\x00\x9b\x00\x96\x00\x8d\x00\x88\x00\x7f\x00z\x00\x7f\x00v\x00j\x00g\x00[\x00J\x00H\x00B\x00A\x00?\x00<\x000\x00/\x00(\x00\x1c\x00\x13\x00\x06\x00\xfd\xff\xf0\xff\xf1\xff\xdd\xff\xbd\xff\xb2\xff\xab\xff\xa1\xff\x86\xffz\xff}\xffr\xffc\xffe\xffm\xfff\xff]\xffW\xfft\xff\x7f\xff\x80\xff\x86\xff\x8e\xff\x99\xff\xa2\xff\xa5\xff\xa3\xff\xab\xff\xae\xff\xb2\xff\xb3\xff\xb5\xff\xaf\xff\xaf\xff\xb1\xff\xa6\xff\xa3\xff\xb0\xff\xba\xff\xb5\xff\xbc\xff\xb8\xff\xb8\xff\xb7\xff\xc2\xff\xc6\xff\xcb\xff\xd4\xff\xda\xff\xeb\xff\xf3\xff\xff\xff\x08\x00\x14\x00\x17\x00\x1f\x00:\x00B\x00A\x00\\\x00g\x00o\x00\x7f\x00\x85\x00\x83\x00\x8f\x00\xa2\x00\x9b\x00\x91\x00\x91\x00\x8f\x00\x8b\x00\x90\x00\x98\x00\x8e\x00y\x00v\x00m\x00k\x00m\x00X\x00D\x00@\x00;\x00#\x00\x0c\x00\xf4\xff\xe1\xff\xdf\xff\xce\xff\xc0\xff\xab\xff\x96\xff\x85\xff{\xffj\xffh\xffl\xff\\\xffR\xffR\xffN\xffF\xff:\xff9\xff1\xff4\xff@\xffF\xffH\xffG\xffK\xffV\xffl\xff{\xff\x89\xff\x9d\xff\xb3\xff\xc9\xff\xd4\xff\xec\xff\x01\x00\x12\x00&\x00<\x00P\x00e\x00v\x00\x84\x00\x95\x00\x9d\x00\xa7\x00\xb2\x00\xbc\x00\xbc\x00\xb6\x00\xba\x00\xbc\x00\xb2\x00\xa8\x00\xa7\x00\xa5\x00\xa2\x00\x9e\x00\x9b\x00\x8e\x00{\x00t\x00n\x00j\x00[\x00S\x00I\x00C\x009\x00,\x00\'\x00\x1d\x00\x12\x00\x0e\x00\x04\x00\x07\x00\r\x00\x00\x00\xeb\xff\xe1\xff\xd6\xff\xd1\xff\xc4\xff\xb2\xff\xa3\xff\x8a\xff\x8a\xff\x8d\xff\x82\xff}\xff~\xffv\xffr\xff\x83\xffy\xffs\xff~\xffv\xffz\xff\x82\xff}\xffr\xff\x80\xff\x90\xff\x9b\xff\xa6\xff\xb1\xff\xbb\xff\xc8\xff\xce\xff\xd5\xff\xe1\xff\xf6\xff\x0c\x00\x15\x00\x14\x00\x1f\x008\x00G\x00W\x00[\x00b\x00t\x00\x84\x00\x7f\x00\x84\x00\x89\x00\x91\x00\x90\x00\x90\x00\x93\x00\x8d\x00\x90\x00\x81\x00{\x00d\x00c\x00\\\x00B\x00A\x00/\x00\x1c\x00\x1d\x00\t\x00\xf2\xff\xee\xff\xdc\xff\xcf\xff\xc8\xff\xb3\xff\xa5\xff\xb4\xff\xb3\xff\xb4\xff\x9c\xff\x8a\xff\xa4\xff\xa2\xff\x99\xff\xae\xff\x9b\xff\x93\xff\xab\xff\xc8\xff\xae\xff\x93\xff\xb0\xff\xe2\xff\xde\xff\xb4\xff\xb8\xff\x91\xff\x9d\xff\xb9\xff\xb1\xff\xc4\xff\xc0\xff\xad\xffa\xffC\xff\x84\xffn\xffP\xff\x92\xff\xbc\xff\xb5\xff\x9a\xff\x97\xff\xe4\xff\xc0\xff\x86\xff\xa5\xff\xae\xff\xfc\xff\x1d\x00v\x00\x04\x01\xf5\x00\x81\x01\x1e\x01\xef\x01>\x01\xa7\x01\xdd\x00\x00\xfd\xdc\x08\xea\x10G\x04\xce\xf3.\xfe\x19\x05\x10\x04\xaa\x02A\xff\xb1\xfa/\xf7\xce\x00\xc7\xf9[\xfc\xbd\xf9\xa5\xf40\xfe\xd9\x00\xc7\xfe5\xfe)\xfdy\xfb \xfb\xec\x05-\x0b\xb7\xfb\xba\xfeC\x04\xcf\x01\xe2\x03m\x03v\x03\xee\xffM\xfa\xe1\x00\x98\tR\x03`\xfad\x01\xae\x01\xfe\xf8\xb1\x00\x96\x04h\xff\xd1\xfb\xa5\xfb\xaa\x011\x00\xa1\xfc\xf5\x01+\xfeH\xf33\x02]\x05`\x00}\xfa\x9e\xfd\xee\x01\xc6\x00\xae\xfe@\xfb\x18\x07\x1b\x04a\xfa\x19\x02s\x06\xab\xfe}\x02\xd5\x02-\xff4\x02c\xfdu\x06~\x08\xcc\xfe\xf8\xf5\x18\x07a\x07\xc9\xfey\xff\x01\xff:\x01K\xff\xb0\x04\x9e\xfft\x03\x9a\xf9\xe8\xfb\xce\x01\xc3\xff\xdf\x02\xda\xf6 
\x03\xcb\x00\x9a\xf6\x96\xfeS\x00\xd5\xff2\xf7&\xfei\x04\x9d\xf9\x03\xfd\xec\x06O\xfei\xf4\xf3\x02=\x06\x90\x01\xad\xfe\x18\x02\xe4\x00\xa4\x01L\xffp\x044\x05\x19\x00\xbb\xff\xa3\xfe2\x04\xec\xff\xdf\x04{\x04c\xfa\xbc\xfd\xc0\x0bG\xf9\xaf\xf9\x8a\x06\xf5\x01n\xfc\x0c\xfap\x068\xfd\xc1\xfa\\\xfcL\x05-\x01]\xf8\xd5\xfd\x99\x039\xf9=\x00\x8b\n\xde\xfbz\xf7A\x04\xd1\np\xf2Z\xff\x9e\x10x\xfb\xb7\xf3\x02\t\xe9\t\x08\xef\xfd\x01\xad\r\xf1\xfa)\xf5\xbc\xffa\x0b\xa8\xfci\xfc\x98\x05O\xfd\x7f\xfa>\x00\x13\t-\x00\xc4\xfa|\xfc\x9a\x05\xd3\x01\x15\xfa\r\x05L\xfe\xce\xf4L\x01\xda\x05\xf3\xfb\x91\x02\x1e\x01\xc5\xefS\x03\x18\x0b\xfe\xf6C\xff\x11\x06\xad\xff\x91\xfe\xfe\xff\x05\x04\xb6\x04\xa1\xf9\x03\xfe\xdf\x05\x99\xfb\x02\x03\xb4\x07u\xf6h\xffd\x05\xe1\xfc\xa1\xfd\x87\xfe\xd1\x01\x94\xff\xe6\x00N\xf8\x1d\x04\xb8\xfeL\xfa\xba\x04\xbb\xfd\x85\xf5\x8b\x07\xc0\x08\xe2\xf5\'\x07\x88\xffD\x01\x1f\x00\xb8\x04\xa8\x05\xca\xf8E\x01z\t\xcf\x00L\xfc\xc2\x02\x06\x01\xef\xf8\xb2\xfbH\x06\xc7\x06%\xf8\x86\xf3A\x0e\x02\xfe\'\xf3\xa5\x00\x14\n\x8a\xfeJ\xf0$\x07\x12\x07\xaa\xfc\x04\xfbf\x01U\x04\xac\xff\x9b\x02\x04\x06J\x01~\xfc\x9c\x00\x1a\x06)\x05\x9a\x05 \x04\x14\xf6\x1e\xfe1\x06O\x03\x9f\x03\x05\xf9,\xfc\x8f\x04P\xfb\xc4\xf8N\x03\xe1\x03o\xf7A\xf9{\xff\xf1\xfe\xcf\x05f\xf9\xe4\xfa#\x04\x82\xfcG\x009\x02\xb3\xfe\xe4\xff\x83\x02\x9e\xfb\x0c\xf9\x97\x05\x87\x04\x02\xfd:\x01\x98\xfa\x7f\xfa\xe2\x07\xab\x05[\xf4[\xfc\xe0\x0br\xfc\xc9\xfc\xdd\t\x16\x00\x93\xf3!\xfa:\x11\xcb\x06\xf9\xf24\x01\xb7\x07:\xf5\xfc\x00\xe4\x10\xf6\xfau\xe8\xdb\x00\xab\x16`\xfd\xea\xf9v\x06\xc3\xfdw\xea\xa8\x02\xff\x13\x10\x03v\xf7\x13\xfaq\x03\xcc\xff\x83\x02J\x06\xa2\xfd\x87\xf3#\xfc\x02\x0c#\x06\xb5\xfc\x86\xfd\xba\xfa\xc3\xf8\x13\xfe`\x0c\xe3\x02\x17\xf5\xb9\x00\x85\x05\x0c\xfeP\xff\x81\x08\xee\x01\xfe\xef8\xfd\xc3\x0b\xd3\x06\xdd\x03\xa1\xfb\xdc\xf7\xbf\xffM\x06\xd0\xfc\xf8\xff\xa0\x04\xaa\xfe\xa2\xfe\x98\xfcG\x01r\x05\xea\xfc\xc3\xf7\xe9\xfc\xd2\x04a\x06x\xfe\xec\xf6c\x01\x07\x03;\xff@\x000\x01\xc3\xfe:\xff\xa0\x05\xb4\x00\xe0\xfe\x1d\x02\xf3\x00\x97\xfd\xcd\xffO\x06\xcb\x02p\xfd\x01\xffS\x02\xbe\x01\x9c\xfd\x81\xfd\xf2\x03C\xff\xcf\xfb^\x01L\x03\xb7\x00e\xfb2\xfe;\x03K\xff\xb7\xf8`\x03\x1e\x04\x04\xfc\xe1\xffO\x01\xcd\xfew\xfb\xcc\x00\xc4\x04\xef\x00\xe6\xfa\x14\x01\x82\x04\xc8\xff\x94\x00\xa1\x00\xbd\xff\xb4\xfaU\x011\x084\x02t\xfc%\xfci\xfcZ\xfe\xf5\x06c\x06q\xf8[\xf6X\x02\x89\x04\xd4\xfeM\xff;\x05\xb6\xfe3\xf8\xbb\x00\xea\x04\xa5\x04U\xff\x98\xfc`\xfa\xf4\x02\x18\n\x92\x01\xe6\xf6\x9e\xfa\x9e\x022\x01A\x02\xd5\x01\xe9\x02D\xfbi\xfbb\x01\xae\x03V\x00!\xfd\x87\x00^\x03\xa2\x00\x07\xfc\xf3\x00\xcd\x00\xd2\xfeO\x00\xca\x02\xef\xfd\x02\xfc\x14\x02\xe9\x05S\x01\x17\xfa\xaf\xfc\xf2\x00j\xff\x0e\x03\x15\x06[\xfe\xaf\xf5z\xfdu\x03Y\x06\t\x01\x14\xf6\x9a\x00\x05\x05/\x04\xbb\xfd\xd0\xfcY\xfer\xfeh\x03\x9a\x03\x98\x00\x8f\xfb\xbe\x00x\x03\x1c\xff\xf8\xffc\x00:\x00\xd4\xfa\xa3\xfd_\t\x14\x08\xb5\xfc?\xf2$\xfb\x19\x08b\x04\xd0\x00T\xfb\xe9\xfa\xa5\xfe\xf6\x03\xd1\x05M\xfc\xb9\xf8N\xfe\x08\x04V\x03.\x03\xf8\x01t\xfa\x8d\xf82\x00\x96\x06\xd9\x04y\x00\x08\xfc\x97\xfb\x99\x02e\x06u\xff\xc0\xfbG\xfb\x14\xfe\xa3\x06M\x06\xcc\xffm\xfc\xf0\xfb\xbe\xfe\x14\x01\xaf\x00&\x03\xcf\x02\xeb\xf9%\xfa2\x05B\x06\xc4\xff\x99\xfa\x9f\xf7,\xff\x9a\x06\xd1\x04e\x01B\xfb\xb7\xfc\xb7\xfd\x1e\x03\xab\x04\xce\x02\x86\xf9+\xf8\x84\x04\x0c\x08`\x05s\xfb\xd6\xf8\xc5\xfe~\x01\x1a\x02e\x05\xd3\xfe@\xf9)\x00a\x06\x1f\x00\xf8\xfe\x83\xfe\xeb\xf7f\xff,\t\xb8\x05{\xfe\xa5\xfa\x16\xfb~\xff\xc5\x02\xdf\x03\n\x01p\xfe\xbc\xfa\xf9\xff)\x05D\x02\xcd\x00H\xfb\x1c\xfa\xaa\x01\xf1\x05\xe2\x00
s\xfd\xfb\xfd\t\x01&\x01m\xff\xbf\xfdK\xff\xf1\x00\xe7\x01\x97\x02S\xfc\n\xff4\xffK\xfd%\x02\x92\x04\xac\x01\xff\xfa!\xfc\x8d\x01\x95\x02\xef\xff\x14\xfe\x83\xfeS\x00:\x026\x03%\x01\x18\xfb\xb8\xf8\x80\x00C\x07~\x05>\xfel\xfb\xf7\xff\x0c\x02\x84\x01\x98\x00}\xff\x82\xfd\x15\x00\xed\x01L\x03\xca\x01\x07\xfem\xfc5\xffW\xff\xca\xffI\x04d\x02\x89\xfc]\xfa\xda\x03\x9d\x04\x8f\xfcD\xff\x06\x00\x86\xfd\xa8\x03\x9f\x03q\xfe1\xfd_\xff;\x010\x04\xa8\x01\x87\xfe^\xfe\x0c\xfd\xec\x00I\x03\xa8\x02\xea\x01\xc7\xfd\xe2\xfa\xda\xfe \x04\xfb\x02\x04\xfeh\xfa\xfd\xfcZ\x00\xb2\x00\xf6\x01|\x02n\xfd\xf3\xf8n\xfe\xf0\x02\x9e\x04\x07\x04\xc9\xfc<\xfa`\x03\x0c\x05\x99\x00\xa7\xff\xd6\xfd[\x00s\x01\xd0\x00\x16\xffO\x00d\xffN\xfc\x93\xffw\x02O\x01\xa5\xff\xac\xfc\xae\xfeS\x04W\x00V\xfa\x82\xfdp\x00\t\x01X\x02\x93\x01\x00\xfd\x86\xfcT\x02`\x02\xdb\xfd>\x00\x8f\x02\xdc\xfeb\x00\xa2\x03I\x01\xe9\xfc\xcf\xfc\x02\x02)\x02\xe1\xfed\x00.\x02\x7f\x00\x05\xfd\xeb\xff<\xff\x7f\xfe\x9c\x03h\x00I\xfc@\xfd\x9b\x02\xa8\x04\xe9\x01-\xfd\xb5\xfb\xfa\xfc\xc3\x02\xf8\x03,\x02:\x02m\xff\x1d\xfd\x84\xfd\x88\x02@\x02\xe2\x00K\xfeb\xfe\x07\x01\xc3\x02\xe3\x01\xb1\xfd\xb3\xfc\t\xfdF\xff\xf6\xff\xb3\x00c\x03\x19\x02\xe1\xfc\xa5\xfaS\xfe\x15\x01s\x01:\x02h\xfe\xa4\xfc\x16\x01B\x03~\x00\xb4\xfey\xfd\x8f\xfc/\x01\xd7\x03\x14\x02\xb3\xfd~\xfe\x81\x000\x01\x05\x01R\xffv\xff\xba\xfd\x15\x01\x92\x03\xa2\x02\xf5\x00\n\xfe>\xfc\xe3\xfd\x19\x03E\x04\xa3\x00\x18\xfd\x06\xfe\x91\xff_\x01\x1e\x03\x9f\xfe\xb1\xfb\xc2\xfdU\x01\x81\x02@\x01\xb3\xff\xab\xfd)\xfe\xa7\xff\xca\x00w\x00~\xff\xc0\xffm\x00\xc6\xff\xb3\x00\x00\x03\xd5\xfe\x83\xfc\xa8\xfeN\x00\xfa\x00\xd0\x02\xa4\x024\xfd\x1f\xfe\xb9\x00\x9c\xffb\x00n\x00y\xff`\xff\x8a\x00\x88\x01\x06\x00\t\xff\xfa\xff\x81\xfe\x19\xfe\xf6\xff0\x01$\x01\xc0\xff\x87\xfe\xc7\xff,\x00\x11\x00\xc1\xff\x1c\xff`\xff\xca\x00*\x00>\xff\xc0\x01)\x01U\xfek\xfej\xff\xd2\x00\x9c\x02\xf3\x01j\xff\xd8\xfd\xcf\xfe\xb6\x01\xb8\x01\x94\xff\x9a\xff\xfa\xfe\x99\xff\x95\x01\xa1\x01S\x00\x87\xfe=\xfe\xe9\xfes\x00\xd7\x00\xd6\x01{\x00\xae\xfc\x85\xff\x83\x01\xad\x00!\x00\xdf\xfdU\xfe\xac\x00\xa5\x02\xb4\x01]\xffl\xfe7\xfe\xcd\xff\xeb\x00h\x00\x0b\x00\xbc\xff_\x00H\x01\x8c\x00\xdb\xff\x10\xff\x04\xfe\xe6\xffq\x01\xdf\x00\xfe\x00\x12\x01\xae\x00C\xff\xe8\xfd\x1d\xfe\xa2\xffc\x01\xad\x01\x02\x01\xd4\xff>\xff\xda\xff\xae\xff{\xff\xdb\xff#\x00\xd0\x00%\x01\x17\x00/\x01(\x02E\x00\xa6\xfd\xff\xfc\xa9\xff\xdd\x01\n\x02\x8d\x00\xb0\xfe\xe6\xfe\x0f\x00\xb1\x00#\x00\x1f\xff\x88\xfe\x19\xff\xde\x00\x90\x01\x90\x01\x95\x00\x83\xfe\xa6\xfd\x83\xfe\xbe\x00\xbf\x01\x16\x01F\x00W\xff\x8e\xff\x7f\x00T\x01\xd0\x00Q\xff\xf3\xfe\xb0\xff`\x01J\x01\xed\x00\x9e\xff\x7f\xfd\x03\xfe\x14\x00\xff\x00\x87\x00\xe7\xffY\xff\xe4\xff\xca\x00\x89\x00\x91\xff\xf3\xfe\xc5\xfe\x8a\x00\x93\x01\xaf\x00\x85\x00\x91\x00\xed\xffj\xff\xb1\xff\x93\x00c\x00\xdb\xff_\x00R\x01\x06\x01u\xff\x11\xff\'\xff\xa3\xff\xb0\xff\\\xffq\xffr\xff/\x00\xdc\x00\xd7\xff\xd2\xfe\x80\xfe\xb7\xfel\x00\xab\x01\x10\x01\x10\x00|\xffT\xff\xbc\xff>\x00\xc4\x00\xc4\x00Q\x00\x82\xff\x85\xff^\x00\xb6\x00\x1b\x00%\xff\x0b\xff\xaa\xff\x97\x004\x01\x8e\x00R\xff\xe1\xfeS\xff\xe3\xff+\x00\xff\xff\x02\x00\xe6\xff\xd5\xff\x91\xff8\xffx\xff\xf6\xffE\x00\x08\x00\xed\xff\x83\xff\xaf\xff\xb0\x00\xef\x00\xaf\xff\xcd\xfe\x98\xffs\x00\xc0\x00\x1c\x01\xa0\x00\xf7\xfe\xb4\xfe\xd1\xff\x03\x008\x00\xb7\x00\x89\xff&\xfe^\xff@\x01m\x01O\x00\xa9\xfe\xfe\xfd>\xff\xff\x00}\x01\xa0\x00L\xff\x8e\xfeD\xff8\x00\xe5\x00\xd1\x00C\xff\xfc\xfe\x00\x00\x1b\x01\x10\x01\xa3\xff\xc7\xfee\xff\x89\x00\x03\x01u\x00\xa7\xff\x99\xff\xd9\xff3\x00,
\x00\xc4\xffM\xff\x83\xff5\x00h\x00\x04\x00\xc6\xff\x92\xff`\xff\x8d\xffz\xffg\xff\xfa\xff\x82\x00{\x00\x1b\x00\xbc\xff\x93\xffs\xff\xbd\xff6\x00\x9f\x00\x8b\x00\xe1\xff\xf8\xff\x85\x00\xc1\x00,\x00*\xff\xef\xfe\x14\x00>\x01\x19\x01N\x00\xa9\xff\x7f\xfff\xff*\x00\xab\x00\x00\x00\xb1\xff\xe5\xff?\x00\xcc\x00\xcd\x00z\xff~\xfe;\xff\x91\x00\x00\x01\xc3\x00A\x00\x86\xff\xe2\xfe\x91\xff\xea\x00\xb3\x00\xfa\xff{\xff\xa1\xffM\x00\xe5\x00\xda\x00\xf2\xff\x01\xff\xcb\xfe\xc8\xff\xdb\x00\xd2\x00\\\x00\xd4\xffC\xffm\xff\xf6\xff\xf4\xff\xc7\xff\x9b\xff\xaf\xff<\x00\x9a\x00d\x00\xd9\xffT\xffQ\xff\x0e\x00\xea\x00\xc3\x001\x00V\x00\x7f\x009\x00F\x00\x11\x00}\xff\x8c\xff\x95\x006\x01\xd1\x00\x04\x00\x1e\xff\xae\xfeV\xff\x8e\x00\xfc\x00\x8a\x00\x16\x00_\xffr\xff4\x00o\x00<\x00z\xffE\xff\x17\x00/\x01$\x01F\x00h\xff\x15\xff\xb2\xff\xcf\x00M\x01\xf6\x00q\x00\xf6\xff\xcc\xff0\x00k\x00\xb8\xffM\xff\x97\xff!\x00\x97\x00o\x00\xbf\xff\x0c\xff\xef\xfea\xff\xd8\xff\x15\x00\x00\x00\xc5\xff\xba\xff\xcb\xff\xed\xff\xf8\xff\xbf\xff\x94\xff\xa4\xff\xf9\xffT\x00m\x00-\x00\xa7\xff\x8b\xff\xda\xff?\x00\xbc\x00\xc5\x00b\x00\xf4\xff\xec\xff\xf4\xff\xde\xff\xe1\xff\xe9\xff\xfa\xff\n\x00(\x00.\x00\xd2\xffS\xffx\xff\xe8\xff\x0c\x00\x10\x003\x002\x005\x00U\x00\xff\xff\xc0\xff\xdb\xff\x07\x008\x00j\x00s\x00c\x00L\x00\x18\x00\xae\xff\x96\xff\xf4\xff\x05\x001\x00\x8a\x00W\x00\xbf\xff\x88\xff\xbf\xff\xd5\xff\xfe\xff\x03\x00\xc2\xff\xc6\xff9\x00\xa5\x007\x00_\xff\xf6\xfe`\xff$\x00Y\x00\xf0\xff\xae\xff\x9e\xff\xb7\xff\xee\xff\xdf\xff\x89\xffG\xff\x93\xff\xe9\xffJ\x00g\x00\xf8\xff\x95\xff\xa6\xff\xe5\xff\x18\x00.\x00\x0b\x00\xb7\xffx\xff\xec\xffb\x009\x00\x94\xffR\xff\xa0\xff\xcf\xff\x1e\x00U\x00\xf4\xffp\xff\x92\xff\x00\x008\x001\x00\xf5\xff\x8f\xff\x86\xff\xe2\xff:\x005\x00\xe4\xff\x9f\xff\xa1\xff\xeb\xffV\x00K\x00$\x00!\x00D\x00t\x00g\x00\x1e\x00\xf4\xff\x0b\x00F\x00e\x00<\x00\xdd\xff\xb0\xff\xca\xff\xf5\xff\xff\xff\xca\xff\x9b\xff\xa1\xff\xe5\xff:\x00N\x00\xec\xfft\xffs\xff\xd3\xffA\x00;\x00\xee\xff\x98\xffw\xff\xde\xff]\x00$\x00~\xffW\xff\xaa\xff!\x00r\x002\x00\xb3\xffi\xff\xb0\xff3\x00_\x00 \x00\xd8\xff\xc3\xff\xd8\xff*\x00~\x00I\x00\xb5\xff\\\xff\x82\xff\xfc\xffi\x00w\x00\x0c\x00\x8c\xff\x84\xff\xf2\xff|\x00\x8b\x00.\x00\xe0\xff\xfd\xff\\\x00\xa5\x00\x93\x00\'\x00\xd1\xff\xce\xff\x12\x00Y\x00K\x00\x0e\x00\xe4\xff\xd0\xff\xf9\xff/\x00 \x00\xde\xff\xc2\xff\x03\x005\x00?\x00-\x00\r\x00\xee\xff\xf0\xff*\x00S\x00C\x00\r\x00\xf2\xff\x13\x00T\x00}\x00M\x00\xc4\xff\x9c\xff\x05\x00r\x00f\x00$\x00\xf5\xff\xc8\xff\xf2\xffH\x00M\x00\xf0\xff\xb7\xff\xda\xff\t\x00E\x00y\x008\x00\xb9\xff\x9a\xff\xef\xffF\x00M\x00\t\x00\xd4\xff\xdc\xff&\x00]\x00?\x00\xf0\xff\xdb\xff+\x00S\x00M\x00A\x00 
\x00\xee\xff\xff\xffA\x00:\x00\xf2\xff\xc0\xff\xcd\xff\xf5\xff\x1b\x00\x1e\x00\xfc\xff\xcf\xff\xd3\xff\x01\x00\n\x00\xdb\xff\xd5\xff\xef\xff\x00\x00\x07\x00\xfc\xff\xe6\xff\xc3\xff\xd5\xff\xe6\xff\xfb\xff\x06\x00\xf1\xff\xd3\xff\xf0\xffG\x002\x00\xf8\xff\xd4\xff\xe7\xff\x15\x00F\x008\x00\xf6\xff\xbd\xff\xcb\xff\x15\x001\x00\x08\x00\xb2\xff\x8d\xff\xb7\xff\x07\x002\x00\x08\x00\xbe\xff\x9a\xff\xb5\xff\xfa\xffD\x00%\x00\xd8\xff\xb9\xff\xda\xff:\x00{\x008\x00\xbd\xff\xa6\xff\xdf\xff8\x00h\x004\x00\xaa\xff\x8c\xff\xf6\xff3\x003\x00\x04\x00\xc7\xff\xa4\xff\xea\xffB\x000\x00\xeb\xff\xb5\xff\xa8\xff\xce\xff\x12\x004\x00\x14\x00\xd9\xff\xbe\xff\xda\xff\x04\x00\x0f\x00\x06\x00\xc5\xff\xb4\xff\xf6\xff0\x008\x00\x0f\x00\xde\xff\xa9\xff\xc2\xff\xf0\xff\x00\x00\xf2\xff\xd2\xff\xbe\xff\xb2\xff\xd6\xff\xf7\xff\xfa\xff\xd8\xff\xc6\xff\xde\xff\x08\x00"\x00\x00\x00\xdb\xff\xcd\xff\xe9\xff\xf9\xff\x1b\x00%\x00\x04\x00\xf9\xff\x06\x00\x0c\x00\xff\xff\xf4\xff\xf4\xff\xed\xff\xe5\xff\xf1\xff\xfc\xff\xe6\xff\xc2\xff\xaf\xff\xb6\xff\xc6\xff\xdc\xff\xe2\xff\xd7\xff\xd3\xff\xd3\xff\xd3\xff\xd3\xff\xdf\xff\xe4\xff\xd4\xff\xea\xff\x03\x00\x07\x00\t\x00\x0b\x00\xf5\xff\xe1\xff\xee\xff\x16\x00*\x00\x1a\x00\x0b\x00\x04\x00\x07\x00\x0c\x00\n\x00\x02\x00\xfe\xff\xf8\xff\xf4\xff\x02\x00\x12\x00\t\x00\x03\x00\x02\x00\xf7\xff\xf4\xff\xfe\xff!\x00\x1e\x00\x05\x00\x0f\x00\x11\x00\x01\x00\xfc\xff\x06\x00\x01\x00\x01\x00\x06\x00\x03\x00\x05\x00\x07\x00\x00\x00\xfe\xff\xfc\xff\x00\x00\x10\x00\x0e\x00\t\x00\n\x00\x14\x00\x19\x00\x1a\x00\x11\x00\x00\x00\xfe\xff\x08\x00\x17\x00\x15\x00\x02\x00\xf2\xff\xfa\xff\x0f\x00\x1c\x00\x04\x00\xfc\xff\x11\x00\x1d\x002\x00=\x00A\x00(\x00\x14\x00\x1d\x00-\x001\x00*\x00\x19\x00\x0e\x00\x16\x00\x1f\x00.\x00\x1d\x00\xfe\xff\xf2\xff\x05\x00,\x005\x00(\x00\t\x00\xfb\xff\r\x00%\x003\x00%\x00\x13\x00\x11\x00\x16\x00"\x00+\x00!\x00\t\x00\x00\x00\x12\x00\x05\x00\x07\x00*\x00\x11\x00\xef\xff\xe4\xff\xf4\xff\xfb\xff\xfa\xff\xeb\xff\xd5\xff\xdf\xff\x0f\x00*\x00\x0c\x00\x00\x00\x03\x00\x05\x00\xfc\xff\xfd\xff\x07\x00\x05\x00\n\x00\x14\x00\x12\x00\x01\x00\xfe\xff\xf3\xff\xe9\xff\xe2\xff\xf8\xff\x05\x00\xff\xff\xfa\xff\xf7\xff\xf4\xff\xf0\xff\xf6\xff\xfc\xff\xfb\xff\xfe\xff\x04\x00\x01\x00\xfe\xff\x02\x00\xfe\xff\xf5\xff\xeb\xff\xef\xff\xfc\xff\x00\x00\xf6\xff\xdf\xff\xd6\xff\xea\xff\xf7\xff\xf9\xff\xea\xff\xd7\xff\xdc\xff\xf8\xff\x06\x00\xfc\xff\xf2\xff\xfc\xff\xf4\xff\xfe\xff\x18\x00\x17\x00\xff\xff\xf5\xff\x01\x00\x08\x00\x0c\x00\r\x00\xfc\xff\xef\xff\xf0\xff\xf4\xff\xf4\xff\xe9\xff\xdf\xff\xcf\xff\xcf\xff\xe8\xff\xf1\xff\xf4\xff\xf0\xff\xed\xff\xe4\xff\xf1\xff\x03\x00\x06\x00\x00\x00\x04\x00\x0c\x00\xfb\xff\x0b\x00"\x00\x0e\x00\xf0\xff\xf8\xff\x0e\x00\x12\x00\x02\x00\x0e\x00\x05\x00\xdf\xff\xea\xff\xf3\xff\xec\xff\xda\xff\xd0\xff\xc9\xff\xc4\xff\xc6\xff\xc9\xff\xbc\xff\xaa\xff\xb6\xff\xc1\xff\xc8\xff\xd3\xff\xc8\xff\xce\xff\xd3\xff\xdf\xff\xec\xff\xeb\xff\xea\xff\xf2\xff\x00\x00\t\x00\x10\x00\n\x00\xf8\xff\xf6\xff\xf7\xff\xf1\xff\xf5\xff\xf9\xff\xf3\xff\xea\xff\xe4\xff\xef\xff\xe3\xff\xd8\xff\xd6\xff\xda\xff\xe9\xff\xf1\xff\xf7\xff\xf0\xff\xe5\xff\xde\xff\xeb\xff\xfe\xff\xfb\xff\xfa\xff\xfe\xff\xfd\xff\x00\x00\n\x00\x0c\x00\xfe\xff\xf2\xff\xfb\xff\x05\x00\x15\x00\x1d\x00\x0c\x00\x03\x00\x05\x00\x0c\x00\r\x00\x10\x00\x11\x00\x10\x00\x0b\x00\x12\x00\x12\x00\x08\x00\x06\x00\x00\x00\x03\x00\x0f\x00"\x00\x1e\x00\x0f\x00\t\x00\x07\x00\x06\x00\x15\x00\x16\x00\x00\x00\xf6\xff\x00\x00\x13\x00\x0b\x00\xf9\xff\xe9\xff\xe1\xff\xe3\xff\xf7\xff\x00\x00\xee\xff\xd9\xff\xd8\xff\xec\xff\xf0\xff\xf7\xff\xfa\xff\xf6\xff\xf8\xf
f\x05\x00\x16\x00\x14\x00\t\x00\x04\x00\x04\x00\x0e\x00 \x00,\x00&\x00\x1f\x00\x19\x00%\x00)\x00\x1b\x00\x10\x00\n\x00\x07\x00\r\x00\x18\x00\x04\x00\xfc\xff\t\x00\x07\x00\x05\x00\x0b\x00\t\x00\r\x00\x03\x00\x02\x00\x05\x00\r\x00\x1e\x00 \x00 \x00\x1d\x00\'\x00"\x00\x13\x00\x12\x00\x18\x00\x14\x00\x13\x00\x12\x00\x1c\x00 \x00\x1f\x00#\x00\x18\x00\x17\x00\x1a\x00\x1a\x00\x18\x00 \x00\x1c\x00\x10\x00\x13\x00\x04\x00\xf9\xff\xfe\xff\x0e\x00\x11\x00\t\x00\x05\x00\x00\x00\xfe\xff\x08\x00\t\x00\xfe\xff\xf8\xff\xf8\xff\x00\x00\x00\x00\x00\x00\x08\x00\xfb\xff\xfc\xff\x05\x00\x04\x00\x00\x00\xfe\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xfb\xff\x01\x00\x01\x00\xf6\xff\xf6\xff\xfc\xff\xfc\xff\xf7\xff\xf9\xff\xfc\xff\xf9\xff\xf8\xff\xff\xff\xfe\xff\xf7\xff\xf4\xff\xed\xff\xe8\xff\xed\xff\xec\xff\xe3\xff\xe8\xff\xe4\xff\xde\xff\xe1\xff\xe7\xff\xde\xff\xdc\xff\xe2\xff\xe1\xff\xe7\xff\xf6\xff\xfc\xff\xfb\xff\x00\x00\x0c\x00\x07\x00\x04\x00\x06\x00\xfd\xff\xfd\xff\xfe\xff\xfb\xff\xf8\xff\xed\xff\xee\xff\xf3\xff\xf1\xff\xef\xff\xeb\xff\xea\xff\xeb\xff\xf3\xff\xfc\xff\xfa\xff\xf7\xff\xfb\xff\x05\x00\x06\x00\x00\x00\xfe\xff\xff\xff\xfa\xff\xf3\xff\xfa\xff\x01\x00\xfe\xff\xfb\xff\x00\x00\xf7\xff\xf9\xff\xfa\xff\x02\x00\x06\x00\x05\x00\x02\x00\xff\xff\x05\x00\xfd\xff\xfd\xff\xf5\xff\xf4\xff\xf3\xff\xea\xff\xe9\xff\xe6\xff\xe4\xff\xdb\xff\xd6\xff\xdb\xff\xe5\xff\xe4\xff\xe3\xff\xd4\xff\xd4\xff\xe1\xff\xdd\xff\xe6\xff\xe8\xff\xf1\xff\xf5\xff\xf4\xff\xf6\xff\xf4\xff\xf4\xff\xe8\xff\xe5\xff\xeb\xff\xef\xff\xf1\xff\xf1\xff\xe4\xff\xdb\xff\xe0\xff\xe2\xff\xe9\xff\xec\xff\xeb\xff\xeb\xff\xe9\xff\xf0\xff\xf8\xff\xf0\xff\xee\xff\xf5\xff\xfb\xff\xf6\xff\xfc\xff\x00\x00\xfa\xff\x01\x00\x00\x00\x05\x00\x06\x00\x03\x00\xff\xff\xf6\xff\x03\x00\x06\x00\xfe\xff\x02\x00\x06\x00\xfc\xff\xfc\xff\x00\x00\xfe\xff\x00\x00\xfe\xff\xf9\xff\xfc\xff\x03\x00\x03\x00\x00\x00\xff\xff\x02\x00\x10\x00\x11\x00\x10\x00\x13\x00\x1c\x00"\x00\x1b\x00\x14\x00\x13\x00\x11\x00\x10\x00\x0b\x00\x00\x00\xfc\xff\xfd\xff\x00\x00\xfc\xff\xf5\xff\xf2\xff\xf7\xff\xf9\xff\xfe\xff\t\x00\x0c\x00\x12\x00\x10\x00\x06\x00\x02\x00\r\x00\r\x00\x08\x00\x03\x00\x04\x00\x05\x00\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xf7\xff\xff\xff\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\xfd\xff\xfe\xff\x01\x00\x07\x00\x0c\x00\x12\x00\x18\x00\x11\x00\x14\x00#\x00\x1f\x00\x1f\x00(\x00\x1f\x00\x1e\x00&\x00\x1f\x00\x17\x00\x11\x00\x15\x00\x16\x00\x14\x00\x16\x00\x13\x00\r\x00\x15\x00\x1b\x00\x0f\x00\x16\x00 
\x00\x1c\x00\x16\x00\x13\x00\x0f\x00\x0f\x00\x0f\x00\x10\x00\x0e\x00\n\x00\n\x00\x02\x00\x05\x00\x04\x00\x04\x00\x00\x00\x00\x00\x04\x00\x05\x00\x04\x00\xfd\xff\xf8\xff\xfb\xff\xfa\xff\xfa\xff\xf8\xff\xf8\xff\xfb\xff\xfa\xff\xf5\xff\xf7\xff\xf7\xff\xfa\xff\xf6\xff\xf6\xff\xfa\xff\xf9\xff\xf9\xff\xf8\xff\xfb\xff\xff\xff\x00\x00\xfb\xff\xfb\xff\x04\x00\x04\x00\xfd\xff\xfa\xff\x05\x00\x01\x00\x07\x00\x13\x00\r\x00\x03\x00\x04\x00\x01\x00\xff\xff\xfc\xff\xfd\xff\xfb\xff\x02\x00\t\x00\xff\xff\xfd\xff\xfd\xff\xfd\xff\xf4\xff\xee\xff\xfb\xff\x01\x00\xfb\xff\xfb\xff\x00\x00\xff\xff\x01\x00\x06\x00\x07\x00\x00\x00\x02\x00\x07\x00\x00\x00\x01\x00\xfb\xff\xf3\xff\xee\xff\xee\xff\xf1\xff\xf4\xff\xee\xff\xf5\xff\xfb\xff\xef\xff\xea\xff\xe8\xff\xef\xff\xec\xff\xf2\xff\xf1\xff\xec\xff\xee\xff\xeb\xff\xe7\xff\xe4\xff\xed\xff\xef\xff\xec\xff\xec\xff\xea\xff\xef\xff\xeb\xff\xe7\xff\xed\xff\xf7\xff\xf9\xff\xff\xff\xfe\xff\xf3\xff\xef\xff\xee\xff\xf3\xff\xf0\xff\xee\xff\xee\xff\xea\xff\xe6\xff\xe6\xff\xe9\xff\xe7\xff\xf0\xff\xe9\xff\xe3\xff\xe7\xff\xe7\xff\xec\xff\xef\xff\xf1\xff\xf4\xff\xf5\xff\xf1\xff\xf4\xff\x00\x00\xf8\xff\xf9\xff\x00\x00\xfb\xff\xfc\xff\xfd\xff\x00\x00\xf8\xff\xf9\xff\xfb\xff\xf4\xff\xef\xff\xf8\xff\xf6\xff\xf4\xff\xf8\xff\xf9\xff\xfb\xff\xfa\xff\xfc\xff\xf8\xff\xff\xff\x00\x00\xfd\xff\xf7\xff\xf6\xff\xf8\xff\xfc\xff\x06\x00\x04\x00\x02\x00\x08\x00\x05\x00\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfb\xff\xf8\xff\xef\xff\xf2\xff\xf2\xff\xf1\xff\xf5\xff\xec\xff\xec\xff\xf0\xff\xef\xff\xec\xff\xec\xff\xf1\xff\xf6\xff\xf4\xff\xf5\xff\xfa\xff\xfe\xff\x05\x00\x04\x00\x04\x00\x05\x00\x03\x00\x03\x00\x04\x00\t\x00\x0c\x00\t\x00\t\x00\t\x00\x08\x00\x08\x00\t\x00\r\x00\r\x00\x0c\x00\n\x00\t\x00\x08\x00\x08\x00\x0c\x00\x0c\x00\x0b\x00\n\x00\x08\x00\n\x00\x0c\x00\x03\x00\x04\x00\x00\x00\x05\x00\x0e\x00\r\x00\x0c\x00\r\x00\x15\x00\x0e\x00\t\x00\x0e\x00\x11\x00\x0b\x00\x0b\x00\t\x00\x0e\x00\x15\x00\x14\x00\x15\x00\x0e\x00\x0f\x00\x14\x00\x16\x00\x12\x00\x16\x00\x19\x00\x12\x00\x11\x00\x0b\x00\x01\x00\x01\x00\n\x00\x0b\x00\x0f\x00\x14\x00\x12\x00\x10\x00\x17\x00\x14\x00\x16\x00\x15\x00\n\x00\x0b\x00\x0f\x00\r\x00\x0f\x00\n\x00\r\x00\x10\x00\r\x00\r\x00\x10\x00\r\x00\x04\x00\x00\x00\x07\x00\n\x00\n\x00\n\x00\x08\x00\x05\x00\x02\x00\x06\x00\x05\x00\x07\x00\x04\x00\x02\x00\x05\x00\x02\x00\xff\xff\xfa\xff\xfa\xff\xf9\xff\xf4\xff\xf3\xff\xf6\xff\xf4\xff\xf7\xff\xf7\xff\xf8\xff\xf6\xff\xf8\xff\xee\xff\xea\xff\xee\xff\xf2\xff\xf6\xff\xfa\xff\xfc\xff\x00\x00\x01\x00\x05\x00\x06\x00\x04\x00\x03\x00\x00\x00\x01\x00\xfd\xff\xfb\xff\xfa\xff\xf4\xff\xf4\xff\xf7\xff\xf6\xff\xf3\xff\xf3\xff\xf2\xff\xf3\xff\xf5\xff\xf4\xff\xf4\xff\xfc\xff\xfe\xff\x00\x00\x02\x00\xfe\xff\xfa\xff\xfc\xff\xfc\xff\xf7\xff\xf8\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xfc\xff\xfd\xff\xf6\xff\xf5\xff\xf5\xff\xfa\xff\xfc\xff\xfa\xff\xfc\xff\xf9\xff\xfa\xff\xf4\xff\xf6\xff\xf4\xff\xee\xff\xee\xff\xf0\xff\xee\xff\xed\xff\xea\xff\xea\xff\xea\xff\xe5\xff\xeb\xff\xe8\xff\xed\xff\xf2\xff\xed\xff\xf3\xff\xf1\xff\xf5\xff\xf4\xff\xf7\xff\xfa\xff\xf2\xff\xf6\xff\xf0\xff\xea\xff\xef\xff\xf2\xff\xef\xff\xec\xff\xe6\xff\xe3\xff\xe3\xff\xe1\xff\xe5\xff\xec\xff\xe6\xff\xe6\xff\xea\xff\xee\xff\xf4\xff\xf3\xff\xf5\xff\xfb\xff\xfd\xff\xf9\xff\xfd\xff\xfa\xff\xf5\xff\xfa\xff\xf9\xff\x00\x00\xfc\xff\xff\xff\xff\xff\xf3\xff\xfb\xff\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xff\x00\x00\xff\xff\xfc\xff\x00\x00\xff\xff\xfe\xff\x01\x00\x04\x00\xff\xff\xfc\xff\xff\xff\x02\x00\x03\x00\x05\x00\x03\x00\x03\x00\x06\x00\x07\x00\x04\x00\x03\x00\x07\x00\x03\x00\x00\x00\xfe\xff\xf9\xff\xfb\xff\xf
9\xff\xfa\xff\xfa\xff\xf7\xff\xf4\xff\xf5\xff\xf4\xff\xf9\xff\xfe\xff\x00\x00\x07\x00\x05\x00\x04\x00\x02\x00\x04\x00\x08\x00\x03\x00\x01\x00\x01\x00\x04\x00\x01\x00\x02\x00\x05\x00\x02\x00\x00\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\n\x00\x02\x00\x04\x00\x05\x00\x07\x00\x04\x00\x07\x00\x02\x00\x00\x00\x04\x00\x0c\x00\t\x00\x08\x00\x0c\x00\x0b\x00\r\x00\r\x00\x0c\x00\x0c\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x18\x00\x16\x00\x18\x00\x15\x00\x13\x00\x14\x00\x13\x00\x12\x00\x0f\x00\r\x00\t\x00\x07\x00\x06\x00\x06\x00\x06\x00\x04\x00\x00\x00\xfc\xff\x01\x00\x03\x00\x07\x00\x02\x00\x02\x00\x07\x00\x07\x00\t\x00\x07\x00\t\x00\t\x00\x06\x00\x08\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xff\xff\x02\x00\x04\x00\x07\x00\x06\x00\x02\x00\x02\x00\x04\x00\x01\x00\x01\x00\x05\x00\x03\x00\xff\xff\xfa\xff\xff\xff\xff\xff\xfa\xff\xf9\xff\x00\x00\xff\xff\xfd\xff\x01\x00\x00\x00\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\x02\x00\x02\x00\x06\x00\x08\x00\x02\x00\x01\x00\x04\x00\x07\x00\x03\x00\x00\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x02\x00\xfe\xff\xf9\xff\xfa\xff\xfb\xff\xfd\xff\xf8\xff\xf5\xff\xf3\xff\xed\xff\xf2\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xf5\xff\xfe\xff\xfd\xff\xff\xff\x04\x00\x00\x00\xfa\xff\xfa\xff\xfc\xff\xf6\xff\xf4\xff\xf8\xff\xf8\xff\xfa\xff\xfa\xff\xf6\xff\xfa\xff\xfc\xff\xfe\xff\x03\x00\x00\x00\xf7\xff\xf4\xff\xf4\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf4\xff\xf1\xff\xf3\xff\xf4\xff\xf2\xff\xf0\xff\xef\xff\xef\xff\xeb\xff\xee\xff\xee\xff\xf0\xff\xf2\xff\xf3\xff\xf5\xff\xf2\xff\xf2\xff\xf7\xff\xf5\xff\xf1\xff\xf6\xff\xf9\xff\xfa\xff\xf3\xff\xf1\xff\xef\xff\xed\xff\xe8\xff\xe9\xff\xea\xff\xf1\xff\xf1\xff\xf0\xff\xf7\xff\xf6\xff\xfc\xff\xf7\xff\xf8\xff\xf7\xff\xfd\xff\x00\x00\xf6\xff\xf6\xff\xfb\xff\xfb\xff\xfe\xff\x04\x00\xfe\xff\xf9\xff\x01\x00\xff\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfb\xff\xfc\xff\xfc\xff\xff\xff\xfb\xff\xfc\xff\xfe\xff\xfb\xff\xfd\xff\xfd\xff\xfa\xff\xf9\xff\xfa\xff\xfd\xff\xfe\xff\xfd\xff\x00\x00\xff\xff\x00\x00\x03\x00\x02\x00\x03\x00\x02\x00\x03\x00\x04\x00\x04\x00\x06\x00\x05\x00\x03\x00\x01\x00\xff\xff\xff\xff\xfa\xff\xfa\xff\xfd\xff\xfe\xff\xfc\xff\xfe\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfd\xff\xf8\xff\xf9\xff\xf6\xff\xf3\xff\xfa\xff\xfa\xff\xf6\xff\xf8\xff\xfc\xff\xfa\xff\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\xfc\xff\x00\x00\x01\x00\x06\x00\x00\x00\x03\x00\t\x00\t\x00\t\x00\x06\x00\x04\x00\x02\x00\x0c\x00\t\x00\t\x00\x0f\x00\x12\x00\x13\x00\x17\x00\x19\x00\x1d\x00\x1d\x00\x16\x00\x16\x00\x18\x00\x19\x00\x15\x00\x16\x00\x16\x00\x14\x00\x13\x00\x17\x00\x16\x00\x10\x00\r\x00\n\x00\x0b\x00\t\x00\x04\x00\x03\x00\x04\x00\x03\x00\x03\x00\x05\x00\x02\x00\x03\x00\x04\x00\x02\x00\x02\x00\xff\xff\xfc\xff\xfb\xff\xff\xff\xff\xff\xfb\xff\xfe\xff\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xff\xff\x02\x00\x00\x00\x00\x00\x01\x00\x04\x00\x06\x00\x05\x00\x08\x00\x05\x00\x04\x00\x06\x00\x05\x00\x01\x00\x00\x00\x02\x00\xfc\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfa\xff\xf8\xff\xf4\xff\xfa\xff\xfe\xff\xfa\xff\xfe\xff\xfe\xff\x01\x00\x01\x00\xfc\xff\xfd\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfd\xff\x00\x00\xfd\xff\xfc\xff\x00\x00\x00\x00\xfb\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf9\xff\xf8\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xf6\xff\xf8\xff\xf7\xff\xf7\xff\xf7\xff\xfa\xff\xfb\xff\xfa\xff\xfc\xff\x01\x00\xfc\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xf7\xff\xfa\xff\xfa\xff\xf6
\xff\xf7\xff\xf9\xff\xf6\xff\xf4\xff\xf3\xff\xf0\xff\xf2\xff\xf0\xff\xed\xff\xf2\xff\xf2\xff\xf2\xff\xf6\xff\xf6\xff\xf7\xff\xf5\xff\xf3\xff\xf6\xff\xf8\xff\xf7\xff\xf4\xff\xf3\xff\xef\xff\xf2\xff\xf0\xff\xf1\xff\xef\xff\xf2\xff\xf4\xff\xeb\xff\xef\xff\xf3\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf3\xff\xf5\xff\xf5\xff\xf6\xff\xf5\xff\xf4\xff\xf5\xff\xf7\xff\xf3\xff\xf3\xff\xf4\xff\xf8\xff\xf8\xff\xf6\xff\xfa\xff\xfd\xff\xfa\xff\xf8\xff\xfb\xff\xfc\xff\xfe\xff\xfa\xff\xf8\xff\xf6\xff\xf7\xff\xf9\xff\xf8\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xf8\xff\xf9\xff\xff\xff\xfe\xff\xfd\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x01\x00\x01\x00\x05\x00\xff\xff\xfe\xff\xff\xff\xfd\xff\xfa\xff\xfd\xff\xfc\xff\xfa\xff\xfc\xff\xfe\xff\xfb\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\xfa\xff\xf9\xff\xfb\xff\x00\x00\x00\x00\x02\x00\x01\x00\x04\x00\x06\x00\x04\x00\x05\x00\x05\x00\x06\x00\x06\x00\x07\x00\t\x00\t\x00\t\x00\x03\x00\x01\x00\x04\x00\x02\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\x03\x00\x06\x00\x02\x00\x05\x00\x08\x00\x05\x00\x07\x00\t\x00\x08\x00\x05\x00\x06\x00\x0c\x00\x0c\x00\x08\x00\t\x00\x0b\x00\x0b\x00\t\x00\n\x00\x08\x00\x07\x00\n\x00\x04\x00\x06\x00\x08\x00\x08\x00\x08\x00\t\x00\x08\x00\x07\x00\x08\x00\x08\x00\x06\x00\x06\x00\t\x00\n\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\x0e\x00\r\x00\x0c\x00\n\x00\x08\x00\x05\x00\x06\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf7\xff\xfd\xff\xfd\xff\xf8\xff\xfc\xff\xfc\xff\xfe\xff\xfc\xff\xfb\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfb\xff\xff\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xf7\xff\xf9\xff\xff\xff\x00\x00\x02\x00\x00\x00\xff\xff\xfb\xff\xfd\xff\xfe\xff\xf8\xff\xfa\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xf9\xff\xf5\xff\xf6\xff\xfa\xff\xf9\xff\xf7\xff\xf6\xff\xf2\xff\xf7\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xf5\xff\xf5\xff\xf5\xff\xf9\xff\xf7\xff\xf0\xff\xf1\xff\xef\xff\xef\xff\xea\xff\xe8\xff\xea\xff\xed\xff\xee\xff\xf0\xff\xf7\xff\xf6\xff\xf8\xff\xf3\xff\xf1\xff\xf2\xff\xf4\xff\xf7\xff\xf5\xff\xf4\xff\xf5\xff\xf4\xff\xf6\xff\xf7\xff\xf4\xff\xf3\xff\xf6\xff\xf5\xff\xf1\xff\xf5\xff\xfa\xff\xf7\xff\xf7\xff\xf8\xff\xfa\xff\xfe\xff\xfc\xff\xfc\xff\xfa\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xfa\xff\xf7\xff\xf7\xff\xfd\xff\xfb\xff\xfb\xff\xfa\xff\xfb\xff\xff\xff\xff\xff\x00\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfd\xff\xff\xff\x01\x00\xfd\xff\xfb\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfc\xff\xf9\xff\xfa\xff\xfb\xff\xfc\xff\xfd\xff\xf8\xff\xf6\xff\xf4\xff\xf4\xff\xf9\xff\xf9\xff\xf6\xff\xfa\xff\xf9\xff\xf8\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xfb\xff\xfd\xff\xfb\xff\xfd\xff\xfb\xff\xf9\xff\xf9\xff\xfd\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x03\x00\x06\x00\x06\x00\x07\x00\x08\x00\x08\x00\n\x00\x0b\x00\x08\x00\t\x00\x08\x00\n\x00\x08\x00\x08\x00\x05\x00\x08\x00\t\x00\n\x00\x0b\x00\t\x00\x05\x00\x03\x00\x04\x00\x06\x00\x04\x00\x01\x00\x02\x00\x03\x00\x03\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xfc\xff\xfc\xff\x03\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x01\x00\x01\x00\x02\x00\x01\x00\xfd\xff\x02\x00\x03\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x03\x00\x00\x00\x00\x00\x04\x00\x04\x00\x04\x00\x06\x00\n\x00\x0
3\x00\x03\x00\x04\x00\x05\x00\x06\x00\x03\x00\x02\x00\x03\x00\x06\x00\x08\x00\x04\x00\x07\x00\t\x00\x08\x00\x07\x00\x07\x00\x08\x00\x07\x00\x07\x00\x07\x00\x08\x00\t\x00\t\x00\x07\x00\x05\x00\x06\x00\x05\x00\x01\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfc\xff\xfe\xff\xfc\xff\xf9\xff\xf7\xff\xf7\xff\xf8\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf5\xff\xf6\xff\xf7\xff\xf8\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xfa\xff\xfc\xff\xfd\xff\xff\xff\xfb\xff\xfd\xff\xf8\xff\xf4\xff\xf6\xff\xf7\xff\xf5\xff\xf6\xff\xf4\xff\xf4\xff\xf4\xff\xf2\xff\xf4\xff\xf7\xff\xf6\xff\xf4\xff\xfa\xff\xfb\xff\xfa\xff\xf4\xff\xf7\xff\xf8\xff\xfa\xff\xf9\xff\xf6\xff\xfa\xff\xf8\xff\xf9\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf8\xff\xf5\xff\xf8\xff\xf9\xff\xfc\xff\xfc\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfb\xff\xfc\xff\xf9\xff\xf8\xff\xf9\xff\xf8\xff\xf3\xff\xf2\xff\xf3\xff\xf3\xff\xef\xff\xf0\xff\xf2\xff\xf3\xff\xf2\xff\xf2\xff\xf7\xff\xf7\xff\xfa\xff\xf8\xff\xf7\xff\xf7\xff\xf5\xff\xf7\xff\xf8\xff\xf8\xff\xf9\xff\xfa\xff\xf9\xff\xfc\xff\xfa\xff\xfa\xff\xfc\xff\xfb\xff\xfd\xff\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfb\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xfb\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xf8\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xf9\xff\xfa\xff\xf7\xff\xf7\xff\xfa\xff\xf9\xff\xfa\xff\xf6\xff\xf6\xff\xf7\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\xfb\xff\xf9\xff\xfd\xff\xfc\xff\xfb\xff\xfa\xff\xfa\xff\xf7\xff\xf7\xff\xfc\xff\xfa\xff\xfc\xff\xfd\xff\xfd\xff\x00\x00\x00\x00\xfa\xff\xf8\xff\xfa\xff\xf7\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xfa\xff\xf7\xff\xf7\xff\xf6\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfd\xff\x03\x00\x00\x00\x04\x00\t\x00\x05\x00\x07\x00\t\x00\t\x00\x08\x00\t\x00\n\x00\n\x00\x05\x00\x02\x00\x02\x00\x02\x00\x00\x00\x01\x00\x01\x00\x02\x00\x07\x00\x05\x00\x06\x00\x04\x00\x01\x00\x00\x00\x02\x00\x02\x00\x00\x00\x05\x00\t\x00\x0b\x00\n\x00\x0b\x00\x0c\x00\x0b\x00\t\x00\n\x00\t\x00\n\x00\r\x00\x0e\x00\r\x00\x08\x00\x05\x00\x02\x00\x04\x00\x02\x00\x01\x00\x03\x00\x00\x00\xff\xff\x03\x00\x05\x00\x04\x00\x03\x00\x00\x00\x00\x00\x06\x00\t\x00\x04\x00\x01\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\x01\x00\x02\x00\x00\x00\x03\x00\x02\x00\x04\x00\x05\x00\x01\x00\x02\x00\x03\x00\x04\x00\x03\x00\x04\x00\x06\x00\x06\x00\x01\x00\xfe\xff\xfd\xff\xfb\xff\xfa\xff\xfb\xff\xfc\xff\xf8\xff\xf5\xff\xf6\xff\xf6\xff\xf5\xff\xf6\xff\xf9\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf1\xff\xf4\xff\xf4\xff\xf2\xff\xee\xff\xee\xff\xef\xff\xee\xff\xf0\xff\xf2\xff\xf2\xff\xf5\xff\xef\xff\xf0\xff\xee\xff\xf1\xff\xf3\xff\xf1\xff\xf0\xff\xf3\xff\xf3\xff\xf3\xff\xf0\xff\xf4\xff\xf8\xff\xee\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00' -# --- diff --git a/tests/components/webhook/test_init.py
b/tests/components/webhook/test_init.py index af07616024a8f8..15ec1b15ee581f 100644 --- a/tests/components/webhook/test_init.py +++ b/tests/components/webhook/test_init.py @@ -9,8 +9,8 @@ import pytest from homeassistant.components import webhook -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator, WebSocketGenerator diff --git a/tests/components/webmin/snapshots/test_sensor.ambr b/tests/components/webmin/snapshots/test_sensor.ambr index 8803ee684aea10..6af768d63a8efd 100644 --- a/tests/components/webmin/snapshots/test_sensor.ambr +++ b/tests/components/webmin/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensor[sensor.192_168_1_1_data_size-entry] +# name: test_sensor[sensor.192_168_1_1_disk_free_inodes-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -13,7 +13,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size', + 'entity_id': 'sensor.192_168_1_1_disk_free_inodes', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -22,41 +22,33 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk free inodes /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_total', - 'unique_id': '12:34:56:78:9a:bc_disk_total', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_ifree', + 'unique_id': '12:34:56:78:9a:bc_/_ifree', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size-state] +# name: test_sensor[sensor.192_168_1_1_disk_free_inodes-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk free inodes /', 'state_class': , - 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size', + 'entity_id': 'sensor.192_168_1_1_disk_free_inodes', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '16861.5074996948', + 'state': '14927206', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_10-entry] +# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -70,7 +62,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_10', + 'entity_id': 'sensor.192_168_1_1_disk_free_inodes_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -79,41 +71,33 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk free inodes /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_total', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_ifree', + 
'unique_id': '12:34:56:78:9a:bc_/media/disk1_ifree', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_10-state] +# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk free inodes /media/disk1', 'state_class': , - 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_10', + 'entity_id': 'sensor.192_168_1_1_disk_free_inodes_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '5543.82404708862', + 'state': '183130757', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_11-entry] +# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -127,7 +111,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_11', + 'entity_id': 'sensor.192_168_1_1_disk_free_inodes_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -136,41 +120,33 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk free inodes /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_ifree', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_ifree', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_11-state] +# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk free inodes /media/disk2', 'state_class': , - 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_11', + 'entity_id': 'sensor.192_168_1_1_disk_free_inodes_media_disk2', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '4638.98014068604', + 'state': '362656466', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_12-entry] +# name: test_sensor[sensor.192_168_1_1_disk_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -184,7 +160,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_12', + 'entity_id': 'sensor.192_168_1_1_disk_free_space', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -202,32 +178,32 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk free space /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_free', + 'unique_id': '12:34:56:78:9a:bc_/_free', 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_12-state] +# name: test_sensor[sensor.192_168_1_1_disk_free_space-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk free 
space /', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_12', + 'entity_id': 'sensor.192_168_1_1_disk_free_space', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '625.379589080811', + 'state': '45.6910972595215', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_2-entry] +# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -241,7 +217,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_2', + 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -259,32 +235,32 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk free space /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_free', - 'unique_id': '12:34:56:78:9a:bc_disk_free', + 'translation_key': 'disk_fs_free', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_free', 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_2-state] +# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk free space /media/disk1', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_2', + 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7217.11803817749', + 'state': '625.379589080811', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_3-entry] +# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -298,7 +274,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_3', + 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -316,32 +292,32 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk free space /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_used', - 'unique_id': '12:34:56:78:9a:bc_disk_used', + 'translation_key': 'disk_fs_free', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_free', 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_3-state] +# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk free space /media/disk2', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_3', + 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk2', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '8794.3125', + 'state': '6546.04735183716', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_4-entry] +# name: test_sensor[sensor.192_168_1_1_disk_inode_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -355,7 +331,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 
'entity_id': 'sensor.192_168_1_1_data_size_4', + 'entity_id': 'sensor.192_168_1_1_disk_inode_usage', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -364,41 +340,34 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk inode usage /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/_total', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_iused_percent', + 'unique_id': '12:34:56:78:9a:bc_/_iused_percent', + 'unit_of_measurement': '%', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_4-state] +# name: test_sensor[sensor.192_168_1_1_disk_inode_usage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk inode usage /', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_4', + 'entity_id': 'sensor.192_168_1_1_disk_inode_usage', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '231.369548797607', + 'state': '4', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_5-entry] +# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -412,7 +381,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_5', + 'entity_id': 'sensor.192_168_1_1_disk_inode_usage_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -421,41 +390,34 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk inode usage /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/_used', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_iused_percent', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused_percent', + 'unit_of_measurement': '%', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_5-state] +# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk inode usage /media/disk1', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_5', + 'entity_id': 'sensor.192_168_1_1_disk_inode_usage_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '173.85604095459', + 'state': '1', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_6-entry] +# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -469,7 +431,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_6', + 'entity_id': 
'sensor.192_168_1_1_disk_inode_usage_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -478,41 +440,34 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk inode usage /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/_free', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_iused_percent', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused_percent', + 'unit_of_measurement': '%', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_6-state] +# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk inode usage /media/disk2', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_6', + 'entity_id': 'sensor.192_168_1_1_disk_inode_usage_media_disk2', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '45.6910972595215', + 'state': '1', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_7-entry] +# name: test_sensor[sensor.192_168_1_1_disk_total_inodes-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -526,7 +481,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_7', + 'entity_id': 'sensor.192_168_1_1_disk_total_inodes', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -535,41 +490,33 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk total inodes /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_total', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_itotal', + 'unique_id': '12:34:56:78:9a:bc_/_itotal', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_7-state] +# name: test_sensor[sensor.192_168_1_1_disk_total_inodes-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk total inodes /', 'state_class': , - 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_7', + 'entity_id': 'sensor.192_168_1_1_disk_total_inodes', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '11086.3139038086', + 'state': '15482880', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_8-entry] +# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -583,7 +530,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_8', + 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': 
None, @@ -592,41 +539,33 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk total inodes /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_itotal', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_itotal', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_8-state] +# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk total inodes /media/disk1', 'state_class': , - 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_8', + 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3981.47631835938', + 'state': '183140352', }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_9-entry] +# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -640,7 +579,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_data_size_9', + 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -649,41 +588,33 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Data size', + 'original_name': 'Disk total inodes /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_free', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_itotal', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_itotal', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_data_size_9-state] +# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Data size', + 'friendly_name': '192.168.1.1 Disk total inodes /media/disk2', 'state_class': , - 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_data_size_9', + 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk2', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6546.04735183716', + 'state': '366198784', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_inodes-entry] +# name: test_sensor[sensor.192_168_1_1_disk_total_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -697,105 +628,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_free_inodes', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk free inodes /', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/_ifree', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_free_inodes-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk free inodes /', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_free_inodes', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '14927206', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_free_inodes_media_disk1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk free inodes /media/disk1', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_ifree', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk free inodes /media/disk1', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_free_inodes_media_disk1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '183130757', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_free_inodes_media_disk2', + 'entity_id': 'sensor.192_168_1_1_disk_total_space', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -804,33 +637,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': 'Disk free inodes /media/disk2', + 'original_name': 'Disk total space /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_ifree', - 'unit_of_measurement': None, + 'translation_key': 'disk_fs_total', + 'unique_id': '12:34:56:78:9a:bc_/_total', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_inodes_media_disk2-state] +# name: test_sensor[sensor.192_168_1_1_disk_total_space-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk free inodes /media/disk2', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Disk total space /', 'state_class': , + 'unit_of_measurement': , }), 'context': , - 'entity_id': 
'sensor.192_168_1_1_disk_free_inodes_media_disk2', + 'entity_id': 'sensor.192_168_1_1_disk_total_space', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '362656466', + 'state': '231.369548797607', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_space-entry] +# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -844,7 +685,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_free_space', + 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -862,32 +703,32 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Disk free space /', + 'original_name': 'Disk total space /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/_free', + 'translation_key': 'disk_fs_total', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_total', 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_space-state] +# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk free space /', + 'friendly_name': '192.168.1.1 Disk total space /media/disk1', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_free_space', + 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '45.6910972595215', + 'state': '5543.82404708862', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk1-entry] +# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -901,7 +742,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk1', + 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -919,32 +760,32 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Disk free space /media/disk1', + 'original_name': 'Disk total space /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_free', + 'translation_key': 'disk_fs_total', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_total', 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk1-state] +# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk free space /media/disk1', + 'friendly_name': '192.168.1.1 Disk total space /media/disk2', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk1', + 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk2', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '625.379589080811', + 'state': '11086.3139038086', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk2-entry] +# name: test_sensor[sensor.192_168_1_1_disk_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': 
set({ }), @@ -958,7 +799,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk2', + 'entity_id': 'sensor.192_168_1_1_disk_usage', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -967,41 +808,34 @@ }), 'name': None, 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), }), - 'original_device_class': , + 'original_device_class': None, 'original_icon': None, - 'original_name': 'Disk free space /media/disk2', + 'original_name': 'Disk usage /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_free', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_free', - 'unit_of_measurement': , + 'translation_key': 'disk_fs_used_percent', + 'unique_id': '12:34:56:78:9a:bc_/_used_percent', + 'unit_of_measurement': '%', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_free_space_media_disk2-state] +# name: test_sensor[sensor.192_168_1_1_disk_usage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk free space /media/disk2', + 'friendly_name': '192.168.1.1 Disk usage /', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_free_space_media_disk2', + 'entity_id': 'sensor.192_168_1_1_disk_usage', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6546.04735183716', + 'state': '80', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_inode_usage-entry] +# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1015,7 +849,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_inode_usage', + 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1027,31 +861,31 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Disk inode usage /', + 'original_name': 'Disk usage /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/_iused_percent', + 'translation_key': 'disk_fs_used_percent', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used_percent', 'unit_of_measurement': '%', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_inode_usage-state] +# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk inode usage /', + 'friendly_name': '192.168.1.1 Disk usage /media/disk1', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_inode_usage', + 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '4', + 'state': '89', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk1-entry] +# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1065,7 +899,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_inode_usage_media_disk1', + 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, 
@@ -1077,31 +911,31 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Disk inode usage /media/disk1', + 'original_name': 'Disk usage /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused_percent', + 'translation_key': 'disk_fs_used_percent', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used_percent', 'unit_of_measurement': '%', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk1-state] +# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk inode usage /media/disk1', + 'friendly_name': '192.168.1.1 Disk usage /media/disk2', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_inode_usage_media_disk1', + 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk2', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '38', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk2-entry] +# name: test_sensor[sensor.192_168_1_1_disk_used_inodes-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1115,7 +949,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_inode_usage_media_disk2', + 'entity_id': 'sensor.192_168_1_1_disk_used_inodes', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1127,1348 +961,30 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Disk inode usage /media/disk2', + 'original_name': 'Disk used inodes /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused_percent', - 'unit_of_measurement': '%', + 'translation_key': 'disk_fs_iused', + 'unique_id': '12:34:56:78:9a:bc_/_iused', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_inode_usage_media_disk2-state] +# name: test_sensor[sensor.192_168_1_1_disk_used_inodes-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk inode usage /media/disk2', + 'friendly_name': '192.168.1.1 Disk used inodes /', 'state_class': , - 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_inode_usage_media_disk2', + 'entity_id': 'sensor.192_168_1_1_disk_used_inodes', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '555674', }) # --- -# name: test_sensor[sensor.192_168_1_1_disk_total_inodes-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_total_inodes', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk total inodes /', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/_itotal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_inodes-state] 
- StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk total inodes /', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_total_inodes', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15482880', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk total inodes /media/disk1', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_itotal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk total inodes /media/disk1', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '183140352', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk total inodes /media/disk2', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_itotal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_inodes_media_disk2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk total inodes /media/disk2', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_total_inodes_media_disk2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '366198784', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_space-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_total_space', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 
'original_name': 'Disk total space /', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_space-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk total space /', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_total_space', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '231.369548797607', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disk total space /media/disk1', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk total space /media/disk1', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5543.82404708862', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disk total space /media/disk2', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_total', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_total_space_media_disk2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk total space /media/disk2', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_total_space_media_disk2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11086.3139038086', - 
}) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk usage /', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/_used_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk usage /', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk usage /media/disk1', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk usage /media/disk1', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '89', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk usage /media/disk2', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_usage_media_disk2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk usage /media/disk2', - 'state_class': , - 'unit_of_measurement': '%', - }), - 
'context': , - 'entity_id': 'sensor.192_168_1_1_disk_usage_media_disk2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '38', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_inodes-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_used_inodes', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk used inodes /', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/_iused', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_inodes-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk used inodes /', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_used_inodes', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '555674', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk used inodes /media/disk1', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk used inodes /media/disk1', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '9595', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Disk used inodes /media/disk2', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk2-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Disk used inodes /media/disk2', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3542318', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_space-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_used_space', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disk used space /', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_space-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk used space /', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_used_space', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '173.85604095459', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disk used space /media/disk1', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk used space /media/disk1', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4638.98014068604', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk2', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disk used space /media/disk2', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_used', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disk used space /media/disk2', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3981.47631835938', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disks_free_space-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disks_free_space', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disks free space', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_free', - 'unique_id': '12:34:56:78:9a:bc_disk_free', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disks_free_space-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disks free space', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disks_free_space', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '7217.11803817749', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disks_total_space-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disks_total_space', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disks total space', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_total', - 'unique_id': '12:34:56:78:9a:bc_disk_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disks_total_space-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disks total space', - 'state_class': , - 
'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disks_total_space', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '16861.5074996948', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disks_used_space-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_disks_used_space', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Disks used space', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_used', - 'unique_id': '12:34:56:78:9a:bc_disk_used', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_disks_used_space-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Disks used space', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_disks_used_space', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8794.3125', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_load_15m-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_15m', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Load (15m)', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'load_15m', - 'unique_id': '12:34:56:78:9a:bc_load_15m', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_load_15m-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (15m)', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_load_15m', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.37', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_load_1m-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_1m', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Load (1m)', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'load_1m', - 'unique_id': '12:34:56:78:9a:bc_load_1m', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_load_1m-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (1m)', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_load_1m', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.29', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_load_5m-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_5m', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Load (5m)', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'load_5m', - 'unique_id': '12:34:56:78:9a:bc_load_5m', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_load_5m-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (5m)', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_load_5m', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.36', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_memory_free-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_memory_free', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Memory free', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mem_free', - 'unique_id': '12:34:56:78:9a:bc_mem_free', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_memory_free-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Memory free', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_memory_free', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '27.2087860107422', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_memory_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_memory_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Memory total', - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mem_total', - 
'unique_id': '12:34:56:78:9a:bc_mem_total', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.192_168_1_1_memory_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'data_size', - 'friendly_name': '192.168.1.1 Memory total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_memory_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '31.248420715332', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/_itotal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15482880', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_10', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'webmin', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused_percent', - 'unit_of_measurement': '%', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.192_168_1_1_none_10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensor[sensor.192_168_1_1_none_11-entry] +# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2482,7 +998,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_11', + 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2494,30 +1010,30 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Disk used inodes /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_itotal', + 'translation_key': 
'disk_fs_iused', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused', 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_none_11-state] +# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'friendly_name': '192.168.1.1 Disk used inodes /media/disk1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_11', + 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '183140352', + 'state': '9595', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_12-entry] +# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2531,7 +1047,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_12', + 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2543,30 +1059,30 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Disk used inodes /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused', 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_none_12-state] +# name: test_sensor[sensor.192_168_1_1_disk_used_inodes_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'friendly_name': '192.168.1.1 Disk used inodes /media/disk2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_12', + 'entity_id': 'sensor.192_168_1_1_disk_used_inodes_media_disk2', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '9595', + 'state': '3542318', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_13-entry] +# name: test_sensor[sensor.192_168_1_1_disk_used_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2580,7 +1096,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_13', + 'entity_id': 'sensor.192_168_1_1_disk_used_space', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2589,33 +1105,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Disk used space /', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_ifree', - 'unit_of_measurement': None, + 'translation_key': 'disk_fs_used', + 'unique_id': '12:34:56:78:9a:bc_/_used', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_13-state] +# name: test_sensor[sensor.192_168_1_1_disk_used_space-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Disk used space /', 'state_class': , + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_13', + 'entity_id': 'sensor.192_168_1_1_disk_used_space', 
'last_changed': , 'last_reported': , 'last_updated': , - 'state': '183130757', + 'state': '173.85604095459', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_14-entry] +# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2629,7 +1153,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_14', + 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2638,34 +1162,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Disk used space /media/disk1', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used_percent', - 'unit_of_measurement': '%', + 'translation_key': 'disk_fs_used', + 'unique_id': '12:34:56:78:9a:bc_/media/disk1_used', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_14-state] +# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Disk used space /media/disk1', 'state_class': , - 'unit_of_measurement': '%', + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_14', + 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk1', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '89', + 'state': '4638.98014068604', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_15-entry] +# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2679,7 +1210,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_15', + 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2688,34 +1219,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Disk used space /media/disk2', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk1_iused_percent', - 'unit_of_measurement': '%', + 'translation_key': 'disk_fs_used', + 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_15-state] +# name: test_sensor[sensor.192_168_1_1_disk_used_space_media_disk2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Disk used space /media/disk2', 'state_class': , - 'unit_of_measurement': '%', + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_15', + 'entity_id': 'sensor.192_168_1_1_disk_used_space_media_disk2', 'last_changed': , 'last_reported': , 
'last_updated': , - 'state': '1', + 'state': '3981.47631835938', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_2-entry] +# name: test_sensor[sensor.192_168_1_1_disks_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2729,7 +1267,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_2', + 'entity_id': 'sensor.192_168_1_1_disks_free_space', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2738,33 +1276,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Disks free space', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/_iused', - 'unit_of_measurement': None, + 'translation_key': 'disk_free', + 'unique_id': '12:34:56:78:9a:bc_disk_free', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_2-state] +# name: test_sensor[sensor.192_168_1_1_disks_free_space-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Disks free space', 'state_class': , + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_2', + 'entity_id': 'sensor.192_168_1_1_disks_free_space', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '555674', + 'state': '7217.11803817749', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_3-entry] +# name: test_sensor[sensor.192_168_1_1_disks_total_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2778,7 +1324,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_3', + 'entity_id': 'sensor.192_168_1_1_disks_total_space', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2787,33 +1333,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Disks total space', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/_ifree', - 'unit_of_measurement': None, + 'translation_key': 'disk_total', + 'unique_id': '12:34:56:78:9a:bc_disk_total', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_3-state] +# name: test_sensor[sensor.192_168_1_1_disks_total_space-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Disks total space', 'state_class': , + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_3', + 'entity_id': 'sensor.192_168_1_1_disks_total_space', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '14927206', + 'state': '16861.5074996948', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_4-entry] +# name: test_sensor[sensor.192_168_1_1_disks_used_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2827,7 +1381,7 @@ 'disabled_by': None, 'domain': 
'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_4', + 'entity_id': 'sensor.192_168_1_1_disks_used_space', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2836,34 +1390,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Disks used space', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/_used_percent', - 'unit_of_measurement': '%', + 'translation_key': 'disk_used', + 'unique_id': '12:34:56:78:9a:bc_disk_used', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_4-state] +# name: test_sensor[sensor.192_168_1_1_disks_used_space-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Disks used space', 'state_class': , - 'unit_of_measurement': '%', + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_4', + 'entity_id': 'sensor.192_168_1_1_disks_used_space', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '80', + 'state': '8794.3125', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_5-entry] +# name: test_sensor[sensor.192_168_1_1_load_15m-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2877,7 +1438,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_5', + 'entity_id': 'sensor.192_168_1_1_load_15m', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2889,31 +1450,30 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Load (15m)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_iused_percent', - 'unique_id': '12:34:56:78:9a:bc_/_iused_percent', - 'unit_of_measurement': '%', + 'translation_key': 'load_15m', + 'unique_id': '12:34:56:78:9a:bc_load_15m', + 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_none_5-state] +# name: test_sensor[sensor.192_168_1_1_load_15m-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'friendly_name': '192.168.1.1 Load (15m)', 'state_class': , - 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_5', + 'entity_id': 'sensor.192_168_1_1_load_15m', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '4', + 'state': '1.37', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_6-entry] +# name: test_sensor[sensor.192_168_1_1_load_1m-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2927,7 +1487,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_6', + 'entity_id': 'sensor.192_168_1_1_load_1m', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2939,30 +1499,30 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Load (1m)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_itotal', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_itotal', + 'translation_key': 'load_1m', + 'unique_id': 
'12:34:56:78:9a:bc_load_1m', 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_none_6-state] +# name: test_sensor[sensor.192_168_1_1_load_1m-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'friendly_name': '192.168.1.1 Load (1m)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_6', + 'entity_id': 'sensor.192_168_1_1_load_1m', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '366198784', + 'state': '1.29', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_7-entry] +# name: test_sensor[sensor.192_168_1_1_load_5m-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -2976,7 +1536,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_7', + 'entity_id': 'sensor.192_168_1_1_load_5m', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -2988,30 +1548,30 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Load (5m)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_iused', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_iused', + 'translation_key': 'load_5m', + 'unique_id': '12:34:56:78:9a:bc_load_5m', 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_none_7-state] +# name: test_sensor[sensor.192_168_1_1_load_5m-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'friendly_name': '192.168.1.1 Load (5m)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_7', + 'entity_id': 'sensor.192_168_1_1_load_5m', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3542318', + 'state': '1.36', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_8-entry] +# name: test_sensor[sensor.192_168_1_1_memory_free-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3025,7 +1585,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_8', + 'entity_id': 'sensor.192_168_1_1_memory_free', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -3034,33 +1594,41 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Memory free', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_ifree', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_ifree', - 'unit_of_measurement': None, + 'translation_key': 'mem_free', + 'unique_id': '12:34:56:78:9a:bc_mem_free', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_8-state] +# name: test_sensor[sensor.192_168_1_1_memory_free-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Memory free', 'state_class': , + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_8', + 'entity_id': 'sensor.192_168_1_1_memory_free', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '362656466', + 'state': '27.2087860107422', }) # --- -# name: test_sensor[sensor.192_168_1_1_none_9-entry] +# name: test_sensor[sensor.192_168_1_1_memory_total-entry] 
EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3074,7 +1642,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_none_9', + 'entity_id': 'sensor.192_168_1_1_memory_total', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -3083,31 +1651,38 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, - 'original_name': None, + 'original_name': 'Memory total', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'disk_fs_used_percent', - 'unique_id': '12:34:56:78:9a:bc_/media/disk2_used_percent', - 'unit_of_measurement': '%', + 'translation_key': 'mem_total', + 'unique_id': '12:34:56:78:9a:bc_mem_total', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.192_168_1_1_none_9-state] +# name: test_sensor[sensor.192_168_1_1_memory_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 None', + 'device_class': 'data_size', + 'friendly_name': '192.168.1.1 Memory total', 'state_class': , - 'unit_of_measurement': '%', + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_none_9', + 'entity_id': 'sensor.192_168_1_1_memory_total', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '38', + 'state': '31.248420715332', }) # --- # name: test_sensor[sensor.192_168_1_1_swap_free-entry] diff --git a/tests/components/webmin/test_config_flow.py b/tests/components/webmin/test_config_flow.py index 477ad230622d85..03da33405973a9 100644 --- a/tests/components/webmin/test_config_flow.py +++ b/tests/components/webmin/test_config_flow.py @@ -74,7 +74,7 @@ async def test_form_user( (Exception, "unknown"), ( Fault("5", "Webmin module net does not exist"), - "Fault 5: Webmin module net does not exist", + "unknown", ), ], ) diff --git a/tests/components/webmin/test_sensor.py b/tests/components/webmin/test_sensor.py index 5fb874825a3e35..dd68e2f9f8c37c 100644 --- a/tests/components/webmin/test_sensor.py +++ b/tests/components/webmin/test_sensor.py @@ -8,6 +8,8 @@ from .conftest import async_init_integration +from tests.common import snapshot_platform + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor( @@ -19,11 +21,4 @@ async def test_sensor( entry = await async_init_integration(hass) - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index 20a728cf3cd97d..d55d2f97017f4e 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -293,6 +293,6 @@ async def test_auth_sending_unknown_type_disconnects( auth_msg = await ws.receive_json() assert auth_msg["type"] == TYPE_AUTH_REQUIRED - await ws._writer._send_frame(b"1" * 130, 0x30) + await ws._writer.send_frame(b"1" * 130, 0x30) auth_msg = await ws.receive() assert auth_msg.type == WSMsgType.close diff --git 
a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 2530d8859421eb..03e30c11ee9fb9 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -5,7 +5,7 @@ from typing import Any, cast from unittest.mock import patch -from aiohttp import WSMsgType, WSServerHandshakeError, web +from aiohttp import ServerDisconnectedError, WSMsgType, web import pytest from homeassistant.components.websocket_api import ( @@ -374,7 +374,7 @@ async def test_prepare_fail_timeout( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(TimeoutError, web.WebSocketResponse.prepare), ), - pytest.raises(WSServerHandshakeError), + pytest.raises(ServerDisconnectedError), ): await hass_ws_client(hass) @@ -392,7 +392,7 @@ async def test_prepare_fail_connection_reset( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(ConnectionResetError, web.WebSocketResponse.prepare), ), - pytest.raises(WSServerHandshakeError), + pytest.raises(ServerDisconnectedError), ): await hass_ws_client(hass) diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py index 622882d6e8d4e0..6ecb64ffdf4d30 100644 --- a/tests/components/weheat/conftest.py +++ b/tests/components/weheat/conftest.py @@ -115,6 +115,9 @@ def mock_weheat_heat_pump_instance() -> MagicMock: mock_heat_pump_instance.power_output = 66 mock_heat_pump_instance.dhw_top_temperature = 77 mock_heat_pump_instance.dhw_bottom_temperature = 88 + mock_heat_pump_instance.thermostat_water_setpoint = 35 + mock_heat_pump_instance.thermostat_room_temperature = 19 + mock_heat_pump_instance.thermostat_room_temperature_setpoint = 21 mock_heat_pump_instance.cop = 4.5 mock_heat_pump_instance.heat_pump_state = HeatPump.State.HEATING mock_heat_pump_instance.energy_total = 12345 diff --git a/tests/components/weheat/snapshots/test_sensor.ambr b/tests/components/weheat/snapshots/test_sensor.ambr index fc2b6a845a88ad..3bd4a2545989c3 100644 --- a/tests/components/weheat/snapshots/test_sensor.ambr +++ b/tests/components/weheat/snapshots/test_sensor.ambr @@ -175,6 +175,60 @@ 'state': '4.5', }) # --- +# name: test_all_entities[sensor.test_model_current_room_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_current_room_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current room temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_room_temperature', + 'unique_id': '0000-1111-2222-3333_thermostat_room_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_current_room_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Current room temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_current_room_temperature', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '19', + }) +# --- # name: test_all_entities[sensor.test_model_dhw_bottom_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -496,7 +550,7 @@ 'state': '44', }) # --- -# name: test_all_entities[sensor.test_model_power_output-entry] +# name: test_all_entities[sensor.test_model_room_temperature_setpoint-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -510,7 +564,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.test_model_power_output', + 'entity_id': 'sensor.test_model_room_temperature_setpoint', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -520,34 +574,34 @@ 'name': None, 'options': dict({ 'sensor': dict({ - 'suggested_display_precision': 0, + 'suggested_display_precision': 1, }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'power output', + 'original_name': 'Room temperature setpoint', 'platform': 'weheat', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'power_output', - 'unique_id': '0000-1111-2222-3333_power_output', - 'unit_of_measurement': , + 'translation_key': 'thermostat_room_temperature_setpoint', + 'unique_id': '0000-1111-2222-3333_thermostat_room_temperature_setpoint', + 'unit_of_measurement': , }) # --- -# name: test_all_entities[sensor.test_model_power_output-state] +# name: test_all_entities[sensor.test_model_room_temperature_setpoint-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Test Model power output', + 'device_class': 'temperature', + 'friendly_name': 'Test Model Room temperature setpoint', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.test_model_power_output', + 'entity_id': 'sensor.test_model_room_temperature_setpoint', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '77', + 'state': '21', }) # --- # name: test_all_entities[sensor.test_model_water_inlet_temperature-entry] @@ -658,3 +712,57 @@ 'state': '22', }) # --- +# name: test_all_entities[sensor.test_model_water_target_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_water_target_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water target temperature', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_water_setpoint', + 'unique_id': '0000-1111-2222-3333_thermostat_water_setpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_water_target_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Model Water target temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_water_target_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35', + }) +# --- diff --git a/tests/components/weheat/test_sensor.py 
b/tests/components/weheat/test_sensor.py index 5bd05b5cb2b1d2..d9055addc67b6b 100644 --- a/tests/components/weheat/test_sensor.py +++ b/tests/components/weheat/test_sensor.py @@ -34,7 +34,7 @@ async def test_all_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 9), (True, 11)]) +@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 12), (True, 14)]) async def test_create_entities( hass: HomeAssistant, mock_weheat_discover: AsyncMock, diff --git a/tests/components/wilight/test_cover.py b/tests/components/wilight/test_cover.py index 5b89293032f45f..a844a61fc1a506 100644 --- a/tests/components/wilight/test_cover.py +++ b/tests/components/wilight/test_cover.py @@ -9,6 +9,7 @@ ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN as COVER_DOMAIN, + CoverState, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -16,10 +17,6 @@ SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -70,7 +67,7 @@ async def test_loading_cover( # First segment of the strip state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED entry = entity_registry.async_get("cover.wl000000000099_1") assert entry @@ -94,7 +91,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # Close await hass.services.async_call( @@ -107,7 +104,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # Set position await hass.services.async_call( @@ -120,7 +117,7 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes.get(ATTR_CURRENT_POSITION) == 50 # Stop @@ -134,4 +131,4 @@ async def test_open_close_cover_state( await hass.async_block_till_done() state = hass.states.get("cover.wl000000000099_1") assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN diff --git a/tests/components/withings/__init__.py b/tests/components/withings/__init__.py index 8469a5a462aa74..127bccbeb001dd 100644 --- a/tests/components/withings/__init__.py +++ b/tests/components/withings/__init__.py @@ -10,8 +10,8 @@ from freezegun.api import FrozenDateTimeFactory from homeassistant.components.webhook import async_generate_url -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from tests.common import ( MockConfigEntry, diff --git a/tests/components/wmspro/conftest.py b/tests/components/wmspro/conftest.py index 76c11e71316bb4..4b0e7eb4fefa17 100644 --- a/tests/components/wmspro/conftest.py +++ b/tests/components/wmspro/conftest.py @@ -82,6 +82,18 @@ def mock_hub_status_prod_awning() -> Generator[AsyncMock]: yield mock_dest_refresh +@pytest.fixture +def mock_hub_status_prod_dimmer() -> Generator[AsyncMock]: + """Override 
WebControlPro._getStatus.""" + with patch( + "wmspro.webcontrol.WebControlPro._getStatus", + return_value=load_json_object_fixture( + "example_status_prod_dimmer.json", DOMAIN + ), + ) as mock_dest_refresh: + yield mock_dest_refresh + + @pytest.fixture def mock_dest_refresh() -> Generator[AsyncMock]: """Override Destination.refresh.""" @@ -104,3 +116,12 @@ async def fake_call(self, **kwargs): fake_call, ) as mock_action_call: yield mock_action_call + + +@pytest.fixture +def mock_scene_call() -> Generator[AsyncMock]: + """Override Scene.__call__.""" + with patch( + "wmspro.scene.Scene.__call__", + ) as mock_scene_call: + yield mock_scene_call diff --git a/tests/components/wmspro/fixtures/example_status_prod_dimmer.json b/tests/components/wmspro/fixtures/example_status_prod_dimmer.json new file mode 100644 index 00000000000000..675549f2457f76 --- /dev/null +++ b/tests/components/wmspro/fixtures/example_status_prod_dimmer.json @@ -0,0 +1,28 @@ +{ + "command": "getStatus", + "protocolVersion": "1.0.0", + "details": [ + { + "destinationId": 97358, + "data": { + "drivingCause": 0, + "heartbeatError": false, + "blocking": false, + "productData": [ + { + "actionId": 0, + "value": { + "percentage": 0 + } + }, + { + "actionId": 20, + "value": { + "onOffState": false + } + } + ] + } + } + ] +} diff --git a/tests/components/wmspro/snapshots/test_cover.ambr b/tests/components/wmspro/snapshots/test_cover.ambr index 21042789c16fcf..0456f074d4935c 100644 --- a/tests/components/wmspro/snapshots/test_cover.ambr +++ b/tests/components/wmspro/snapshots/test_cover.ambr @@ -35,7 +35,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by WMS WebControl pro API', - 'current_position': 100, + 'current_position': 0, 'device_class': 'awning', 'friendly_name': 'Markise', 'supported_features': , @@ -45,6 +45,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'open', + 'state': 'closed', }) # --- diff --git a/tests/components/wmspro/snapshots/test_diagnostics.ambr b/tests/components/wmspro/snapshots/test_diagnostics.ambr index 6a87c0416ab0a2..00cb62e18c44b5 100644 --- a/tests/components/wmspro/snapshots/test_diagnostics.ambr +++ b/tests/components/wmspro/snapshots/test_diagnostics.ambr @@ -149,6 +149,8 @@ }), 'status': dict({ }), + 'unknownProducts': dict({ + }), }), '97358': dict({ 'actions': dict({ @@ -203,6 +205,8 @@ }), 'status': dict({ }), + 'unknownProducts': dict({ + }), }), }), 'host': 'webcontrol', diff --git a/tests/components/wmspro/snapshots/test_light.ambr b/tests/components/wmspro/snapshots/test_light.ambr new file mode 100644 index 00000000000000..d13e444645db91 --- /dev/null +++ b/tests/components/wmspro/snapshots/test_light.ambr @@ -0,0 +1,53 @@ +# serializer version: 1 +# name: test_light_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'terrasse', + 'config_entries': , + 'configuration_url': 'http://webcontrol/control', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'wmspro', + '97358', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'WAREMA Renkhoff SE', + 'model': 'Dimmer', + 'model_id': None, + 'name': 'Licht', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '97358', + 'suggested_area': 'Terrasse', + 'sw_version': None, + 'via_device_id': , + }) +# --- +# name: test_light_update + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by WMS WebControl pro API', + 'brightness': None, + 
'color_mode': None, + 'friendly_name': 'Licht', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.licht', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/wmspro/snapshots/test_scene.ambr b/tests/components/wmspro/snapshots/test_scene.ambr new file mode 100644 index 00000000000000..940d4e31e839a7 --- /dev/null +++ b/tests/components/wmspro/snapshots/test_scene.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_scene_activate + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by WMS WebControl pro API', + 'friendly_name': 'Raum 0 Gute Nacht', + }), + 'context': , + 'entity_id': 'scene.raum_0_gute_nacht', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_scene_room_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'raum_0', + 'config_entries': , + 'configuration_url': 'http://webcontrol/control', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'wmspro', + '42581', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'WAREMA Renkhoff SE', + 'model': 'Room', + 'model_id': None, + 'name': 'Raum 0', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '42581', + 'suggested_area': 'Raum 0', + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/wmspro/test_config_flow.py b/tests/components/wmspro/test_config_flow.py index 6a254a93836e7a..782dc051c8cfbc 100644 --- a/tests/components/wmspro/test_config_flow.py +++ b/tests/components/wmspro/test_config_flow.py @@ -6,13 +6,19 @@ from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.wmspro.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import setup_config_entry -async def test_config_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: +from tests.common import MockConfigEntry + + +async def test_config_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock +) -> None: """Test we can handle user-input to create a config entry.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -40,7 +46,7 @@ async def test_config_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> async def test_config_flow_from_dhcp( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock ) -> None: """Test we can handle DHCP discovery to create a config entry.""" info = DhcpServiceInfo( @@ -74,6 +80,7 @@ async def test_config_flow_from_dhcp( async def test_config_flow_from_dhcp_add_mac( hass: HomeAssistant, mock_setup_entry: AsyncMock, + mock_hub_refresh: AsyncMock, ) -> None: """Test we can use DHCP discovery to add MAC address to a config entry.""" result = await hass.config_entries.flow.async_init( @@ -112,8 +119,100 @@ async def test_config_flow_from_dhcp_add_mac( assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" +async def test_config_flow_from_dhcp_ip_update( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_hub_refresh: AsyncMock, +) -> None: + """Test we can use DHCP discovery to update IP in a config entry.""" + info = DhcpServiceInfo( + ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.2.3.4", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.2.3.4" + assert result["data"] == { + CONF_HOST: "1.2.3.4", + } + assert len(mock_setup_entry.mock_calls) == 1 + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + + info = DhcpServiceInfo( + ip="5.6.7.8", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + assert hass.config_entries.async_entries(DOMAIN)[0].data[CONF_HOST] == "5.6.7.8" + + +async def test_config_flow_from_dhcp_no_update( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_hub_refresh: AsyncMock, +) -> None: + """Test we do not use DHCP discovery to overwrite hostname with IP in config entry.""" + info = DhcpServiceInfo( + ip="1.2.3.4", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "wmspro.webcontrol.WebControlPro.ping", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "webcontrol", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert 
result["title"] == "webcontrol" + assert result["data"] == { + CONF_HOST: "webcontrol", + } + assert len(mock_setup_entry.mock_calls) == 1 + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + + info = DhcpServiceInfo( + ip="5.6.7.8", hostname="webcontrol", macaddress="00:11:22:33:44:55" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=info + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert hass.config_entries.async_entries(DOMAIN)[0].unique_id == "00:11:22:33:44:55" + assert hass.config_entries.async_entries(DOMAIN)[0].data[CONF_HOST] == "webcontrol" + + async def test_config_flow_ping_failed( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock ) -> None: """Test we handle ping failed error.""" result = await hass.config_entries.flow.async_init( @@ -154,7 +253,7 @@ async def test_config_flow_ping_failed( async def test_config_flow_cannot_connect( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( @@ -195,7 +294,7 @@ async def test_config_flow_cannot_connect( async def test_config_flow_unknown_error( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_hub_refresh: AsyncMock ) -> None: """Test we handle an unknown error.""" result = await hass.config_entries.flow.async_init( @@ -233,3 +332,63 @@ async def test_config_flow_unknown_error( CONF_HOST: "1.2.3.4", } assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_duplicate_entries( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_dest_refresh: AsyncMock, + mock_hub_configuration_test: AsyncMock, +) -> None: + """Test we prevent creation of duplicate config entries.""" + await setup_config_entry(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "5.6.7.8", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +async def test_config_flow_multiple_entries( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_dest_refresh: AsyncMock, + mock_hub_configuration_test: AsyncMock, + mock_hub_configuration_prod: AsyncMock, +) -> None: + """Test we allow creation of different config entries.""" + await setup_config_entry(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + mock_hub_configuration_prod.return_value = mock_hub_configuration_test.return_value + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "5.6.7.8", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "5.6.7.8" + assert result["data"] == { + CONF_HOST: "5.6.7.8", + } + assert len(hass.config_entries.async_entries(DOMAIN)) == 2 diff --git 
a/tests/components/wmspro/test_cover.py b/tests/components/wmspro/test_cover.py index 1e8653335a7ee2..2c20ef51b6472f 100644 --- a/tests/components/wmspro/test_cover.py +++ b/tests/components/wmspro/test_cover.py @@ -1,25 +1,28 @@ -"""Test the wmspro diagnostics.""" +"""Test the wmspro cover support.""" from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.components.wmspro.cover import SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, + STATE_CLOSED, + STATE_OPEN, Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component from . import setup_config_entry -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_cover_device( @@ -48,6 +51,7 @@ async def test_cover_update( mock_hub_ping: AsyncMock, mock_hub_configuration_prod: AsyncMock, mock_hub_status_prod_awning: AsyncMock, + freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, ) -> None: """Test that a cover entity is created and updated correctly.""" @@ -60,18 +64,15 @@ async def test_cover_update( assert entity is not None assert entity == snapshot - await async_setup_component(hass, "homeassistant", {}) - await hass.services.async_call( - "homeassistant", - "update_entity", - {ATTR_ENTITY_ID: entity.entity_id}, - blocking=True, - ) + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_hub_status_prod_awning.mock_calls) == 3 + assert len(mock_hub_status_prod_awning.mock_calls) >= 3 -async def test_cover_close_and_open( +async def test_cover_open_and_close( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_hub_ping: AsyncMock, @@ -87,8 +88,8 @@ async def test_cover_close_and_open( entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "open" - assert entity.attributes["current_position"] == 100 + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 with patch( "wmspro.destination.Destination.refresh", @@ -98,15 +99,15 @@ async def test_cover_close_and_open( await hass.services.async_call( Platform.COVER, - SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: entity.entity_id}, blocking=True, ) entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "closed" - assert entity.attributes["current_position"] == 0 + assert entity.state == STATE_OPEN + assert entity.attributes["current_position"] == 100 assert len(mock_hub_status_prod_awning.mock_calls) == before with patch( @@ -117,19 +118,19 @@ async def test_cover_close_and_open( await hass.services.async_call( Platform.COVER, - SERVICE_OPEN_COVER, + SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: entity.entity_id}, blocking=True, ) entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "open" - assert entity.attributes["current_position"] == 100 + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 assert len(mock_hub_status_prod_awning.mock_calls) == before -async def test_cover_move( +async def test_cover_open_to_pos( hass: HomeAssistant, 
mock_config_entry: MockConfigEntry, mock_hub_ping: AsyncMock, @@ -137,7 +138,7 @@ async def test_cover_move( mock_hub_status_prod_awning: AsyncMock, mock_action_call: AsyncMock, ) -> None: - """Test that a cover entity is moved and closed correctly.""" + """Test that a cover entity is opened to correct position.""" assert await setup_config_entry(hass, mock_config_entry) assert len(mock_hub_ping.mock_calls) == 1 assert len(mock_hub_configuration_prod.mock_calls) == 1 @@ -145,8 +146,8 @@ async def test_cover_move( entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "open" - assert entity.attributes["current_position"] == 100 + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 with patch( "wmspro.destination.Destination.refresh", @@ -163,12 +164,12 @@ async def test_cover_move( entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "open" + assert entity.state == STATE_OPEN assert entity.attributes["current_position"] == 50 assert len(mock_hub_status_prod_awning.mock_calls) == before -async def test_cover_move_and_stop( +async def test_cover_open_and_stop( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_hub_ping: AsyncMock, @@ -176,7 +177,7 @@ async def test_cover_move_and_stop( mock_hub_status_prod_awning: AsyncMock, mock_action_call: AsyncMock, ) -> None: - """Test that a cover entity is moved and closed correctly.""" + """Test that a cover entity is opened and stopped correctly.""" assert await setup_config_entry(hass, mock_config_entry) assert len(mock_hub_ping.mock_calls) == 1 assert len(mock_hub_configuration_prod.mock_calls) == 1 @@ -184,8 +185,8 @@ async def test_cover_move_and_stop( entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "open" - assert entity.attributes["current_position"] == 100 + assert entity.state == STATE_CLOSED + assert entity.attributes["current_position"] == 0 with patch( "wmspro.destination.Destination.refresh", @@ -202,7 +203,7 @@ async def test_cover_move_and_stop( entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "open" + assert entity.state == STATE_OPEN assert entity.attributes["current_position"] == 80 assert len(mock_hub_status_prod_awning.mock_calls) == before @@ -221,6 +222,6 @@ async def test_cover_move_and_stop( entity = hass.states.get("cover.markise") assert entity is not None - assert entity.state == "open" + assert entity.state == STATE_OPEN assert entity.attributes["current_position"] == 80 assert len(mock_hub_status_prod_awning.mock_calls) == before diff --git a/tests/components/wmspro/test_light.py b/tests/components/wmspro/test_light.py new file mode 100644 index 00000000000000..db53b54a2f6afc --- /dev/null +++ b/tests/components/wmspro/test_light.py @@ -0,0 +1,206 @@ +"""Test the wmspro light support.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.light import ATTR_BRIGHTNESS +from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.components.wmspro.light import SCAN_INTERVAL +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_config_entry + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_light_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that a light device is created correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == 2 + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "97358")}) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_light_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test that a light entity is created and updated correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == 2 + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity == snapshot + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(mock_hub_status_prod_dimmer.mock_calls) >= 3 + + +async def test_light_turn_on_and_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + mock_action_call: AsyncMock, +) -> None: + """Test that a light entity is turned on and off correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) >= 1 + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_ON + assert entity.attributes[ATTR_BRIGHTNESS] >= 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + +async def test_light_dimm_on_and_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_prod: 
AsyncMock, + mock_hub_status_prod_dimmer: AsyncMock, + mock_action_call: AsyncMock, +) -> None: + """Test that a light entity is dimmed on and off correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_prod.mock_calls) == 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) >= 1 + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_ON + assert entity.attributes[ATTR_BRIGHTNESS] >= 1 + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id, ATTR_BRIGHTNESS: 128}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_ON + assert entity.attributes[ATTR_BRIGHTNESS] == 128 + assert len(mock_hub_status_prod_dimmer.mock_calls) == before + + with patch( + "wmspro.destination.Destination.refresh", + return_value=True, + ): + before = len(mock_hub_status_prod_dimmer.mock_calls) + + await hass.services.async_call( + Platform.LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + entity = hass.states.get("light.licht") + assert entity is not None + assert entity.state == STATE_OFF + assert entity.attributes[ATTR_BRIGHTNESS] is None + assert len(mock_hub_status_prod_dimmer.mock_calls) == before diff --git a/tests/components/wmspro/test_scene.py b/tests/components/wmspro/test_scene.py new file mode 100644 index 00000000000000..a6b16e5bbc949c --- /dev/null +++ b/tests/components/wmspro/test_scene.py @@ -0,0 +1,63 @@ +"""Test the wmspro scene support.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.wmspro.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from . 
import setup_config_entry + +from tests.common import MockConfigEntry + + +async def test_scene_room_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_test: AsyncMock, + mock_dest_refresh: AsyncMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that a scene room device is created correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_test.mock_calls) == 1 + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "42581")}) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_scene_activate( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_hub_ping: AsyncMock, + mock_hub_configuration_test: AsyncMock, + mock_dest_refresh: AsyncMock, + mock_scene_call: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test that a scene entity is created and activated correctly.""" + assert await setup_config_entry(hass, mock_config_entry) + assert len(mock_hub_ping.mock_calls) == 1 + assert len(mock_hub_configuration_test.mock_calls) == 1 + + entity = hass.states.get("scene.raum_0_gute_nacht") + assert entity is not None + assert entity == snapshot + + await async_setup_component(hass, "homeassistant", {}) + await hass.services.async_call( + "homeassistant", + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity.entity_id}, + blocking=True, + ) + + assert len(mock_scene_call.mock_calls) == 1 diff --git a/tests/components/workday/snapshots/test_binary_sensor.ambr b/tests/components/workday/snapshots/test_binary_sensor.ambr index 8ad2f37f360384..4cf7dca48612bb 100644 --- a/tests/components/workday/snapshots/test_binary_sensor.ambr +++ b/tests/components/workday/snapshots/test_binary_sensor.ambr @@ -5,21 +5,55 @@ 'workday', 'bad_date_holiday-1-2024_08_15', ): IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': None, 'created': , + 'data': dict({ + 'country': 'DE', + 'entry_id': '1', + 'named_holiday': '2024-08-15', + }), 'dismissed_version': None, 'domain': 'workday', + 'is_fixable': True, 'is_persistent': False, + 'issue_domain': None, 'issue_id': 'bad_date_holiday-1-2024_08_15', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'bad_date_holiday', + 'translation_placeholders': dict({ + 'country': 'DE', + 'remove_holidays': '2024-08-15', + 'title': 'Mock Title', + }), }), tuple( 'workday', 'bad_date_holiday-1-2025_08_15', ): IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': None, 'created': , + 'data': dict({ + 'country': 'DE', + 'entry_id': '1', + 'named_holiday': '2025-08-15', + }), 'dismissed_version': None, 'domain': 'workday', + 'is_fixable': True, 'is_persistent': False, + 'issue_domain': None, 'issue_id': 'bad_date_holiday-1-2025_08_15', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'bad_date_holiday', + 'translation_placeholders': dict({ + 'country': 'DE', + 'remove_holidays': '2025-08-15', + 'title': 'Mock Title', + }), }), }) # --- diff --git a/tests/components/wyoming/__init__.py b/tests/components/wyoming/__init__.py index 5bfbbfe87b2b67..4540cdaabfdfba 100644 --- a/tests/components/wyoming/__init__.py +++ b/tests/components/wyoming/__init__.py @@ -8,7 +8,11 @@ AsrModel, AsrProgram, Attribution, + HandleModel, + HandleProgram, Info, + IntentModel, + IntentProgram, Satellite, TtsProgram, TtsVoice, @@ -87,6 +91,48 @@ ) ] ) +INTENT_INFO = Info( + 
intent=[ + IntentProgram( + name="Test Intent", + description="Test Intent", + installed=True, + attribution=TEST_ATTR, + models=[ + IntentModel( + name="Test Model", + description="Test Model", + installed=True, + attribution=TEST_ATTR, + languages=["en-US"], + version=None, + ) + ], + version=None, + ) + ] +) +HANDLE_INFO = Info( + handle=[ + HandleProgram( + name="Test Handle", + description="Test Handle", + installed=True, + attribution=TEST_ATTR, + models=[ + HandleModel( + name="Test Model", + description="Test Model", + installed=True, + attribution=TEST_ATTR, + languages=["en-US"], + version=None, + ) + ], + version=None, + ) + ] +) SATELLITE_INFO = Info( satellite=Satellite( name="Test Satellite", @@ -150,10 +196,10 @@ async def reload_satellite( return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.run" + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.run" ) as _run_mock, ): # _run_mock: satellite task does not actually run await hass.config_entries.async_reload(config_entry_id) - return hass.data[DOMAIN][config_entry_id].satellite.device + return hass.data[DOMAIN][config_entry_id].device diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 770186d92aae81..018fff33821b6d 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -13,7 +13,14 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import SATELLITE_INFO, STT_INFO, TTS_INFO, WAKE_WORD_INFO +from . import ( + HANDLE_INFO, + INTENT_INFO, + SATELLITE_INFO, + STT_INFO, + TTS_INFO, + WAKE_WORD_INFO, +) from tests.common import MockConfigEntry @@ -83,6 +90,36 @@ def wake_word_config_entry(hass: HomeAssistant) -> ConfigEntry: return entry +@pytest.fixture +def intent_config_entry(hass: HomeAssistant) -> ConfigEntry: + """Create a config entry.""" + entry = MockConfigEntry( + domain="wyoming", + data={ + "host": "1.2.3.4", + "port": 1234, + }, + title="Test Intent", + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def handle_config_entry(hass: HomeAssistant) -> ConfigEntry: + """Create a config entry.""" + entry = MockConfigEntry( + domain="wyoming", + data={ + "host": "1.2.3.4", + "port": 1234, + }, + title="Test Handle", + ) + entry.add_to_hass(hass) + return entry + + @pytest.fixture async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): """Initialize Wyoming STT.""" @@ -115,6 +152,34 @@ async def init_wyoming_wake_word( await hass.config_entries.async_setup(wake_word_config_entry.entry_id) +@pytest.fixture +async def init_wyoming_intent( + hass: HomeAssistant, intent_config_entry: ConfigEntry +) -> ConfigEntry: + """Initialize Wyoming intent recognizer.""" + with patch( + "homeassistant.components.wyoming.data.load_wyoming_info", + return_value=INTENT_INFO, + ): + await hass.config_entries.async_setup(intent_config_entry.entry_id) + + return intent_config_entry + + +@pytest.fixture +async def init_wyoming_handle( + hass: HomeAssistant, handle_config_entry: ConfigEntry +) -> ConfigEntry: + """Initialize Wyoming intent handler.""" + with patch( + "homeassistant.components.wyoming.data.load_wyoming_info", + return_value=HANDLE_INFO, + ): + await hass.config_entries.async_setup(handle_config_entry.entry_id) + + return handle_config_entry + + @pytest.fixture def metadata(hass: HomeAssistant) -> stt.SpeechMetadata: """Get default STT metadata.""" @@ -152,7 +217,7 @@ async def 
init_satellite(hass: HomeAssistant, satellite_config_entry: ConfigEntr return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.run" + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.run" ) as _run_mock, ): # _run_mock: satellite task does not actually run @@ -164,4 +229,4 @@ async def satellite_device( hass: HomeAssistant, init_satellite, satellite_config_entry: ConfigEntry ) -> SatelliteDevice: """Get a satellite device fixture.""" - return hass.data[DOMAIN][satellite_config_entry.entry_id].satellite.device + return hass.data[DOMAIN][satellite_config_entry.entry_id].device diff --git a/tests/components/wyoming/snapshots/test_conversation.ambr b/tests/components/wyoming/snapshots/test_conversation.ambr new file mode 100644 index 00000000000000..24763cac4416d1 --- /dev/null +++ b/tests/components/wyoming/snapshots/test_conversation.ambr @@ -0,0 +1,7 @@ +# serializer version: 1 +# name: test_connection_lost + 'Connection to service was lost' +# --- +# name: test_oserror + 'Error communicating with service: Boom!' +# --- diff --git a/tests/components/wyoming/test_config_flow.py b/tests/components/wyoming/test_config_flow.py index e363a0650bc559..6bca226d6213ed 100644 --- a/tests/components/wyoming/test_config_flow.py +++ b/tests/components/wyoming/test_config_flow.py @@ -8,11 +8,11 @@ from wyoming.info import Info from homeassistant import config_entries -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.wyoming.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from . import EMPTY_INFO, SATELLITE_INFO, STT_INFO, TTS_INFO diff --git a/tests/components/wyoming/test_conversation.py b/tests/components/wyoming/test_conversation.py new file mode 100644 index 00000000000000..02b04503962887 --- /dev/null +++ b/tests/components/wyoming/test_conversation.py @@ -0,0 +1,224 @@ +"""Test conversation.""" + +from __future__ import annotations + +from unittest.mock import patch + +from syrupy import SnapshotAssertion +from wyoming.asr import Transcript +from wyoming.handle import Handled, NotHandled +from wyoming.intent import Entity, Intent, NotRecognized + +from homeassistant.components import conversation +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers import intent + +from . 
import MockAsyncTcpClient + + +async def test_intent(hass: HomeAssistant, init_wyoming_intent: ConfigEntry) -> None: + """Test when an intent is recognized.""" + agent_id = "conversation.test_intent" + + conversation_id = "conversation-1234" + test_intent = Intent( + name="TestIntent", + entities=[Entity(name="entity", value="value")], + text="success", + ) + + class TestIntentHandler(intent.IntentHandler): + """Test Intent Handler.""" + + intent_type = "TestIntent" + + async def async_handle(self, intent_obj: intent.Intent): + """Handle the intent.""" + assert intent_obj.slots.get("entity", {}).get("value") == "value" + return intent_obj.create_response() + + intent.async_register(hass, TestIntentHandler()) + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([test_intent.event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=conversation_id, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "success" + assert result.conversation_id == conversation_id + + +async def test_intent_handle_error( + hass: HomeAssistant, init_wyoming_intent: ConfigEntry +) -> None: + """Test error during handling when an intent is recognized.""" + agent_id = "conversation.test_intent" + + test_intent = Intent(name="TestIntent", entities=[], text="success") + + class TestIntentHandler(intent.IntentHandler): + """Test Intent Handler.""" + + intent_type = "TestIntent" + + async def async_handle(self, intent_obj: intent.Intent): + """Handle the intent.""" + raise intent.IntentError + + intent.async_register(hass, TestIntentHandler()) + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([test_intent.event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.FAILED_TO_HANDLE + + +async def test_not_recognized( + hass: HomeAssistant, init_wyoming_intent: ConfigEntry +) -> None: + """Test when an intent is not recognized.""" + agent_id = "conversation.test_intent" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([NotRecognized(text="failure").event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.NO_INTENT_MATCH + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "failure" + + +async def test_handle(hass: HomeAssistant, init_wyoming_handle: ConfigEntry) -> None: + """Test when an intent is handled.""" + agent_id = "conversation.test_handle" + + conversation_id = "conversation-1234" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([Handled(text="success").event()]), + ): + result = await conversation.async_converse( + 
hass=hass, + text="test text", + conversation_id=conversation_id, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "success" + assert result.conversation_id == conversation_id + + +async def test_not_handled( + hass: HomeAssistant, init_wyoming_handle: ConfigEntry +) -> None: + """Test when an intent is not handled.""" + agent_id = "conversation.test_handle" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([NotHandled(text="failure").event()]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.FAILED_TO_HANDLE + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == "failure" + + +async def test_connection_lost( + hass: HomeAssistant, init_wyoming_handle: ConfigEntry, snapshot: SnapshotAssertion +) -> None: + """Test connection to client is lost.""" + agent_id = "conversation.test_handle" + + with patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", + MockAsyncTcpClient([None]), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.UNKNOWN + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == snapshot() + + +async def test_oserror( + hass: HomeAssistant, init_wyoming_handle: ConfigEntry, snapshot: SnapshotAssertion +) -> None: + """Test connection error.""" + agent_id = "conversation.test_handle" + + mock_client = MockAsyncTcpClient([Transcript("success").event()]) + + with ( + patch( + "homeassistant.components.wyoming.conversation.AsyncTcpClient", mock_client + ), + patch.object(mock_client, "read_event", side_effect=OSError("Boom!")), + ): + result = await conversation.async_converse( + hass=hass, + text="test text", + conversation_id=None, + context=Context(), + language=hass.config.language, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR + assert result.response.error_code == intent.IntentResponseErrorCode.UNKNOWN + assert result.response.speech, "No speech" + assert result.response.speech.get("plain", {}).get("speech") == snapshot() diff --git a/tests/components/wyoming/test_satellite.py b/tests/components/wyoming/test_satellite.py index 1a291153ad0ac6..f293f976242e2b 100644 --- a/tests/components/wyoming/test_satellite.py +++ b/tests/components/wyoming/test_satellite.py @@ -23,6 +23,7 @@ from wyoming.wake import Detect, Detection from homeassistant.components import assist_pipeline, wyoming +from homeassistant.components.wyoming.assist_satellite import WyomingAssistSatellite from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant, State @@ -240,23 +241,22 @@ async def 
async_pipeline_from_audio_stream( return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", + "homeassistant.components.wyoming.assist_satellite.tts.async_get_media_source_audio", return_value=("wav", get_test_wav()), ), - patch("homeassistant.components.wyoming.satellite._PING_SEND_DELAY", 0), + patch("homeassistant.components.wyoming.assist_satellite._PING_SEND_DELAY", 0), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device + assert device is not None async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -443,7 +443,7 @@ async def test_satellite_muted(hass: HomeAssistant) -> None: """Test callback for a satellite that has been muted.""" on_muted_event = asyncio.Event() - original_on_muted = wyoming.satellite.WyomingSatellite.on_muted + original_on_muted = WyomingAssistSatellite.on_muted async def on_muted(self): # Trigger original function @@ -462,12 +462,16 @@ async def on_muted(self): "homeassistant.components.wyoming.data.load_wyoming_info", return_value=SATELLITE_INFO, ), + patch( + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", + SatelliteAsyncTcpClient([]), + ), patch( "homeassistant.components.wyoming.switch.WyomingSatelliteMuteSwitch.async_get_last_state", return_value=State("switch.test_mute", STATE_ON), ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_muted", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_muted", on_muted, ), ): @@ -484,11 +488,11 @@ async def test_satellite_restart(hass: HomeAssistant) -> None: """Test pipeline loop restart after unexpected error.""" on_restart_event = asyncio.Event() - original_on_restart = wyoming.satellite.WyomingSatellite.on_restart + original_on_restart = WyomingAssistSatellite.on_restart async def on_restart(self): await original_on_restart(self) - self.stop() + self.stop_satellite() on_restart_event.set() with ( @@ -497,14 +501,14 @@ async def on_restart(self): return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite._connect_and_loop", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._connect_and_loop", side_effect=RuntimeError(), ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", on_restart, ), - patch("homeassistant.components.wyoming.satellite._RESTART_SECONDS", 0), + patch("homeassistant.components.wyoming.assist_satellite._RESTART_SECONDS", 0), ): await setup_config_entry(hass) async with asyncio.timeout(1): @@ -517,7 +521,7 @@ async def test_satellite_reconnect(hass: HomeAssistant) -> None: reconnect_event = asyncio.Event() stopped_event = asyncio.Event() - original_on_reconnect = wyoming.satellite.WyomingSatellite.on_reconnect + original_on_reconnect = WyomingAssistSatellite.on_reconnect async def 
on_reconnect(self): await original_on_reconnect(self) @@ -526,7 +530,7 @@ async def on_reconnect(self): num_reconnects += 1 if num_reconnects >= 2: reconnect_event.set() - self.stop() + self.stop_satellite() async def on_stopped(self): stopped_event.set() @@ -537,18 +541,20 @@ async def on_stopped(self): return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient.connect", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient.connect", side_effect=ConnectionRefusedError(), ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_reconnect", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_reconnect", on_reconnect, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_stopped", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_stopped", on_stopped, ), - patch("homeassistant.components.wyoming.satellite._RECONNECT_SECONDS", 0), + patch( + "homeassistant.components.wyoming.assist_satellite._RECONNECT_SECONDS", 0 + ), ): await setup_config_entry(hass) async with asyncio.timeout(1): @@ -561,7 +567,7 @@ async def test_satellite_disconnect_before_pipeline(hass: HomeAssistant) -> None on_restart_event = asyncio.Event() async def on_restart(self): - self.stop() + self.stop_satellite() on_restart_event.set() with ( @@ -570,14 +576,14 @@ async def on_restart(self): return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", MockAsyncTcpClient([]), # no RunPipeline event ), patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", on_restart, ), ): @@ -603,7 +609,7 @@ async def test_satellite_disconnect_during_pipeline(hass: HomeAssistant) -> None async def on_restart(self): # Pretend sensor got stuck on self.device.is_active = True - self.stop() + self.stop_satellite() on_restart_event.set() async def on_stopped(self): @@ -615,25 +621,23 @@ async def on_stopped(self): return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", MockAsyncTcpClient(events), ), patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", ) as mock_run_pipeline, patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_restart", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_restart", on_restart, ), patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite.on_stopped", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite.on_stopped", on_stopped, ), ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device async with asyncio.timeout(1): await on_restart_event.wait() @@ -665,11 +669,11 @@ def _async_pipeline_from_audio_stream(*args: Any, **kwargs: Any) -> None: 
             return_value=SATELLITE_INFO,
         ),
         patch(
-            "homeassistant.components.wyoming.satellite.AsyncTcpClient",
+            "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient",
             SatelliteAsyncTcpClient(events),
         ) as mock_client,
         patch(
-            "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream",
+            "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream",
             wraps=_async_pipeline_from_audio_stream,
         ) as mock_run_pipeline,
     ):
@@ -701,7 +705,7 @@ async def test_tts_not_wav(hass: HomeAssistant) -> None:
     """Test satellite receiving non-WAV audio from text-to-speech."""
     assert await async_setup_component(hass, assist_pipeline.DOMAIN, {})
 
-    original_stream_tts = wyoming.satellite.WyomingSatellite._stream_tts
+    original_stream_tts = WyomingAssistSatellite._stream_tts
     error_event = asyncio.Event()
 
     async def _stream_tts(self, media_id):
@@ -724,19 +728,19 @@ def _async_pipeline_from_audio_stream(*args: Any, **kwargs: Any) -> None:
             return_value=SATELLITE_INFO,
         ),
         patch(
-            "homeassistant.components.wyoming.satellite.AsyncTcpClient",
+            "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient",
             SatelliteAsyncTcpClient(events),
         ) as mock_client,
         patch(
-            "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream",
+            "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream",
             wraps=_async_pipeline_from_audio_stream,
         ) as mock_run_pipeline,
         patch(
-            "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio",
+            "homeassistant.components.wyoming.assist_satellite.tts.async_get_media_source_audio",
             return_value=("mp3", bytes(1)),
         ),
         patch(
-            "homeassistant.components.wyoming.satellite.WyomingSatellite._stream_tts",
+            "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._stream_tts",
             _stream_tts,
         ),
     ):
@@ -819,18 +823,16 @@ async def async_pipeline_from_audio_stream(
             return_value=SATELLITE_INFO,
         ),
         patch(
-            "homeassistant.components.wyoming.satellite.AsyncTcpClient",
+            "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient",
             SatelliteAsyncTcpClient(events),
         ) as mock_client,
         patch(
-            "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream",
+            "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream",
             async_pipeline_from_audio_stream,
         ),
     ):
         entry = await setup_config_entry(hass)
-        device: SatelliteDevice = hass.data[wyoming.DOMAIN][
-            entry.entry_id
-        ].satellite.device
+        device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device
 
         async with asyncio.timeout(1):
             await mock_client.connect_event.wait()
@@ -893,18 +895,16 @@ async def async_pipeline_from_audio_stream(
             return_value=SATELLITE_INFO,
         ),
         patch(
-            "homeassistant.components.wyoming.satellite.AsyncTcpClient",
+            "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient",
             SatelliteAsyncTcpClient(events),
         ) as mock_client,
         patch(
-            "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream",
+            "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream",
             async_pipeline_from_audio_stream,
         ),
     ):
         entry = await setup_config_entry(hass)
-        device: SatelliteDevice = hass.data[wyoming.DOMAIN][
-            entry.entry_id
-        ].satellite.device
+        device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device
 
         async with asyncio.timeout(1):
             await mock_client.connect_event.wait()
@@ -938,7 +938,7 @@ async def test_invalid_stages(hass: HomeAssistant) -> None:
).event(), ] - original_run_pipeline_once = wyoming.satellite.WyomingSatellite._run_pipeline_once + original_run_pipeline_once = WyomingAssistSatellite._run_pipeline_once start_stage_event = asyncio.Event() end_stage_event = asyncio.Event() @@ -967,11 +967,11 @@ def _run_pipeline_once(self, run_pipeline, wake_word_phrase): return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.WyomingSatellite._run_pipeline_once", + "homeassistant.components.wyoming.assist_satellite.WyomingAssistSatellite._run_pipeline_once", _run_pipeline_once, ), ): @@ -1029,11 +1029,11 @@ async def async_pipeline_from_audio_stream( return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ) as mock_client, patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", async_pipeline_from_audio_stream, ), ): @@ -1083,11 +1083,11 @@ def _async_pipeline_from_audio_stream(*args: Any, **kwargs: Any) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient(events), ), patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", wraps=_async_pipeline_from_audio_stream, ) as mock_run_pipeline, ): @@ -1114,14 +1114,12 @@ async def test_timers(hass: HomeAssistant) -> None: return_value=SATELLITE_INFO, ), patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", + "homeassistant.components.wyoming.assist_satellite.AsyncTcpClient", SatelliteAsyncTcpClient([]), ) as mock_client, ): entry = await setup_config_entry(hass) - device: SatelliteDevice = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite.device + device: SatelliteDevice = hass.data[wyoming.DOMAIN][entry.entry_id].device async with asyncio.timeout(1): await mock_client.connect_event.wait() @@ -1285,104 +1283,3 @@ async def test_timers(hass: HomeAssistant) -> None: timer_finished = mock_client.timer_finished assert timer_finished is not None assert timer_finished.id == timer_started.id - - -async def test_satellite_conversation_id(hass: HomeAssistant) -> None: - """Test that the same conversation id is used until timeout.""" - assert await async_setup_component(hass, assist_pipeline.DOMAIN, {}) - - events = [ - RunPipeline( - start_stage=PipelineStage.WAKE, - end_stage=PipelineStage.TTS, - restart_on_end=True, - ).event(), - ] - - pipeline_kwargs: dict[str, Any] = {} - pipeline_event_callback: Callable[[assist_pipeline.PipelineEvent], None] | None = ( - None - ) - run_pipeline_called = asyncio.Event() - - async def async_pipeline_from_audio_stream( - hass: HomeAssistant, - context, - event_callback, - stt_metadata, - stt_stream, - **kwargs, - ) -> None: - nonlocal pipeline_kwargs, pipeline_event_callback - pipeline_kwargs = kwargs - pipeline_event_callback = event_callback - - run_pipeline_called.set() - - with ( - patch( - "homeassistant.components.wyoming.data.load_wyoming_info", - 
return_value=SATELLITE_INFO, - ), - patch( - "homeassistant.components.wyoming.satellite.AsyncTcpClient", - SatelliteAsyncTcpClient(events), - ) as mock_client, - patch( - "homeassistant.components.wyoming.satellite.assist_pipeline.async_pipeline_from_audio_stream", - async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.wyoming.satellite.tts.async_get_media_source_audio", - return_value=("wav", get_test_wav()), - ), - patch("homeassistant.components.wyoming.satellite._PING_SEND_DELAY", 0), - ): - entry = await setup_config_entry(hass) - satellite: wyoming.WyomingSatellite = hass.data[wyoming.DOMAIN][ - entry.entry_id - ].satellite - - async with asyncio.timeout(1): - await mock_client.connect_event.wait() - await mock_client.run_satellite_event.wait() - - async with asyncio.timeout(1): - await run_pipeline_called.wait() - - assert pipeline_event_callback is not None - - # A conversation id should have been generated - conversation_id = pipeline_kwargs.get("conversation_id") - assert conversation_id - - # Reset and run again - run_pipeline_called.clear() - pipeline_kwargs.clear() - - pipeline_event_callback( - assist_pipeline.PipelineEvent(assist_pipeline.PipelineEventType.RUN_END) - ) - - async with asyncio.timeout(1): - await run_pipeline_called.wait() - - # Should be the same conversation id - assert pipeline_kwargs.get("conversation_id") == conversation_id - - # Reset and run again, but this time "time out" - satellite._conversation_id_time = None - run_pipeline_called.clear() - pipeline_kwargs.clear() - - pipeline_event_callback( - assist_pipeline.PipelineEvent(assist_pipeline.PipelineEventType.RUN_END) - ) - - async with asyncio.timeout(1): - await run_pipeline_called.wait() - - # Should be a different conversation id - new_conversation_id = pipeline_kwargs.get("conversation_id") - assert new_conversation_id - assert new_conversation_id != conversation_id diff --git a/tests/components/xiaomi_ble/test_config_flow.py b/tests/components/xiaomi_ble/test_config_flow.py index f690665608b6d3..e25ac939a53779 100644 --- a/tests/components/xiaomi_ble/test_config_flow.py +++ b/tests/components/xiaomi_ble/test_config_flow.py @@ -2,7 +2,12 @@ from unittest.mock import patch -from xiaomi_ble import XiaomiBluetoothDeviceData as DeviceData +from xiaomi_ble import ( + XiaomiBluetoothDeviceData as DeviceData, + XiaomiCloudBLEDevice, + XiaomiCloudException, + XiaomiCloudInvalidAuthenticationException, +) from homeassistant import config_entries from homeassistant.components.bluetooth import BluetoothChange @@ -96,20 +101,25 @@ async def _async_process_advertisements( context={"source": config_entries.SOURCE_BLUETOOTH}, data=MISSING_PAYLOAD_ENCRYPTED, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "a115210eed7a88e50ad52662e732a9fb"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} - assert 
result2["result"].unique_id == "A4:C1:38:56:53:84" + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} + assert result3["result"].unique_id == "A4:C1:38:56:53:84" async def test_async_step_bluetooth_during_onboarding(hass: HomeAssistant) -> None: @@ -239,21 +249,244 @@ async def test_async_step_bluetooth_valid_device_v4_encryption( context={"source": config_entries.SOURCE_BLUETOOTH}, data=JTYJGD03MI_SERVICE_INFO, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_device_v4_encryption_from_cloud( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with a valid v4 device, with auth from cloud.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "x"}, + ) + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_device_v4_encryption_from_cloud_wrong_key( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with a valid v4 device, with wrong auth from cloud.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + + device 
= XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + ) + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "x"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" + + # Verify we can fallback to manual key + with patch( + "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True + ): + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, + ) + + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_incorrect_cloud_account( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with incorrect cloud account.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=None, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "wrong@wrong.wrong", "password": "correct"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "cloud_auth" + assert result3["errors"]["base"] == "api_device_not_found" + + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + # Verify we can try again with the correct account + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"username": "correct@correct.correct", "password": "correct"}, + ) + + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_incorrect_cloud_auth( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth with incorrect cloud auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + 
side_effect=XiaomiCloudInvalidAuthenticationException, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "wrong"}, + ) + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "cloud_auth" + assert result3["errors"]["base"] == "auth_failed" + + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + # Verify we can try again with the correct password + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"username": "x@x.x", "password": "correct"}, + ) + + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" + + +async def test_bluetooth_discovery_cloud_offline( + hass: HomeAssistant, +) -> None: + """Test discovery via bluetooth when the cloud is offline.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_BLUETOOTH}, + data=JTYJGD03MI_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": "cloud_auth"}, + ) + + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + side_effect=XiaomiCloudException, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "wrong"}, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "api_error" async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key( @@ -265,31 +498,36 @@ async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key( context={"source": config_entries.SOURCE_BLUETOOTH}, data=JTYJGD03MI_SERVICE_INFO, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "decryption_failed" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" # Test can finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - 
assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key_length( @@ -301,31 +539,36 @@ async def test_async_step_bluetooth_valid_device_v4_encryption_wrong_key_length( context={"source": config_entries.SOURCE_BLUETOOTH}, data=JTYJGD03MI_SERVICE_INFO, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "get_encryption_key_4_5" + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18fda143a58"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "expected_32_characters" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "expected_32_characters" # Test can finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_bluetooth_not_xiaomi(hass: HomeAssistant) -> None: @@ -457,20 +700,25 @@ async def _async_process_advertisements( result["flow_id"], user_input={"address": "A4:C1:38:56:53:84"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "a115210eed7a88e50ad52662e732a9fb"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Temperature/Humidity Sensor 5384 (LYWSD03MMC)" - assert result2["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Temperature/Humidity 
Sensor 5384 (LYWSD03MMC)" + assert result3["data"] == {"bindkey": "a115210eed7a88e50ad52662e732a9fb"} async def test_async_step_user_with_found_devices_v4_encryption( @@ -492,21 +740,26 @@ async def test_async_step_user_with_found_devices_v4_encryption( result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result3["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result3["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_v4_encryption_wrong_key( @@ -530,31 +783,36 @@ async def test_async_step_user_with_found_devices_v4_encryption_wrong_key( result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" - # Try an incorrect key result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + # Try an incorrect key + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "decryption_failed" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" # Check can still finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def 
test_async_step_user_with_found_devices_v4_encryption_wrong_key_length( @@ -578,33 +836,38 @@ async def test_async_step_user_with_found_devices_v4_encryption_wrong_key_length result["flow_id"], user_input={"address": "54:EF:44:E3:9C:BC"}, ) - assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "get_encryption_key_4_5" + assert result1["type"] is FlowResultType.MENU + assert result1["step_id"] == "get_encryption_key_4_5_choose_method" - # Try an incorrect key result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result1["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + # Try an incorrect key + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef1dfda143a58"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "expected_32_characters" + assert result3["type"] is FlowResultType.FORM + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "expected_32_characters" # Check can still finish flow with patch( "homeassistant.components.xiaomi_ble.async_setup_entry", return_value=True ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" - assert result2["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} - assert result2["result"].unique_id == "54:EF:44:E3:9C:BC" + assert result4["type"] is FlowResultType.CREATE_ENTRY + assert result4["title"] == "Smoke Detector 9CBC (JTYJGD03MI)" + assert result4["data"] == {"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"} + assert result4["result"].unique_id == "54:EF:44:E3:9C:BC" async def test_async_step_user_with_found_devices_legacy_encryption( @@ -1003,14 +1266,19 @@ def _async_register_callback(_hass, _callback, _matcher, _mode): assert len(results) == 1 result = results[0] - assert result["step_id"] == "get_encryption_key_4_5" + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, ) - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" async def test_async_step_reauth_v4_wrong_key(hass: HomeAssistant) -> None: @@ -1052,22 +1320,90 @@ def _async_register_callback(_hass, _callback, _matcher, _mode): assert len(results) == 1 result = results[0] - assert result["step_id"] == "get_encryption_key_4_5" + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], + user_input={"next_step_id": "get_encryption_key_4_5"}, + ) + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], user_input={"bindkey": "5b51a7c91cde6707c9ef18dada143a58"}, ) - assert 
result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "get_encryption_key_4_5" - assert result2["errors"]["bindkey"] == "decryption_failed" + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "get_encryption_key_4_5" + assert result3["errors"]["bindkey"] == "decryption_failed" + + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, + ) + assert result4["type"] is FlowResultType.ABORT + assert result4["reason"] == "reauth_successful" + + +async def test_async_step_reauth_v4_from_cloud(hass: HomeAssistant) -> None: + """Test reauth with a v4 key from the cloud.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="54:EF:44:E3:9C:BC", + ) + entry.add_to_hass(hass) + saved_callback = None + + def _async_register_callback(_hass, _callback, _matcher, _mode): + nonlocal saved_callback + saved_callback = _callback + return lambda: None + + with patch( + "homeassistant.components.bluetooth.update_coordinator.async_register_callback", + _async_register_callback, + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 + + # WARNING: This test data is synthetic, rather than captured from a real device + # obj type is 0x1310, payload len is 0x2 and payload is 0x6000 + saved_callback( + make_advertisement( + "54:EF:44:E3:9C:BC", + b"XY\x97\tf\xbc\x9c\xe3D\xefT\x01\x08\x12\x05\x00\x00\x00q^\xbe\x90", + ), + BluetoothChange.ADVERTISEMENT, + ) + + await hass.async_block_till_done() + + results = hass.config_entries.flow.async_progress() + assert len(results) == 1 + result = results[0] + + assert result["step_id"] == "get_encryption_key_4_5_choose_method" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"bindkey": "5b51a7c91cde6707c9ef18dfda143a58"}, + user_input={"next_step_id": "cloud_auth"}, ) - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" + device = XiaomiCloudBLEDevice( + name="x", + mac="54:EF:44:E3:9C:BC", + bindkey="5b51a7c91cde6707c9ef18dfda143a58", + ) + with patch( + "homeassistant.components.xiaomi_ble.config_flow.XiaomiCloudTokenFetch.get_device_info", + return_value=device, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"username": "x@x.x", "password": "x"}, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: diff --git a/tests/components/yale_smart_alarm/fixtures/get_all.json b/tests/components/yale_smart_alarm/fixtures/get_all.json index e85a93f3c3e9cd..6c68e05c566060 100644 --- a/tests/components/yale_smart_alarm/fixtures/get_all.json +++ b/tests/components/yale_smart_alarm/fixtures/get_all.json @@ -175,7 +175,7 @@ "address": "RF4", "type": "device_type.door_contact", "name": "Device4", - "status1": "device_status.dc_close", + "status1": "device_status.dc_close,device_status.low_battery", "status2": null, "status_switch": null, "status_power": null, @@ -763,7 +763,7 @@ "address": "RF4", "type": "device_type.door_contact", "name": "Device4", - "status1": "device_status.dc_close", + "status1": "device_status.dc_close,device_status.low_battery", "status2": null, "status_switch": null, "status_power": null, diff --git a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr 
b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr index 7bb144e8d2ad17..ed7e847439c609 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_binary_sensor.ambr @@ -1,4 +1,51 @@ # serializer version: 1 +# name: test_binary_sensor[load_platforms0][binary_sensor.device4_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.device4_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'RF4-battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device4_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Device4 Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.device4_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device4_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -46,6 +93,53 @@ 'state': 'off', }) # --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device5_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.device5_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'RF5-battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device5_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Device5 Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.device5_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device5_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -93,6 +187,53 @@ 'state': 'on', }) # --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device6_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.device6_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 
'yale_smart_alarm', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'RF6-battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.device6_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Device6 Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.device6_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensor[load_platforms0][binary_sensor.device6_door-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr index e78c9520429a6e..af939336677596 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr @@ -203,6 +203,7 @@ 'type_no': '72', }), dict({ + '_battery': True, '_state': 'closed', 'address': '**REDACTED**', 'area': '1', @@ -234,7 +235,7 @@ 'sresp_button_2': None, 'sresp_button_3': None, 'sresp_button_4': None, - 'status1': 'device_status.dc_close', + 'status1': 'device_status.dc_close,device_status.low_battery', 'status2': None, 'status_dim_level': None, 'status_fault': list([ @@ -264,6 +265,7 @@ 'type_no': '4', }), dict({ + '_battery': False, '_state': 'open', 'address': '**REDACTED**', 'area': '1', @@ -325,6 +327,7 @@ 'type_no': '4', }), dict({ + '_battery': False, '_state': 'unavailable', 'address': '**REDACTED**', 'area': '1', @@ -855,7 +858,7 @@ 'sresp_button_2': None, 'sresp_button_3': None, 'sresp_button_4': None, - 'status1': 'device_status.dc_close', + 'status1': 'device_status.dc_close,device_status.low_battery', 'status2': None, 'status_dim_level': None, 'status_fault': list([ diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index d5651503768583..e5b59f79463146 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -149,7 +149,6 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", "password": "new-test-password", }, ) @@ -203,7 +202,6 @@ async def test_reauth_flow_error( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", "password": "wrong-password", }, ) @@ -226,7 +224,6 @@ async def test_reauth_flow_error( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", "password": "new-test-password", }, ) @@ -242,6 +239,211 @@ async def test_reauth_flow_error( } +async def test_reconfigure(hass: HomeAssistant) -> None: + """Test reconfigure config flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + }, + version=2, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result2 = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "new-test-password", + "area_id": "2", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "test-username", + "password": "new-test-password", + "name": "Yale Smart Alarm", + "area_id": "2", + } + + +async def test_reconfigure_username_exist(hass: HomeAssistant) -> None: + """Test reconfigure config flow abort other username already exist.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + }, + version=2, + ) + entry.add_to_hass(hass) + entry2 = MockConfigEntry( + domain=DOMAIN, + unique_id="other-username", + data={ + "username": "other-username", + "password": "test-password", + "name": "Yale Smart Alarm 2", + "area_id": "1", + }, + version=2, + ) + entry2.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "other-username", + "password": "test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unique_id_exists"} + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "other-new-username", + "password": "test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "other-new-username", + "name": "Yale Smart Alarm", + "password": "test-password", + "area_id": "1", + } + + +@pytest.mark.parametrize( + ("sideeffect", "p_error"), + [ + (AuthenticationError, "invalid_auth"), + (ConnectionError, "cannot_connect"), + (TimeoutError, "cannot_connect"), + (UnknownError, "cannot_connect"), + ], +) +async def test_reconfigure_flow_error( + hass: HomeAssistant, sideeffect: Exception, p_error: str +) -> None: + """Test a reauthentication flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + }, + version=2, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + side_effect=sideeffect, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "update-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": p_error} + + with ( + patch( + 
"homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "new-test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "test-username", + "name": "Yale Smart Alarm", + "password": "new-test-password", + "area_id": "1", + } + + async def test_options_flow(hass: HomeAssistant) -> None: """Test options config flow.""" entry = MockConfigEntry( diff --git a/tests/components/yale_smart_alarm/test_coordinator.py b/tests/components/yale_smart_alarm/test_coordinator.py index 41362f2318a701..386e4ad72f7ebf 100644 --- a/tests/components/yale_smart_alarm/test_coordinator.py +++ b/tests/components/yale_smart_alarm/test_coordinator.py @@ -13,9 +13,10 @@ YaleSmartAlarmData, ) +from homeassistant.components.alarm_control_panel import AlarmControlPanelState from homeassistant.components.yale_smart_alarm.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -74,7 +75,7 @@ async def test_coordinator_setup_and_update_errors( client = load_config_entry[1] state = hass.states.get("alarm_control_panel.yale_smart_alarm") - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY client.reset_mock() client.get_information.side_effect = ConnectionError("Could not connect") @@ -116,7 +117,7 @@ async def test_coordinator_setup_and_update_errors( await hass.async_block_till_done(wait_background_tasks=True) client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") - assert state.state == STATE_ALARM_ARMED_AWAY + assert state.state == AlarmControlPanelState.ARMED_AWAY client.reset_mock() client.get_information.side_effect = AuthenticationError("Can not authenticate") diff --git a/tests/components/zha/test_alarm_control_panel.py b/tests/components/zha/test_alarm_control_panel.py index 3473a9b00adeb6..609438cd7250fd 100644 --- a/tests/components/zha/test_alarm_control_panel.py +++ b/tests/components/zha/test_alarm_control_panel.py @@ -8,22 +8,17 @@ from zigpy.zcl.clusters import security import zigpy.zcl.foundation as zcl_f -from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_DOMAIN, + AlarmControlPanelState, +) from homeassistant.components.zha.helpers import ( ZHADeviceProxy, ZHAGatewayProxy, get_zha_gateway, get_zha_gateway_proxy, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - Platform, -) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from .common import find_entity_id @@ -79,7 +74,7 @@ async def test_alarm_control_panel( cluster = zigpy_device.endpoints[1].ias_ace assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + 
assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED # arm_away from HA cluster.client_command.reset_mock() @@ -90,7 +85,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -113,7 +108,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY cluster.client_command.reset_mock() await hass.services.async_call( ALARM_DOMAIN, @@ -128,7 +123,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED assert cluster.client_command.call_count == 4 assert cluster.client_command.await_count == 4 assert cluster.client_command.call_args == call( @@ -151,7 +146,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -171,7 +166,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( @@ -190,7 +185,7 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_All_Zones, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -200,7 +195,7 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_Day_Home_Only, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -210,33 +205,33 @@ async def test_alarm_control_panel( "cluster_command", 1, 0, [security.IasAce.ArmMode.Arm_Night_Sleep_Only, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT # disarm from panel with bad code cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_NIGHT # disarm from panel with bad code for 2nd time trips alarm cluster.listener_event( "cluster_command", 1, 
0, [security.IasAce.ArmMode.Disarm, "", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # disarm from panel with good code cluster.listener_event( "cluster_command", 1, 0, [security.IasAce.ArmMode.Disarm, "4321", 0] ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED # panic from panel cluster.listener_event("cluster_command", 1, 4, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -244,7 +239,7 @@ async def test_alarm_control_panel( # fire from panel cluster.listener_event("cluster_command", 1, 3, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -252,7 +247,7 @@ async def test_alarm_control_panel( # emergency from panel cluster.listener_event("cluster_command", 1, 2, []) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED # reset the panel await reset_alarm_panel(hass, cluster, entity_id) @@ -264,7 +259,7 @@ async def test_alarm_control_panel( blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert hass.states.get(entity_id).state == AlarmControlPanelState.TRIGGERED assert cluster.client_command.call_count == 1 assert cluster.client_command.await_count == 1 assert cluster.client_command.call_args == call( @@ -290,7 +285,7 @@ async def reset_alarm_panel(hass: HomeAssistant, cluster: Cluster, entity_id: st blocking=True, ) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED assert cluster.client_command.call_count == 2 assert cluster.client_command.await_count == 2 assert cluster.client_command.call_args == call( diff --git a/tests/components/zha/test_binary_sensor.py b/tests/components/zha/test_binary_sensor.py index 419823b3b5202b..a9765a1b547792 100644 --- a/tests/components/zha/test_binary_sensor.py +++ b/tests/components/zha/test_binary_sensor.py @@ -14,6 +14,7 @@ ) from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -37,6 +38,7 @@ def binary_sensor_platform_only(): async def test_binary_sensor( hass: HomeAssistant, + entity_registry: er.EntityRegistry, setup_zha, zigpy_device_mock, ) -> None: @@ -77,3 +79,20 @@ async def test_binary_sensor( hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} ) assert hass.states.get(entity_id).state == STATE_OFF + + # test enable / disable sync w/ ZHA library + entity_entry = entity_registry.async_get(entity_id) + entity_key = (Platform.BINARY_SENSOR, entity_entry.unique_id) + assert 
zha_device_proxy.device.platform_entities.get(entity_key).enabled + + entity_registry.async_update_entity( + entity_id=entity_id, disabled_by=er.RegistryEntryDisabler.USER + ) + await hass.async_block_till_done() + + assert not zha_device_proxy.device.platform_entities.get(entity_key).enabled + + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + await hass.async_block_till_done() + + assert zha_device_proxy.device.platform_entities.get(entity_key).enabled diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index af6f2d9af0c3af..1382c5c2569e7b 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -121,6 +121,13 @@ def backup(make_backup): return make_backup() +@pytest.fixture(autouse=True) +def mock_supervisor_client( + supervisor_client: AsyncMock, addon_store_info: AsyncMock +) -> None: + """Mock supervisor client.""" + + def mock_detect_radio_type( radio_type: RadioType = RadioType.ezsp, ret: ProbeResult = ProbeResult.RADIO_TYPE_DETECTED, @@ -772,6 +779,7 @@ async def test_user_flow_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "choose_serial_port" +@pytest.mark.usefixtures("addon_not_installed") @patch("serial.tools.list_ports.comports", MagicMock(return_value=[])) async def test_user_flow_show_manual(hass: HomeAssistant) -> None: """Test user flow manual entry when no comport detected.""" diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index afef2aab70f0bb..e5d588aa1bf5c6 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -20,6 +20,7 @@ SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, + CoverState, ) from homeassistant.components.zha.helpers import ( ZHADeviceProxy, @@ -27,13 +28,7 @@ get_zha_gateway, get_zha_gateway_proxy, ) -from homeassistant.const import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_component import async_update_entity @@ -118,7 +113,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: await async_update_entity(hass, entity_id) state = hass.states.get(entity_id) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 58 @@ -126,25 +121,25 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED # test to see if it opens await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # test that the state remains after tilting to 100% await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # test to see the state remains after tilting to 0% await send_attributes_report( hass, cluster, 
{WCAttrs.current_position_tilt_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # close from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): @@ -157,13 +152,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][2].command.name == WCCmds.down_close.name assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -182,13 +177,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 100 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} ) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED # open from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): @@ -201,13 +196,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][2].command.name == WCCmds.up_open.name assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -226,13 +221,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 0 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_OPENING + assert hass.states.get(entity_id).state == CoverState.OPENING await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # set position UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): @@ -252,19 +247,19 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 53 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, 
{WCAttrs.current_position_lift_percentage.id: 53} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( @@ -283,19 +278,19 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert cluster.request.call_args[0][3] == 53 assert cluster.request.call_args[1]["expect_reply"] is True - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} ) - assert hass.states.get(entity_id).state == STATE_CLOSING + assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} ) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # stop from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x2, zcl_f.Status.SUCCESS]): @@ -358,11 +353,11 @@ async def test_cover_failures( # test that the state has changed from unavailable to closed await send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) - assert hass.states.get(entity_id).state == STATE_CLOSED + assert hass.states.get(entity_id).state == CoverState.CLOSED # test to see if it opens await send_attributes_report(hass, cluster, {0: 1, 8: 0, 1: 100}) - assert hass.states.get(entity_id).state == STATE_OPEN + assert hass.states.get(entity_id).state == CoverState.OPEN # close from UI with patch( diff --git a/tests/components/zha/test_diagnostics.py b/tests/components/zha/test_diagnostics.py index ed3f83c0c36dc7..0e78a9a1b5bb6c 100644 --- a/tests/components/zha/test_diagnostics.py +++ b/tests/components/zha/test_diagnostics.py @@ -69,10 +69,11 @@ async def test_diagnostics_for_config_entry( scan = {c: c for c in range(11, 26 + 1)} - with patch.object(gateway.application_controller, "energy_scan", return_value=scan): - diagnostics_data = await get_diagnostics_for_config_entry( - hass, hass_client, config_entry - ) + gateway.application_controller.energy_scan.side_effect = None + gateway.application_controller.energy_scan.return_value = scan + diagnostics_data = await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) assert diagnostics_data == snapshot( exclude=props("created_at", "modified_at", "entry_id", "versions") diff --git a/tests/components/zha/test_init.py b/tests/components/zha/test_init.py index 00fc3afd0ea628..887284919da2e3 100644 --- a/tests/components/zha/test_init.py +++ b/tests/components/zha/test_init.py @@ -252,7 +252,7 @@ async def test_zha_retry_unique_ids( ) as mock_connect: with patch( "homeassistant.config_entries.async_call_later", - lambda hass, delay, action: async_call_later(hass, 0, action), + lambda hass, delay, action: async_call_later(hass, 0.01, action), ): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index e2a614915f9587..4b6dff4fc6b839 100644 --- a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -23,6 +23,7 @@ ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, + ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, ) @@ -172,7 +173,8 @@ async def 
test_firmware_update_notification_from_zigpy( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) @@ -231,7 +233,8 @@ async def _async_image_notify_side_effect(*args, **kwargs): assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" @@ -272,7 +275,7 @@ async def test_firmware_update_success( ) -> None: """Test ZHA update platform - firmware update success.""" await setup_zha() - zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( + zha_device, ota_cluster, fw_image, installed_fw_version = await setup_test_data( hass, zigpy_device_mock ) @@ -284,7 +287,7 @@ async def test_firmware_update_success( assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification - await cluster._handle_query_next_image( + await ota_cluster._handle_query_next_image( foundation.ZCLHeader.cluster( tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id ), @@ -301,19 +304,20 @@ async def test_firmware_update_success( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - async def endpoint_reply(cluster_id, tsn, data, command_id): - if cluster_id == general.Ota.cluster_id: - hdr, cmd = cluster.deserialize(data) + async def endpoint_reply(cluster, sequence, data, **kwargs): + if cluster == general.Ota.cluster_id: + hdr, cmd = ota_cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -333,7 +337,7 @@ async def endpoint_reply(cluster_id, tsn, data, command_id): zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -360,7 +364,7 @@ async def endpoint_reply(cluster_id, tsn, data, command_id): zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -389,7 +393,8 @@ async def endpoint_reply(cluster_id, tsn, data, command_id): assert ( attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" ) - assert attrs[ATTR_IN_PROGRESS] == 58 + assert attrs[ATTR_IN_PROGRESS] is True + assert 
attrs[ATTR_UPDATE_PERCENTAGE] == 58 assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" @@ -398,7 +403,7 @@ async def endpoint_reply(cluster_id, tsn, data, command_id): zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.upgrade_end.name, status=foundation.Status.SUCCESS, manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -417,7 +422,7 @@ async def endpoint_reply(cluster_id, tsn, data, command_id): assert cmd.upgrade_time == 0 def read_new_fw_version(*args, **kwargs): - cluster.update_attribute( + ota_cluster.update_attribute( attrid=general.Ota.AttributeDefs.current_file_version.id, value=fw_image.firmware.header.file_version, ) @@ -427,9 +432,9 @@ def read_new_fw_version(*args, **kwargs): ) }, {} - cluster.read_attributes.side_effect = read_new_fw_version + ota_cluster.read_attributes.side_effect = read_new_fw_version - cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) + ota_cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, @@ -446,7 +451,8 @@ def read_new_fw_version(*args, **kwargs): attrs[ATTR_INSTALLED_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_LATEST_VERSION] == attrs[ATTR_INSTALLED_VERSION] # If we send a progress notification incorrectly, it won't be handled @@ -454,7 +460,8 @@ def read_new_fw_version(*args, **kwargs): entity.entity_data.entity._update_progress(50, 100, 0.50) state = hass.states.get(entity_id) - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert state.state == STATE_OFF @@ -465,7 +472,7 @@ async def test_firmware_update_raises( ) -> None: """Test ZHA update platform - firmware update raises.""" await setup_zha() - zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( + zha_device, ota_cluster, fw_image, installed_fw_version = await setup_test_data( hass, zigpy_device_mock ) @@ -475,7 +482,7 @@ async def test_firmware_update_raises( assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification - await cluster._handle_query_next_image( + await ota_cluster._handle_query_next_image( foundation.ZCLHeader.cluster( tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id ), @@ -493,19 +500,20 @@ async def test_firmware_update_raises( assert state.state == STATE_ON attrs = state.attributes assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert ( attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" ) - async def endpoint_reply(cluster_id, tsn, data, command_id): - if cluster_id == general.Ota.cluster_id: - hdr, cmd = cluster.deserialize(data) + async def endpoint_reply(cluster, sequence, data, **kwargs): + if cluster == general.Ota.cluster_id: + hdr, cmd = ota_cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): zha_device.device.device.packet_received( make_packet( zha_device.device.device, - cluster, + ota_cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, 
manufacturer_code=fw_image.firmware.header.manufacturer_id, @@ -524,7 +532,7 @@ async def endpoint_reply(cluster_id, tsn, data, command_id): assert cmd.image_size == fw_image.firmware.header.image_size raise DeliveryError("failed to deliver") - cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) + ota_cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) with pytest.raises(HomeAssistantError): await hass.services.async_call( UPDATE_DOMAIN, diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index 0a8e445a3e6d42..37b1dde7316953 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -498,6 +498,15 @@ def siren_neo_coolcam_state_state_fixture() -> NodeDataType: ) +@pytest.fixture(name="aeotec_smart_switch_7_state") +def aeotec_smart_switch_7_state_fixture() -> NodeDataType: + """Load node with fixture data for Aeotec Smart Switch 7.""" + return cast( + NodeDataType, + load_json_object_fixture("aeotec_smart_switch_7_state.json", DOMAIN), + ) + + # model fixtures @@ -1212,3 +1221,13 @@ def siren_neo_coolcam_fixture( node = Node(client, siren_neo_coolcam_state) client.driver.controller.nodes[node.node_id] = node return node + + +@pytest.fixture(name="aeotec_smart_switch_7") +def aeotec_smart_switch_7_fixture( + client: MagicMock, aeotec_smart_switch_7_state: NodeDataType +) -> Node: + """Load node for Aeotec Smart Switch 7.""" + node = Node(client, aeotec_smart_switch_7_state) + client.driver.controller.nodes[node.node_id] = node + return node diff --git a/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json b/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json new file mode 100644 index 00000000000000..ea7bbe8b16c8ae --- /dev/null +++ b/tests/components/zwave_js/fixtures/aeotec_smart_switch_7_state.json @@ -0,0 +1,1863 @@ +{ + "nodeId": 9, + "index": 0, + "installerIcon": 1792, + "userIcon": 1792, + "status": 4, + "ready": true, + "isListening": true, + "isRouting": true, + "isSecure": true, + "manufacturerId": 881, + "productId": 175, + "productType": 3, + "firmwareVersion": "1.3", + "zwavePlusVersion": 1, + "deviceConfig": { + "filename": "/data/db/devices/0x0371/zw175.json", + "isEmbedded": true, + "manufacturer": "Aeotec Ltd.", + "manufacturerId": 881, + "label": "ZW175", + "description": "Smart Switch 7", + "devices": [ + { + "productType": 3, + "productId": 175 + } + ], + "firmwareVersion": { + "min": "0.0", + "max": "255.255" + }, + "preferred": false, + "associations": {}, + "paramInformation": { + "_map": {} + }, + "metadata": { + "inclusion": "This product supports Security 2 Command Class. While a Security S2 enabled Controller is needed in order to fully use the security feature. This product can be included and operated in any Z-Wave network with other Z-Wave certified devices from other manufacturers and/or other applications. All non-battery operated nodes within the network will act as repeaters regardless of vendor to increase reliability of the network.\n\n(1) SmartStart Learn Mode\nSmartStart enabled products can be added into a Z-Wave network by scanning the Z-Wave QR Code present on the product with a controller providing SmartStart inclusion. 
No further action is required and the SmartStart product will be added automatically within 10 minutes of being switched on in the network vicinity.\nIndicator Light will become flash white light for 1s indicating the product has been powered, and then become flash blue light indicating SmartStart Learn Mode starts. It will become constantly bright yellow light after being assigned a NodeID.\nIf Adding succeeds, it will bright blue light for 2s and become Load Indicator Mode.\nIf Adding fails, it will bright red light for 2s and turn back to breathing blue light and then start SmartStart Learn Mode again.\nNote:\nThe label of QR Code on the product and package are used for SmartStart Inclusion. The Z-Wave DSK Code is at bottom of the package. Please do not remove or damage them.\n\n(2) Classic Inclusion Learn Mode\n1. Set your Z-Wave Controller into its 'Add Device' mode in order to add the product into your Z-Wave system. Refer to the Controller's manual if you are unsure of how to perform this step.\n2. Make sure the product is powered. If not, plug it into a wall socket and power on; its LED will be breathing blue light all the time. \n3. Click Action Button once, it will quickly flash blue light for 30 seconds until it is added into the network. It will become constantly bright yellow light after being assigned a NodeID.\n4. If your Z-Wave Controller supports S2 encryption, enter the first 5 digits of DSK into your Controller's interface if /when requested. The DSK is printed on its housing.\n5. If Adding fails, it will bright red light for 2s and then become breathing blue light; repeat steps 1 to 4. Contact us for further support if needed.\n6. If Adding succeeds, it will bright blue light for 2s and then turn to Load Indicator Mode. Now, this product is a part of your Z-Wave home control system. You can configure it and its automations via your Z-Wave system; please refer to your software's user guide for precise instructions.\nNote:\nIf Action Button is clicked again during the Classic Inclusion Learn Mode, the Classic Inclusion Learn Mode will exit. At the same time, Indicator Light will bright red light for 2s, and then become breathing blue light", + "exclusion": "1. Set your Z-Wave Controller into its 'Remove Device' mode in order to remove the product from your Z-Wave system. Refer to the Controller's manual if you are unsure of how to perform this step.\n2. Make sure the product is powered. If not, plug it into a wall socket and power on. \n3. Click Action Button 2 times quickly; it will bright violet light, up to 2s.\n4. If Removing fails, it will bright red light for 2s and then turn back to Load Indicator Mode; repeat steps 1 to 3. Contact us for further support if needed.\n5. If Removing succeeds, it will become breathing blue light. Now, it is removed from Z-Wave network successfully", + "reset": "If the primary controller is missing or inoperable, you may need to reset the device to factory settings.\nMake sure the product is powered. If not, plug it into a wall socket and power on. To complete the reset process manually, press and hold the Action Button for at least 15s and then release. The LED indicator will become breathing blue light, which indicates the reset operation is successful. Otherwise, please try again. Contact us for further support if needed. \nNote: \n1. This procedure should only be used when the primary controller is missing or inoperable.\n2. 
Factory Reset will:\n(a) Remove the product from Z-Wave network;\n(b) Delete the Association setting;\n(c) Restore the configuration settings to the default.", + "manual": "https://products.z-wavealliance.org/ProductManual/File?folder=&filename=MarketCertificationFiles/3437/Smart%20Switch%207%20product%20manual.pdf" + } + }, + "label": "ZW175", + "interviewAttempts": 1, + "isFrequentListening": false, + "maxDataRate": 100000, + "supportedDataRates": [40000, 100000], + "protocolVersion": 3, + "supportsBeaming": true, + "supportsSecurity": false, + "nodeType": 1, + "zwavePlusNodeType": 0, + "zwavePlusRoleType": 5, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 16, + "label": "Binary Switch" + }, + "specific": { + "key": 1, + "label": "Binary Power Switch" + } + }, + "interviewStage": "Complete", + "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x0371:0x0003:0x00af:1.3", + "statistics": { + "commandsTX": 221, + "commandsRX": 1452, + "commandsDroppedRX": 22, + "commandsDroppedTX": 0, + "timeoutResponse": 3, + "rtt": 29.9, + "lastSeen": "2024-10-01T13:21:14.968Z" + }, + "highestSecurityClass": 1, + "isControllerNode": false, + "keepAwake": false, + "lastSeen": "2024-10-01T13:12:41.805Z", + "protocol": 0, + "values": [ + { + "endpoint": 0, + "commandClass": 37, + "commandClassName": "Binary Switch", + "property": "currentValue", + "propertyName": "currentValue", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": false, + "label": "Current value", + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 37, + "commandClassName": "Binary Switch", + "property": "targetValue", + "propertyName": "targetValue", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": true, + "label": "Target value", + "valueChangeOptions": ["transitionDuration"], + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "targetValue", + "propertyName": "targetValue", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Target value", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 50 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "currentValue", + "propertyName": "currentValue", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Current value", + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 50 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Up", + "propertyName": "Up", + "ccVersion": 2, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Up)", + "ccSpecific": { + "switchType": 2 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Down", + "propertyName": "Down", + "ccVersion": 2, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Down)", + "ccSpecific": { + "switchType": 2 + 
}, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "duration", + "propertyName": "duration", + "ccVersion": 2, + "metadata": { + "type": "duration", + "readable": true, + "writeable": false, + "label": "Remaining duration", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "restorePrevious", + "propertyName": "restorePrevious", + "ccVersion": 2, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Restore previous value", + "states": { + "true": "Restore" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 43, + "commandClassName": "Scene Activation", + "property": "sceneId", + "propertyName": "sceneId", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Scene ID", + "valueChangeOptions": ["transitionDuration"], + "min": 1, + "max": 255, + "stateful": false, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 43, + "commandClassName": "Scene Activation", + "property": "dimmingDuration", + "propertyName": "dimmingDuration", + "ccVersion": 1, + "metadata": { + "type": "duration", + "readable": true, + "writeable": true, + "label": "Dimming duration", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 65537, + "propertyName": "value", + "propertyKeyName": "Electric_kWh_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [kWh]", + "ccSpecific": { + "meterType": 1, + "scale": 0, + "rateType": 1 + }, + "unit": "kWh", + "stateful": true, + "secret": false + }, + "value": 1.259 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 66049, + "propertyName": "value", + "propertyKeyName": "Electric_W_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [W]", + "ccSpecific": { + "meterType": 1, + "scale": 2, + "rateType": 1 + }, + "unit": "W", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 66561, + "propertyName": "value", + "propertyKeyName": "Electric_V_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [V]", + "ccSpecific": { + "meterType": 1, + "scale": 4, + "rateType": 1 + }, + "unit": "V", + "stateful": true, + "secret": false + }, + "value": 232.895 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 66817, + "propertyName": "value", + "propertyKeyName": "Electric_A_Consumed", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [A]", + "ccSpecific": { + "meterType": 1, + "scale": 5, + "rateType": 1 + }, + "unit": "A", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 50, + "commandClassName": "Meter", + 
"property": "reset", + "propertyName": "reset", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Reset accumulated values", + "states": { + "true": "Reset" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyKey": 2, + "propertyName": "currentColor", + "propertyKeyName": "Red", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "description": "The current value of the Red channel.", + "label": "Current value (Red)", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 255 + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyKey": 3, + "propertyName": "currentColor", + "propertyKeyName": "Green", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "description": "The current value of the Green channel.", + "label": "Current value (Green)", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 251 + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyKey": 4, + "propertyName": "currentColor", + "propertyKeyName": "Blue", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "description": "The current value of the Blue channel.", + "label": "Current value (Blue)", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + }, + "value": 246 + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "currentColor", + "propertyName": "currentColor", + "ccVersion": 1, + "metadata": { + "type": "any", + "readable": true, + "writeable": false, + "label": "Current color", + "stateful": true, + "secret": false + }, + "value": { + "red": 255, + "green": 251, + "blue": 246 + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyName": "targetColor", + "ccVersion": 1, + "metadata": { + "type": "any", + "readable": true, + "writeable": true, + "label": "Target color", + "valueChangeOptions": ["transitionDuration"], + "stateful": true, + "secret": false + }, + "value": { + "red": 255, + "green": 251, + "blue": 246 + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "hexColor", + "propertyName": "hexColor", + "ccVersion": 1, + "metadata": { + "type": "color", + "readable": true, + "writeable": true, + "label": "RGB Color", + "valueChangeOptions": ["transitionDuration"], + "minLength": 6, + "maxLength": 7, + "stateful": true, + "secret": false + }, + "value": "fffbf6" + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyKey": 2, + "propertyName": "targetColor", + "propertyKeyName": "Red", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "The target value of the Red channel.", + "label": "Target value (Red)", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyKey": 3, + "propertyName": "targetColor", + 
"propertyKeyName": "Green", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "The target value of the Green channel.", + "label": "Target value (Green)", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "targetColor", + "propertyKey": 4, + "propertyName": "targetColor", + "propertyKeyName": "Blue", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "The target value of the Blue channel.", + "label": "Target value (Blue)", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 51, + "commandClassName": "Color Switch", + "property": "duration", + "propertyName": "duration", + "ccVersion": 1, + "metadata": { + "type": "duration", + "readable": true, + "writeable": false, + "label": "Remaining duration", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 4, + "propertyName": "Current Overload Protection Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Current Overload Protection Threshold", + "default": 2415, + "min": 0, + "max": 2415, + "states": { + "0": "Disable" + }, + "unit": "W", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 2415 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 1, + "propertyName": "Alarm Trigger State", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Alarm Trigger State", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Trigger on open state", + "1": "Trigger on closed state" + }, + "valueSize": 2, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 256, + "propertyName": "React to Alarm Type: Smoke Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Smoke Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 512, + "propertyName": "React to Alarm Type: CO Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: CO Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 1024, + "propertyName": "React to Alarm Type: CO2 Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "React to CO2 Alarms from other Z-Wave 
devices.", + "label": "React to Alarm Type: CO2 Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 2048, + "propertyName": "React to Alarm Type: Heart Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Heart Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 4096, + "propertyName": "React to Alarm Type: Water Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Water Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 8192, + "propertyName": "React to Alarm Type: Access Control Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Access Control Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyKey": 16384, + "propertyName": "React to Alarm Type: Home Security Alarms", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "React to Alarm Type: Home Security Alarms", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 2, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 8, + "propertyName": "Switch Action on Alarm", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Switch Action on Alarm", + "default": 0, + "min": 0, + "max": 3, + "states": { + "0": "Disable", + "1": "Turn on", + "2": "Turn off", + "3": "Cyclce on/off in 5 second intervals" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 10, + "propertyName": "Method to Disable Alarm", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 10-255 - Sets the method to disable the alarm or alarm duration", + "label": "Method to Disable Alarm", + "default": 0, + "min": 0, + "max": 255, + "states": { + "0": "Tap action button 3x", + "1": "Idle state from corresponding alarm" + }, + "unit": "seconds", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + 
"value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 18, + "propertyName": "LED Blinking Frequency", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "LED Blinking Frequency", + "default": 2, + "min": 0, + "max": 9, + "unit": "Hz", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 2 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 20, + "propertyName": "State After Power Failure", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "State After Power Failure", + "default": 0, + "min": 0, + "max": 2, + "states": { + "0": "Previous state", + "1": "Always on", + "2": "Always off" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 80, + "propertyName": "Report Type To Send", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Report Type To Send", + "default": 2, + "min": 0, + "max": 2, + "states": { + "0": "Disable", + "1": "Basic CC Report", + "2": "Binary Switch CC Report" + }, + "valueSize": 1, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 2 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 81, + "propertyName": "LED Indicator", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "LED Indicator", + "default": 2, + "min": 0, + "max": 2, + "states": { + "0": "Disable", + "1": "Night light mode", + "2": "On/off mode" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 2 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 82, + "propertyKey": 4278190080, + "propertyName": "Night Light (Enable): Hour", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-23", + "label": "Night Light (Enable): Hour", + "default": 18, + "min": 0, + "max": 23, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 18 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 82, + "propertyKey": 16711680, + "propertyName": "Night Light (Enable): Minute", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-59", + "label": "Night Light (Enable): Minute", + "default": 0, + "min": 0, + "max": 59, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 82, + "propertyKey": 65280, + "propertyName": "Night Light (Disable): Hour", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-23", + "label": "Night Light (Disable): Hour", + "default": 8, + "min": 0, + "max": 23, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 8 + }, + { + "endpoint": 0, + "commandClass": 112, + 
"commandClassName": "Configuration", + "property": 82, + "propertyKey": 255, + "propertyName": "Night Light (Disable): Minute", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Allowable range: 0-59", + "label": "Night Light (Disable): Minute", + "default": 0, + "min": 0, + "max": 59, + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 91, + "propertyName": "Power Change Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Threshold change in power consumption to induce an automatic report", + "label": "Power Change Threshold", + "default": 0, + "min": 0, + "max": 2300, + "states": { + "0": "Disable" + }, + "unit": "W", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 92, + "propertyName": "Power (kWh) Change Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Power (kWh) Change Threshold", + "default": 0, + "min": 0, + "max": 10000, + "states": { + "0": "Disable" + }, + "unit": "KwH", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 93, + "propertyName": "Current Change Threshold", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Current Change Threshold", + "default": 0, + "min": 0, + "max": 100, + "states": { + "0": "Disable" + }, + "unit": "A", + "valueSize": 1, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 101, + "propertyKey": 1, + "propertyName": "Automatic Report: kWh", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Report: kWh", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 101, + "propertyKey": 2, + "propertyName": "Automatic Report: Power", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Report: Power", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 101, + "propertyKey": 4, + "propertyName": "Automatic Report: Voltage", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Report: Voltage", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + 
"commandClassName": "Configuration", + "property": 101, + "propertyKey": 8, + "propertyName": "Automatic Report: Current", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Report: Current", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Disable", + "1": "Enable" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 111, + "propertyName": "Automatic Reporting Interval", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Automatic Reporting Interval", + "default": 600, + "min": 0, + "max": 2592000, + "states": { + "0": "Disable" + }, + "unit": "seconds", + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 600 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 19, + "propertyName": "LED Blink Duration", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": false, + "writeable": true, + "label": "LED Blink Duration", + "default": 0, + "min": 0, + "max": 255, + "unit": "seconds", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + } + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 255, + "propertyName": "Reset to Factory Default Setting", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": false, + "writeable": true, + "label": "Reset to Factory Default Setting", + "default": 0, + "min": 0, + "max": 1431655765, + "states": { + "0": "Normal Operation", + "1": "Resets all configuration parameters to default setting", + "1431655765": "Reset the product to factory default setting and exclude from Z-Wave network" + }, + "valueSize": 4, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Power Management", + "propertyKey": "Over-current status", + "propertyName": "Power Management", + "propertyKeyName": "Over-current status", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Over-current status", + "ccSpecific": { + "notificationType": 8 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "6": "Over-current detected" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Power Management", + "propertyKey": "Over-load status", + "propertyName": "Power Management", + "propertyKeyName": "Over-load status", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Over-load status", + "ccSpecific": { + "notificationType": 8 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "8": "Over-load detected" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "System", + "propertyKey": "Hardware status", + "propertyName": "System", + "propertyKeyName": "Hardware status", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Hardware status", + "ccSpecific": { + "notificationType": 9 + }, + "min": 0, 
+ "max": 255, + "states": { + "0": "idle", + "3": "System hardware failure (with failure code)" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmType", + "propertyName": "alarmType", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Type", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmLevel", + "propertyName": "alarmLevel", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Level", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "manufacturerId", + "propertyName": "manufacturerId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Manufacturer ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 881 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productType", + "propertyName": "productType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product type", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productId", + "propertyName": "productId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 175 + }, + { + "endpoint": 0, + "commandClass": 117, + "commandClassName": "Protection", + "property": "local", + "propertyName": "local", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Local protection state", + "states": { + "0": "Unprotected", + "2": "NoOperationPossible" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 117, + "commandClassName": "Protection", + "property": "rf", + "propertyName": "rf", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "RF protection state", + "states": { + "0": "Unprotected", + "1": "NoControl" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 117, + "commandClassName": "Protection", + "property": "exclusiveControlNodeId", + "propertyName": "exclusiveControlNodeId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Node ID with exclusive control", + "min": 1, + "max": 232, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 117, + "commandClassName": "Protection", + "property": "timeout", + "propertyName": "timeout", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "RF protection timeout", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "libraryType", + "propertyName": 
"libraryType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Library type", + "states": { + "0": "Unknown", + "1": "Static Controller", + "2": "Controller", + "3": "Enhanced Slave", + "4": "Slave", + "5": "Installer", + "6": "Routing Slave", + "7": "Bridge Controller", + "8": "Device under Test", + "9": "N/A", + "10": "AV Remote", + "11": "AV Device" + }, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "protocolVersion", + "propertyName": "protocolVersion", + "ccVersion": 2, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "6.4" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "firmwareVersions", + "propertyName": "firmwareVersions", + "ccVersion": 2, + "metadata": { + "type": "string[]", + "readable": true, + "writeable": false, + "label": "Z-Wave chip firmware versions", + "stateful": true, + "secret": false + }, + "value": ["1.3"] + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hardwareVersion", + "propertyName": "hardwareVersion", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Z-Wave chip hardware version", + "stateful": true, + "secret": false + }, + "value": 175 + } + ], + "endpoints": [ + { + "nodeId": 9, + "index": 0, + "installerIcon": 1792, + "userIcon": 1792, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing End Node" + }, + "generic": { + "key": 16, + "label": "Binary Switch" + }, + "specific": { + "key": 1, + "label": "Binary Power Switch" + } + }, + "commandClasses": [ + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 85, + "name": "Transport Service", + "version": 2, + "isSecure": false + }, + { + "id": 152, + "name": "Security", + "version": 1, + "isSecure": true + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 1, + "isSecure": true + }, + { + "id": 112, + "name": "Configuration", + "version": 1, + "isSecure": true + }, + { + "id": 44, + "name": "Scene Actuator Configuration", + "version": 1, + "isSecure": true + }, + { + "id": 43, + "name": "Scene Activation", + "version": 1, + "isSecure": true + }, + { + "id": 129, + "name": "Clock", + "version": 1, + "isSecure": true + }, + { + "id": 113, + "name": "Notification", + "version": 4, + "isSecure": true + }, + { + "id": 50, + "name": "Meter", + "version": 4, + "isSecure": true + }, + { + "id": 37, + "name": "Binary Switch", + "version": 1, + "isSecure": true + }, + { + "id": 51, + "name": "Color Switch", + "version": 1, + "isSecure": true + }, + { + "id": 38, + "name": "Multilevel Switch", + "version": 2, + "isSecure": true + }, + { + "id": 117, + "name": "Protection", + "version": 2, + "isSecure": true + }, + { + "id": 115, + "name": "Powerlevel", + "version": 1, + "isSecure": true + }, + { + "id": 122, + "name": "Firmware Update Meta Data", + "version": 4, + "isSecure": true + }, + { + "id": 134, + "name": "Version", + "version": 2, + "isSecure": true + }, + 
{ + "id": 90, + "name": "Device Reset Locally", + "version": 1, + "isSecure": true + }, + { + "id": 114, + "name": "Manufacturer Specific", + "version": 2, + "isSecure": true + } + ] + } + ] +} diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index f636401a942433..df1adbc98e5c32 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5,7 +5,7 @@ from io import BytesIO import json from typing import Any -from unittest.mock import patch +from unittest.mock import PropertyMock, patch import pytest from zwave_js_server.const import ( @@ -78,9 +78,16 @@ TYPE, UUID, VALUE, + VALUE_FORMAT, + VALUE_SIZE, VERSION, ) from homeassistant.components.zwave_js.const import ( + ATTR_COMMAND_CLASS, + ATTR_ENDPOINT, + ATTR_METHOD_NAME, + ATTR_PARAMETERS, + ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, DOMAIN, ) @@ -88,7 +95,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from tests.common import MockUser +from tests.common import MockConfigEntry, MockUser from tests.typing import ClientSessionGenerator, WebSocketGenerator CONTROLLER_PATCH_PREFIX = "zwave_js_server.model.controller.Controller" @@ -489,6 +496,7 @@ async def test_node_alerts( async def test_add_node( hass: HomeAssistant, + nortek_thermostat, nortek_thermostat_added_event, integration, client, @@ -590,6 +598,7 @@ async def test_add_node( "status": 0, "ready": False, "low_security": False, + "low_security_reason": None, } assert msg["event"]["node"] == node_details @@ -935,12 +944,46 @@ async def test_add_node( assert msg["error"]["code"] == "zwave_error" assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" + # Test inclusion already in progress + client.async_send_command.reset_mock() + type(client.driver.controller).inclusion_state = PropertyMock( + return_value=InclusionState.INCLUDING + ) + + # Create a node that's not ready + node_data = deepcopy(nortek_thermostat.data) # Copy to allow modification in tests. 
+ node_data["ready"] = False + node_data["values"] = {} + node_data["endpoints"] = {} + node = Node(client, node_data) + client.driver.controller.nodes[node.node_id] = node + + await ws_client.send_json( + { + ID: 11, + TYPE: "zwave_js/add_node", + ENTRY_ID: entry.entry_id, + INCLUSION_STRATEGY: InclusionStrategy.DEFAULT.value, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + + # Verify no command was sent since inclusion is already in progress + assert len(client.async_send_command.call_args_list) == 0 + + # Verify we got a node added event + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "node added" + assert msg["event"]["node"]["node_id"] == node.node_id + # Test sending command with not loaded entry fails await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() await ws_client.send_json( - {ID: 11, TYPE: "zwave_js/add_node", ENTRY_ID: entry.entry_id} + {ID: 12, TYPE: "zwave_js/add_node", ENTRY_ID: entry.entry_id} ) msg = await ws_client.receive_json() @@ -3096,6 +3139,180 @@ async def test_get_config_parameters( assert msg["error"]["code"] == ERR_NOT_LOADED +async def test_set_raw_config_parameter( + hass: HomeAssistant, + client, + multisensor_6, + integration, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test that the set_raw_config_parameter WS API call works.""" + entry = integration + ws_client = await hass_ws_client(hass) + device = get_device(hass, multisensor_6) + + # Change from async_send_command to async_send_command_no_wait + client.async_send_command_no_wait.return_value = None + + # Test setting a raw config parameter value + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"]["status"] == "queued" + + assert len(client.async_send_command_no_wait.call_args_list) == 1 + args = client.async_send_command_no_wait.call_args[0][0] + assert args["command"] == "endpoint.set_raw_config_parameter_value" + assert args["nodeId"] == multisensor_6.node_id + assert args["options"]["parameter"] == 102 + assert args["options"]["value"] == 1 + assert args["options"]["valueSize"] == 2 + assert args["options"]["valueFormat"] == 1 + + # Reset the mock for async_send_command_no_wait instead + client.async_send_command_no_wait.reset_mock() + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: "fake_device", + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + +async def test_get_raw_config_parameter( + hass: HomeAssistant, + multisensor_6, + integration, + client, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the get_raw_config_parameter websocket command.""" + entry = integration + ws_client = await 
hass_ws_client(hass) + device = get_device(hass, multisensor_6) + + client.async_send_command.return_value = {"value": 1} + + # Test getting a raw config parameter value + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"]["value"] == 1 + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "endpoint.get_raw_config_parameter_value" + assert args["nodeId"] == multisensor_6.node_id + assert args["options"]["parameter"] == 102 + + client.async_send_command.reset_mock() + + # Test FailedZWaveCommand is caught + with patch( + "zwave_js_server.model.node.Node.async_get_raw_config_parameter_value", + side_effect=FailedZWaveCommand("failed_command", 1, "error message"), + ): + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "zwave_error" + assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: "fake_device", + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test FailedCommand exception + client.async_send_command.side_effect = FailedCommand("test", "test") + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "test" + assert msg["error"]["message"] == "Command failed: test" + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + @pytest.mark.parametrize( ("firmware_data", "expected_data"), [({"target": "1"}, {"firmware_target": 1}), ({}, {})], @@ -4792,3 +5009,157 @@ async def test_hard_reset_controller( assert not msg["success"] assert msg["error"]["code"] == ERR_NOT_FOUND + + +async def test_node_capabilities( + hass: HomeAssistant, + multisensor_6: Node, + integration: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the node_capabilities websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + + node = multisensor_6 + device = get_device(hass, node) + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_capabilities", + DEVICE_ID: device.id, + } + ) + msg = await ws_client.receive_json() + assert msg["result"] == { + "0": [ + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": False, + "is_secure": False, + } + ] + } + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_status", + DEVICE_ID: "fake_device", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test sending 
command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_status", + DEVICE_ID: device.id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + +async def test_invoke_cc_api( + hass: HomeAssistant, + client, + climate_radio_thermostat_ct100_plus_different_endpoints: Node, + integration: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the invoke_cc_api websocket command.""" + ws_client = await hass_ws_client(hass) + + device_radio_thermostat = get_device( + hass, climate_radio_thermostat_ct100_plus_different_endpoints + ) + assert device_radio_thermostat + + # Test successful invoke_cc_api call with a static endpoint + client.async_send_command.return_value = {"response": True} + client.async_send_command_no_wait.return_value = {"response": True} + + # Test with wait_for_result=False (default) + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is None # We did not specify wait_for_result=True + + await hass.async_block_till_done() + + assert len(client.async_send_command_no_wait.call_args_list) == 1 + args = client.async_send_command_no_wait.call_args[0][0] + assert args == { + "command": "endpoint.invoke_cc_api", + "nodeId": 26, + "endpoint": 0, + "commandClass": 67, + "methodName": "someMethod", + "args": [1, 2], + } + + client.async_send_command_no_wait.reset_mock() + + # Test with wait_for_result=True + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_ENDPOINT: 0, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + ATTR_WAIT_FOR_RESULT: True, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is True + + await hass.async_block_till_done() + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args == { + "command": "endpoint.invoke_cc_api", + "nodeId": 26, + "endpoint": 0, + "commandClass": 67, + "methodName": "someMethod", + "args": [1, 2], + } + + client.async_send_command.side_effect = NotFoundError + + # Ensure an error is returned + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_ENDPOINT: 0, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + ATTR_WAIT_FOR_RESULT: True, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"] == {"code": "NotFoundError", "message": ""} diff --git a/tests/components/zwave_js/test_climate.py b/tests/components/zwave_js/test_climate.py index 9a4559de1a59df..5d711528a281ed 100644 --- a/tests/components/zwave_js/test_climate.py +++ b/tests/components/zwave_js/test_climate.py @@ -812,8 +812,8 @@ async def test_thermostat_heatit_z_trm2fx( | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - assert state.attributes[ATTR_MIN_TEMP] == 7 - assert state.attributes[ATTR_MAX_TEMP] == 35 + assert state.attributes[ATTR_MIN_TEMP] == 0 + assert state.attributes[ATTR_MAX_TEMP] == 50 # Try switching to external 
sensor event = Event( diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index d9111d0cb4c4e8..b60515cacd45c3 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -6,7 +6,10 @@ from ipaddress import ip_address from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch +from uuid import uuid4 +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions, Discovery import aiohttp import pytest from serial.tools.list_ports_common import ListPortInfo @@ -14,12 +17,12 @@ from homeassistant import config_entries from homeassistant.components import usb -from homeassistant.components.hassio import HassioAPIError, HassioServiceInfo from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.components.zwave_js.config_flow import SERVER_VERSION_TIMEOUT, TITLE from homeassistant.components.zwave_js.const import ADDON_SLUG, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from tests.common import MockConfigEntry @@ -553,7 +556,19 @@ async def test_abort_hassio_discovery_for_other_addon( assert result2["reason"] == "not_zwave_js_addon" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_usb_discovery( hass: HomeAssistant, supervisor, @@ -583,7 +598,7 @@ async def test_usb_discovery( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -601,10 +616,9 @@ async def test_usb_discovery( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": USB_DISCOVERY_INFO.device, "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -613,7 +627,7 @@ async def test_usb_discovery( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -652,7 +666,19 @@ async def test_usb_discovery( assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_usb_discovery_addon_not_running( hass: HomeAssistant, supervisor, @@ -702,10 +728,9 @@ async def test_usb_discovery_addon_not_running( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": USB_DISCOVERY_INFO.device, "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -714,7 +739,7 @@ async def test_usb_discovery_addon_not_running( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -797,10 +822,9 @@ async def test_discovery_addon_not_running( ) assert set_addon_options.call_args == call( - hass, 
"core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -809,7 +833,7 @@ async def test_discovery_addon_not_running( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -881,7 +905,7 @@ async def test_discovery_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -900,10 +924,9 @@ async def test_discovery_addon_not_installed( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -912,7 +935,7 @@ async def test_discovery_addon_not_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1092,7 +1115,19 @@ async def test_not_addon(hass: HomeAssistant, supervisor) -> None: assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running( hass: HomeAssistant, supervisor, @@ -1158,31 +1193,52 @@ async def test_addon_running( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, - HassioAPIError(), + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + SupervisorError(), None, None, "addon_get_discovery_info_failed", ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, TimeoutError, None, "cannot_connect", ), ( - None, + [], None, None, None, "addon_get_discovery_info_failed", ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], None, None, - HassioAPIError(), + SupervisorError(), "addon_info_failed", ), ], @@ -1214,7 +1270,19 @@ async def test_addon_running_failures( assert result["reason"] == abort_reason -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_running_already_configured( hass: HomeAssistant, supervisor, @@ -1273,7 +1341,19 @@ async def test_addon_running_already_configured( assert entry.data["lr_s2_authenticated_key"] == "new321" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed( hass: HomeAssistant, supervisor, @@ -1313,10 +1393,9 @@ async def test_addon_installed( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", 
"s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1325,7 +1404,7 @@ async def test_addon_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1366,7 +1445,17 @@ async def test_addon_installed( @pytest.mark.parametrize( ("discovery_info", "start_addon_side_effect"), - [({"config": ADDON_DISCOVERY_INFO}, HassioAPIError())], + [ + ( + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ), + SupervisorError(), + ) + ], ) async def test_addon_installed_start_failure( hass: HomeAssistant, @@ -1407,10 +1496,9 @@ async def test_addon_installed_start_failure( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1419,7 +1507,7 @@ async def test_addon_installed_start_failure( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1438,11 +1526,18 @@ async def test_addon_installed_start_failure( ("discovery_info", "server_version_side_effect"), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], TimeoutError, ), ( - None, + [], None, ), ], @@ -1486,10 +1581,9 @@ async def test_addon_installed_failures( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1498,7 +1592,7 @@ async def test_addon_installed_failures( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1515,7 +1609,19 @@ async def test_addon_installed_failures( @pytest.mark.parametrize( ("set_addon_options_side_effect", "discovery_info"), - [(HassioAPIError(), {"config": ADDON_DISCOVERY_INFO})], + [ + ( + SupervisorError(), + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], + ) + ], ) async def test_addon_installed_set_options_failure( hass: HomeAssistant, @@ -1556,10 +1662,9 @@ async def test_addon_installed_set_options_failure( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1568,7 +1673,7 @@ async def test_addon_installed_set_options_failure( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.ABORT @@ -1577,7 +1682,19 @@ async def test_addon_installed_set_options_failure( assert start_addon.call_count == 0 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_installed_already_configured( hass: HomeAssistant, supervisor, @@ -1634,10 +1751,9 @@ async def test_addon_installed_already_configured( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/new", "s0_legacy_key": "new123", 
"s2_access_control_key": "new456", @@ -1646,7 +1762,7 @@ async def test_addon_installed_already_configured( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1669,7 +1785,19 @@ async def test_addon_installed_already_configured( assert entry.data["lr_s2_authenticated_key"] == "new321" -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) +@pytest.mark.parametrize( + "discovery_info", + [ + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ] + ], +) async def test_addon_not_installed( hass: HomeAssistant, supervisor, @@ -1700,7 +1828,7 @@ async def test_addon_not_installed( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -1719,10 +1847,9 @@ async def test_addon_not_installed( ) assert set_addon_options.call_args == call( - hass, "core_zwave_js", - { - "options": { + AddonsOptions( + config={ "device": "/test", "s0_legacy_key": "new123", "s2_access_control_key": "new456", @@ -1731,7 +1858,7 @@ async def test_addon_not_installed( "lr_s2_access_control_key": "new654", "lr_s2_authenticated_key": "new321", } - }, + ), ) assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -1774,7 +1901,7 @@ async def test_install_addon_failure( hass: HomeAssistant, supervisor, addon_not_installed, install_addon ) -> None: """Test add-on install failure.""" - install_addon.side_effect = HassioAPIError() + install_addon.side_effect = SupervisorError() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -1794,7 +1921,7 @@ async def test_install_addon_failure( result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.ABORT assert result["reason"] == "addon_install_failed" @@ -1895,7 +2022,14 @@ async def test_options_not_addon( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -1921,7 +2055,14 @@ async def test_options_not_addon( 0, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {"use_addon": True}, { "device": "/test", @@ -1994,9 +2135,8 @@ async def test_options_addon_running( new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, "core_zwave_js", - {"options": new_addon_options}, + AddonsOptions(config=new_addon_options), ) assert client.disconnect.call_count == disconnect_calls @@ -2042,7 +2182,14 @@ async def test_options_addon_running( ("discovery_info", "entry_data", "old_addon_options", "new_addon_options"), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2169,7 +2316,14 @@ async def different_device_server_version(*args): ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + 
addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2198,7 +2352,14 @@ async def different_device_server_version(*args): different_device_server_version, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2275,9 +2436,7 @@ async def test_options_different_device( assert set_addon_options.call_count == 1 new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": new_addon_options}, + "core_zwave_js", AddonsOptions(config=new_addon_options) ) assert client.disconnect.call_count == disconnect_calls assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -2298,9 +2457,7 @@ async def test_options_different_device( assert set_addon_options.call_count == 2 assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": addon_options}, + "core_zwave_js", AddonsOptions(config=addon_options) ) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -2331,7 +2488,14 @@ async def test_options_different_device( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2357,10 +2521,17 @@ async def test_options_different_device( "emulate_hardware": False, }, 0, - [HassioAPIError(), None], + [SupervisorError(), None], ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2387,8 +2558,8 @@ async def test_options_different_device( }, 0, [ - HassioAPIError(), - HassioAPIError(), + SupervisorError(), + SupervisorError(), ], ), ], @@ -2441,9 +2612,7 @@ async def test_options_addon_restart_failed( assert set_addon_options.call_count == 1 new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": new_addon_options}, + "core_zwave_js", AddonsOptions(config=new_addon_options) ) assert client.disconnect.call_count == disconnect_calls assert result["type"] is FlowResultType.SHOW_PROGRESS @@ -2461,9 +2630,7 @@ async def test_options_addon_restart_failed( old_addon_options.pop("network_key") assert set_addon_options.call_count == 2 assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": old_addon_options}, + "core_zwave_js", AddonsOptions(config=old_addon_options) ) assert result["type"] is FlowResultType.SHOW_PROGRESS assert result["step_id"] == "start_addon" @@ -2494,7 +2661,14 @@ async def test_options_addon_restart_failed( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2587,7 +2761,14 @@ async def test_options_addon_running_server_info_failure( ), [ ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {}, { "device": "/test", @@ -2613,7 +2794,14 @@ async def test_options_addon_running_server_info_failure( 0, ), ( - {"config": ADDON_DISCOVERY_INFO}, + [ + Discovery( + addon="core_zwave_js", + service="zwave_js", + 
uuid=uuid4(), + config=ADDON_DISCOVERY_INFO, + ) + ], {"use_addon": True}, { "device": "/test", @@ -2685,7 +2873,7 @@ async def test_options_addon_not_installed( result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_addon" @@ -2697,9 +2885,7 @@ async def test_options_addon_not_installed( new_addon_options["device"] = new_addon_options.pop("usb_path") assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - {"options": new_addon_options}, + "core_zwave_js", AddonsOptions(config=new_addon_options) ) assert client.disconnect.call_count == disconnect_calls diff --git a/tests/components/zwave_js/test_config_validation.py b/tests/components/zwave_js/test_config_validation.py index 8428972bde1963..cebbde3c9b13f0 100644 --- a/tests/components/zwave_js/test_config_validation.py +++ b/tests/components/zwave_js/test_config_validation.py @@ -1,27 +1,31 @@ """Test the Z-Wave JS config validation helpers.""" +from typing import Any + import pytest import voluptuous as vol -from homeassistant.components.zwave_js.config_validation import boolean +from homeassistant.components.zwave_js.config_validation import VALUE_SCHEMA, boolean -def test_boolean_validation() -> None: - """Test boolean config validator.""" - # test bool - assert boolean(True) - assert not boolean(False) - # test strings - assert boolean("TRUE") - assert not boolean("FALSE") - assert boolean("ON") - assert not boolean("NO") - # ensure 1's and 0's don't get converted to bool - with pytest.raises(vol.Invalid): - boolean("1") - with pytest.raises(vol.Invalid): - boolean("0") - with pytest.raises(vol.Invalid): - boolean(1) +@pytest.mark.parametrize( + ("test_cases", "expected_value"), + [ + ([True, "true", "yes", "on", "ON", "enable"], True), + ([False, "false", "no", "off", "NO", "disable"], False), + ([1.1, "1.1"], 1.1), + ([1.0, "1.0"], 1.0), + ([1, "1"], 1), + ], +) +def test_validation(test_cases: list[Any], expected_value: Any) -> None: + """Test config validation.""" + for case in test_cases: + assert VALUE_SCHEMA(case) == expected_value + + +@pytest.mark.parametrize("value", ["invalid", "1", "0", 1, 0]) +def test_invalid_boolean_validation(value: str | int) -> None: + """Test invalid cases for boolean config validator.""" with pytest.raises(vol.Invalid): - boolean(0) + boolean(value) diff --git a/tests/components/zwave_js/test_cover.py b/tests/components/zwave_js/test_cover.py index ce394cb90678d4..b13d4f9787fd4e 100644 --- a/tests/components/zwave_js/test_cover.py +++ b/tests/components/zwave_js/test_cover.py @@ -26,6 +26,7 @@ SERVICE_STOP_COVER_TILT, CoverDeviceClass, CoverEntityFeature, + CoverState, ) from homeassistant.components.zwave_js.const import LOGGER from homeassistant.components.zwave_js.helpers import ZwaveValueMatcher @@ -33,10 +34,6 @@ ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant @@ -63,7 +60,7 @@ async def test_window_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.WINDOW - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Test setting position @@ -170,7 +167,7 @@ async def test_window_cover( 
client.async_send_command.reset_mock() state = hass.states.get(WINDOW_COVER_ENTITY) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test closing await hass.services.async_call( @@ -233,7 +230,7 @@ async def test_window_cover( node.receive_event(event) state = hass.states.get(WINDOW_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED async def test_fibaro_fgr222_shutter_cover( @@ -244,7 +241,7 @@ async def test_fibaro_fgr222_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.SHUTTER - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Test opening tilts @@ -345,7 +342,7 @@ async def test_fibaro_fgr223_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.SHUTTER - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Test opening tilts @@ -441,7 +438,7 @@ async def test_aeotec_nano_shutter_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.WINDOW - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Test opening @@ -507,7 +504,7 @@ async def test_aeotec_nano_shutter_cover( client.async_send_command.reset_mock() state = hass.states.get(AEOTEC_SHUTTER_COVER_ENTITY) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Test closing await hass.services.async_call( @@ -579,7 +576,7 @@ async def test_motor_barrier_cover( assert state assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.GARAGE - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # Test open await hass.services.async_call( @@ -602,7 +599,7 @@ async def test_motor_barrier_cover( # state doesn't change until currentState value update is received state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED client.async_send_command.reset_mock() @@ -627,7 +624,7 @@ async def test_motor_barrier_cover( # state doesn't change until currentState value update is received state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED client.async_send_command.reset_mock() @@ -652,7 +649,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_OPENING + assert state.state == CoverState.OPENING # Barrier sends an opened state event = Event( @@ -675,7 +672,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN # Barrier sends a closing state event = Event( @@ -698,7 +695,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSING + assert state.state == CoverState.CLOSING # Barrier sends a closed state event = Event( @@ -721,7 +718,7 @@ async def test_motor_barrier_cover( node.receive_event(event) state = hass.states.get(GDC_COVER_ENTITY) - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED # Barrier sends a stopped state event = Event( @@ -827,7 +824,7 @@ async def test_fibaro_fgr223_shutter_cover_no_tilt( state = 
hass.states.get(FIBARO_FGR_223_SHUTTER_COVER_ENTITY) assert state - assert state.state == STATE_OPEN + assert state.state == CoverState.OPEN assert ATTR_CURRENT_POSITION in state.attributes assert ATTR_CURRENT_TILT_POSITION not in state.attributes @@ -944,7 +941,7 @@ async def test_nice_ibt4zwave_cover( state = hass.states.get(entity_id) assert state # This device has no state because there is no position value - assert state.state == STATE_CLOSED + assert state.state == CoverState.CLOSED assert state.attributes[ATTR_SUPPORTED_FEATURES] == ( CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN diff --git a/tests/components/zwave_js/test_discovery.py b/tests/components/zwave_js/test_discovery.py index efcd551d70af7a..0be0cca78c8b39 100644 --- a/tests/components/zwave_js/test_discovery.py +++ b/tests/components/zwave_js/test_discovery.py @@ -6,6 +6,7 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.light import ATTR_SUPPORTED_COLOR_MODES, ColorMode from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, @@ -426,3 +427,19 @@ async def test_rediscovery( assert state assert state.state == "Beep Beep" assert "Platform zwave_js does not generate unique IDs" not in caplog.text + + +async def test_aeotec_smart_switch_7( + hass: HomeAssistant, + aeotec_smart_switch_7: Node, + integration: MockConfigEntry, +) -> None: + """Test that Smart Switch 7 has a light and a switch entity.""" + state = hass.states.get("light.smart_switch_7") + assert state + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + ColorMode.HS, + ] + + state = hass.states.get("switch.smart_switch_7") + assert state diff --git a/tests/components/zwave_js/test_init.py b/tests/components/zwave_js/test_init.py index ad268ee8af3c5a..4f858f3e54539e 100644 --- a/tests/components/zwave_js/test_init.py +++ b/tests/components/zwave_js/test_init.py @@ -6,6 +6,7 @@ from unittest.mock import AsyncMock, call, patch from aiohasupervisor import SupervisorError +from aiohasupervisor.models import AddonsOptions import pytest from zwave_js_server.client import Client from zwave_js_server.event import Event @@ -554,7 +555,7 @@ async def test_start_addon( assert install_addon.call_count == 0 assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "core_zwave_js", {"options": addon_options} + "core_zwave_js", AddonsOptions(config=addon_options) ) assert start_addon.call_count == 1 assert start_addon.call_args == call("core_zwave_js") @@ -600,16 +601,16 @@ async def test_install_addon( assert entry.state is ConfigEntryState.SETUP_RETRY assert install_addon.call_count == 1 - assert install_addon.call_args == call(hass, "core_zwave_js") + assert install_addon.call_args == call("core_zwave_js") assert set_addon_options.call_count == 1 assert set_addon_options.call_args == call( - hass, "core_zwave_js", {"options": addon_options} + "core_zwave_js", AddonsOptions(config=addon_options) ) assert start_addon.call_count == 1 assert start_addon.call_args == call("core_zwave_js") -@pytest.mark.parametrize("addon_info_side_effect", [HassioAPIError("Boom")]) +@pytest.mark.parametrize("addon_info_side_effect", [SupervisorError("Boom")]) async def test_addon_info_failure( hass: HomeAssistant, addon_installed, @@ -747,7 +748,7 @@ async def test_addon_options_changed( [ ("1.0.0", True, 1, 1, None, None), ("1.0.0", False, 0, 0, None, None), - ("1.0.0", 
True, 1, 1, HassioAPIError("Boom"), None), + ("1.0.0", True, 1, 1, SupervisorError("Boom"), None), ("1.0.0", True, 0, 1, None, HassioAPIError("Boom")), ], ) diff --git a/tests/components/zwave_js/test_light.py b/tests/components/zwave_js/test_light.py index 376bd700a2a1b9..4c725c6dc291b0 100644 --- a/tests/components/zwave_js/test_light.py +++ b/tests/components/zwave_js/test_light.py @@ -8,6 +8,7 @@ ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, + ATTR_HS_COLOR, ATTR_MAX_MIREDS, ATTR_MIN_MIREDS, ATTR_RGB_COLOR, @@ -37,8 +38,8 @@ ZEN_31_ENTITY, ) -HSM200_V1_ENTITY = "light.hsm200" ZDB5100_ENTITY = "light.matrix_office" +HSM200_V1_ENTITY = "light.hsm200" async def test_light( @@ -510,14 +511,388 @@ async def test_light_none_color_value( assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["hs"] -async def test_black_is_off( +async def test_light_on_off_color( + hass: HomeAssistant, client, logic_group_zdb5100, integration +) -> None: + """Test the light entity for RGB lights without dimming support.""" + node = logic_group_zdb5100 + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + async def update_color(red: int, green: int, blue: int) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 2, # red + "newValue": red, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "red", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 3, # green + "newValue": green, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "green", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 4, # blue + "newValue": blue, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "blue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "newValue": { + "red": red, + "green": green, + "blue": blue, + }, + "prevValue": None, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + async def update_switch_state(state: bool) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Binary Switch", + "commandClass": 37, + "endpoint": 1, + "property": "currentValue", + "newValue": state, + "prevValue": None, + "propertyName": "currentValue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + # Turn on the light. 
Since this is the first call, the light should default to white + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 255, + "green": 255, + "blue": 255, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + # Force the light to turn off + await update_switch_state(False) + + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + # Force the light to turn on (green) + await update_color(0, 255, 0) + await update_switch_state(True) + + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_ON + + client.async_send_command.reset_mock() + + # Set the brightness to 128. This should be encoded in the color value + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_BRIGHTNESS: 128}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 0, + "green": 128, + "blue": 0, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Force the light to turn on (green, 50%) + await update_color(0, 128, 0) + + # Set the color to red. This should preserve the previous brightness value + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_HS_COLOR: (0, 100)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 128, + "green": 0, + "blue": 0, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Force the light to turn on (red, 50%) + await update_color(128, 0, 0) + + # Turn the device off. 
This should only affect the binary switch, not the color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is False + + client.async_send_command.reset_mock() + + # Force the light to turn off + await update_switch_state(False) + + # Turn the device on again. This should only affect the binary switch, not the color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + +async def test_light_color_only( hass: HomeAssistant, client, express_controls_ezmultipli, integration ) -> None: - """Test the black is off light entity.""" + """Test the light entity for RGB lights with Color Switch CC only.""" node = express_controls_ezmultipli state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON + async def update_color(red: int, green: int, blue: int) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 2, # red + "newValue": red, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "red", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 3, # green + "newValue": green, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "green", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 4, # blue + "newValue": blue, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "blue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "newValue": { + "red": red, + "green": green, + "blue": blue, + }, + "prevValue": None, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + # Attempt to turn on the light and ensure it defaults to white await hass.services.async_call( LIGHT_DOMAIN, @@ -539,64 +914,14 @@ async def 
test_black_is_off( client.async_send_command.reset_mock() # Force the light to turn off - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() + await update_color(0, 0, 0) + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_OFF - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() + # Force the light to turn on (50% green) + await update_color(0, 128, 0) + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON @@ -619,6 +944,9 @@ async def test_black_is_off( client.async_send_command.reset_mock() + # Force the light to turn off + await update_color(0, 0, 0) + # Assert that the last color is restored await hass.services.async_call( LIGHT_DOMAIN, @@ -635,44 +963,23 @@ async def test_black_is_off( "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 0, "green": 255, "blue": 0} + assert args["value"] == {"red": 0, "green": 128, "blue": 0} client.async_send_command.reset_mock() - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": None, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() + # Force the light to turn on (50% green) + await update_color(0, 128, 0) + state = hass.states.get(HSM200_V1_ENTITY) - assert state.state == STATE_UNKNOWN + assert state.state == STATE_ON client.async_send_command.reset_mock() - # Assert that call fails if attribute is added to service call + # Assert that the brightness is preserved when changing colors await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_RGBW_COLOR: (255, 76, 255, 0)}, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_RGB_COLOR: (255, 0, 0)}, blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -684,22 +991,21 @@ async def test_black_is_off( "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 255, "green": 76, "blue": 255} + assert args["value"] == {"red": 128, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + # Force the light to turn on (50% red) + await update_color(128, 0, 0) -async def test_black_is_off_zdb5100( - hass: HomeAssistant, client, logic_group_zdb5100, integration -) -> None: - """Test the black is off light entity.""" - node = logic_group_zdb5100 - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF + state = 
hass.states.get(HSM200_V1_ENTITY) + assert state.state == STATE_ON - # Attempt to turn on the light and ensure it defaults to white + # Assert that the color is preserved when changing brightness await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 69}, blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -708,79 +1014,31 @@ async def test_black_is_off_zdb5100( assert args["nodeId"] == node.node_id assert args["valueId"] == { "commandClass": 51, - "endpoint": 1, + "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 255, "green": 255, "blue": 255} + assert args["value"] == {"red": 69, "green": 0, "blue": 0} client.async_send_command.reset_mock() - # Force the light to turn off - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF + await update_color(69, 0, 0) - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, + # Turn off again + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, + blocking=True, ) - node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_ON + await update_color(0, 0, 0) + + client.async_send_command.reset_mock() + # Assert that the color is preserved when turning on with brightness await hass.services.async_call( LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 123}, blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -789,18 +1047,31 @@ async def test_black_is_off_zdb5100( assert args["nodeId"] == node.node_id assert args["valueId"] == { "commandClass": 51, - "endpoint": 1, + "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 0, "green": 0, "blue": 0} + assert args["value"] == {"red": 123, "green": 0, "blue": 0} client.async_send_command.reset_mock() - # Assert that the last color is restored + await update_color(123, 0, 0) + + # Turn off again + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, + blocking=True, + ) + await update_color(0, 0, 0) + + client.async_send_command.reset_mock() + + # Assert that the brightness is preserved when turning on with color await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_HS_COLOR: (240, 100)}, blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -809,14 +1080,14 @@ 
async def test_black_is_off_zdb5100( assert args["nodeId"] == node.node_id assert args["valueId"] == { "commandClass": 51, - "endpoint": 1, + "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 0, "green": 255, "blue": 0} + assert args["value"] == {"red": 0, "green": 0, "blue": 123} client.async_send_command.reset_mock() - # Force the light to turn on + # Clear the color value to trigger an unknown state event = Event( type="value updated", data={ @@ -826,21 +1097,18 @@ async def test_black_is_off_zdb5100( "args": { "commandClassName": "Color Switch", "commandClass": 51, - "endpoint": 1, + "endpoint": 0, "property": "currentColor", "newValue": None, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, + "prevValue": None, "propertyName": "currentColor", }, }, ) node.receive_event(event) await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) + + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_UNKNOWN client.async_send_command.reset_mock() @@ -849,7 +1117,7 @@ async def test_black_is_off_zdb5100( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_RGBW_COLOR: (255, 76, 255, 0)}, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_RGBW_COLOR: (255, 76, 255, 0)}, blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -858,7 +1126,7 @@ async def test_black_is_off_zdb5100( assert args["nodeId"] == node.node_id assert args["valueId"] == { "commandClass": 51, - "endpoint": 1, + "endpoint": 0, "property": "targetColor", } assert args["value"] == {"red": 255, "green": 76, "blue": 255} diff --git a/tests/components/zwave_js/test_services.py b/tests/components/zwave_js/test_services.py index ec13d0262f8ae1..41477f18b97875 100644 --- a/tests/components/zwave_js/test_services.py +++ b/tests/components/zwave_js/test_services.py @@ -497,13 +497,12 @@ async def test_set_config_parameter( caplog.clear() - config_value = aeotec_zw164_siren.values["2-112-0-32"] cmd_result = SetConfigParameterResult("accepted", {"status": 255}) # Test accepted return with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=(config_value, cmd_result), + return_value=cmd_result, ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, @@ -534,7 +533,7 @@ async def test_set_config_parameter( cmd_result.status = "queued" with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=(config_value, cmd_result), + return_value=cmd_result, ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, diff --git a/tests/components/zwave_js/test_update.py b/tests/components/zwave_js/test_update.py index abdceb155f771e..d6683fa24cb5de 100644 --- a/tests/components/zwave_js/test_update.py +++ b/tests/components/zwave_js/test_update.py @@ -16,6 +16,7 @@ ATTR_LATEST_VERSION, ATTR_RELEASE_URL, ATTR_SKIPPED_VERSION, + ATTR_UPDATE_PERCENTAGE, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, SERVICE_SKIP, @@ -155,9 +156,10 @@ async def test_update_entity_states( attrs = state.attributes assert not attrs[ATTR_AUTO_UPDATE] assert attrs[ATTR_INSTALLED_VERSION] == "10.7" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert attrs[ATTR_RELEASE_URL] is None + assert attrs[ATTR_UPDATE_PERCENTAGE] is None await ws_client.send_json( { @@ -417,6 +419,7 @@ async def 
test_update_entity_progress( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] is None event = Event( type="firmware update progress", @@ -439,7 +442,8 @@ async def test_update_entity_progress( state = hass.states.get(UPDATE_ENTITY) assert state attrs = state.attributes - assert attrs[ATTR_IN_PROGRESS] == 5 + assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] == 5 event = Event( type="firmware update finished", @@ -463,6 +467,7 @@ async def test_update_entity_progress( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_INSTALLED_VERSION] == "11.2.4" assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert state.state == STATE_OFF @@ -532,7 +537,8 @@ async def test_update_entity_install_failed( state = hass.states.get(UPDATE_ENTITY) assert state attrs = state.attributes - assert attrs[ATTR_IN_PROGRESS] == 5 + assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] == 5 event = Event( type="firmware update finished", @@ -556,6 +562,7 @@ async def test_update_entity_install_failed( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_INSTALLED_VERSION] == "10.7" assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert state.state == STATE_ON @@ -594,7 +601,8 @@ async def test_update_entity_reload( attrs = state.attributes assert not attrs[ATTR_AUTO_UPDATE] assert attrs[ATTR_INSTALLED_VERSION] == "10.7" - assert not attrs[ATTR_IN_PROGRESS] + assert attrs[ATTR_IN_PROGRESS] is False + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert attrs[ATTR_LATEST_VERSION] == "11.2.4" assert attrs[ATTR_RELEASE_URL] is None @@ -833,6 +841,7 @@ async def test_update_entity_full_restore_data_update_available( assert state attrs = state.attributes assert attrs[ATTR_IN_PROGRESS] is True + assert attrs[ATTR_UPDATE_PERCENTAGE] is None assert len(client.async_send_command.call_args_list) == 2 assert client.async_send_command.call_args_list[1][0][0] == { diff --git a/tests/conftest.py b/tests/conftest.py index 10c9a740256025..35b65c5653cc1b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -36,6 +36,7 @@ import requests_mock import respx from syrupy.assertion import SnapshotAssertion +from syrupy.session import SnapshotSession from homeassistant import block_async_io from homeassistant.exceptions import ServiceNotFound @@ -92,7 +93,7 @@ from homeassistant.util.json import json_loads from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS -from .syrupy import HomeAssistantSnapshotExtension +from .syrupy import HomeAssistantSnapshotExtension, override_syrupy_finish from .typing import ( ClientSessionGenerator, MockHAClientWebSocket, @@ -149,6 +150,11 @@ def pytest_configure(config: pytest.Config) -> None: if config.getoption("verbose") > 0: logging.getLogger().setLevel(logging.DEBUG) + # Override default finish to detect unused snapshots despite xdist + # Temporary workaround until it is finalised inside syrupy + # See https://github.com/syrupy-project/syrupy/pull/901 + SnapshotSession.finish = override_syrupy_finish + def pytest_runtest_setup() -> None: """Prepare pytest_socket and freezegun. 
@@ -1766,10 +1772,30 @@ def mock_bleak_scanner_start() -> Generator[MagicMock]: @pytest.fixture -def mock_integration_frame() -> Generator[Mock]: - """Mock as if we're calling code from inside an integration.""" +def integration_frame_path() -> str: + """Return the path to the integration frame. + + Can be parametrized with + `@pytest.mark.parametrize("integration_frame_path", ["path_to_frame"])` + + - "custom_components/XYZ" for a custom integration + - "homeassistant/components/XYZ" for a core integration + - "homeassistant/XYZ" for core (no integration) + + Defaults to core component `hue` + """ + return "homeassistant/components/hue" + + +@pytest.fixture +def mock_integration_frame(integration_frame_path: str) -> Generator[Mock]: + """Mock where we are calling code from. + + Defaults to calling from `hue` core integration, and can be parametrized + with `integration_frame_path`. + """ correct_frame = Mock( - filename="/home/paulus/homeassistant/components/hue/light.py", + filename=f"/home/paulus/{integration_frame_path}/light.py", lineno="23", line="self.light.is_on", ) diff --git a/tests/helpers/snapshots/test_entity_platform.ambr b/tests/helpers/snapshots/test_entity_platform.ambr new file mode 100644 index 00000000000000..84cbb07bd736f0 --- /dev/null +++ b/tests/helpers/snapshots/test_entity_platform.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_device_info_called + DeviceRegistryEntrySnapshot({ + 'area_id': 'heliport', + 'config_entries': , + 'configuration_url': 'http://192.168.0.100/config', + 'connections': set({ + tuple( + 'mac', + 'abcd', + ), + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': 'test-hw', + 'id': , + 'identifiers': set({ + tuple( + 'hue', + '1234', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'test-manuf', + 'model': 'test-model', + 'model_id': None, + 'name': 'test-name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'Heliport', + 'sw_version': 'test-sw', + 'via_device_id': , + }) +# --- diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 498e57d45a4fe4..13e28bb8840bcb 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -6,8 +6,8 @@ import pytest from homeassistant import config_entries, data_entry_flow, setup -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers import config_entry_flow from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform diff --git a/tests/helpers/test_deprecation.py b/tests/helpers/test_deprecation.py index b48e70eff82da7..4cf7e851af35f1 100644 --- a/tests/helpers/test_deprecation.py +++ b/tests/helpers/test_deprecation.py @@ -13,6 +13,7 @@ DeprecatedAlias, DeprecatedConstant, DeprecatedConstantEnum, + EnumWithDeprecatedMembers, check_if_deprecated_constant, deprecated_class, deprecated_function, @@ -520,3 +521,119 @@ def test_dir_with_deprecated_constants( ) -> None: """Test dir() with deprecated constants.""" assert dir_with_deprecated_constants([*module_globals.keys()]) == expected + + +@pytest.mark.parametrize( + ("module_name", "extra_extra_msg"), + [ + ("homeassistant.components.hue.light", ""), # builtin integration + ( + "config.custom_components.hue.light", + ", please report it to the author of the 'hue' custom integration", + ), # custom component 
integration + ], +) +def test_enum_with_deprecated_members( + caplog: pytest.LogCaptureFixture, + module_name: str, + extra_extra_msg: str, +) -> None: + """Test EnumWithDeprecatedMembers.""" + filename = f"/home/paulus/{module_name.replace('.', '/')}.py" + + class TestEnum( + StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "CATS": ("TestEnum.CATS_PER_CM", "2025.11.0"), + "DOGS": ("TestEnum.DOGS_PER_CM", None), + }, + ): + """Zoo units.""" + + CATS_PER_CM = "cats/cm" + DOGS_PER_CM = "dogs/cm" + CATS = "cats/cm" + DOGS = "dogs/cm" + + # mock sys.modules for homeassistant/helpers/frame.py#get_integration_frame + with ( + patch.dict(sys.modules, {module_name: Mock(__file__=filename)}), + patch( + "homeassistant.helpers.frame.linecache.getline", + return_value="await session.close()", + ), + patch( + "homeassistant.helpers.frame.get_current_frame", + return_value=extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename=filename, + lineno="23", + line="await session.close()", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ), + ), + ): + TestEnum.CATS # noqa: B018 + TestEnum.DOGS # noqa: B018 + + assert len(caplog.record_tuples) == 2 + assert ( + "tests.helpers.test_deprecation", + logging.WARNING, + ( + "TestEnum.CATS was used from hue, this is a deprecated enum member which " + "will be removed in HA Core 2025.11.0. Use TestEnum.CATS_PER_CM instead" + f"{extra_extra_msg}" + ), + ) in caplog.record_tuples + assert ( + "tests.helpers.test_deprecation", + logging.WARNING, + ( + "TestEnum.DOGS was used from hue, this is a deprecated enum member. Use " + f"TestEnum.DOGS_PER_CM instead{extra_extra_msg}" + ), + ) in caplog.record_tuples + + +def test_enum_with_deprecated_members_integration_not_found( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test EnumWithDeprecatedMembers when the integration frame is missing.""" + + class TestEnum( + StrEnum, + metaclass=EnumWithDeprecatedMembers, + deprecated={ + "CATS": ("TestEnum.CATS_PER_CM", "2025.11.0"), + "DOGS": ("TestEnum.DOGS_PER_CM", None), + }, + ): + """Zoo units.""" + + CATS_PER_CM = "cats/cm" + DOGS_PER_CM = "dogs/cm" + CATS = "cats/cm" + DOGS = "dogs/cm" + + with patch( + "homeassistant.helpers.frame.get_current_frame", + side_effect=MissingIntegrationFrame, + ): + TestEnum.CATS # noqa: B018 + TestEnum.DOGS # noqa: B018 + + assert len(caplog.record_tuples) == 0 diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index 129c6b0d37ce56..837400d502d791 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -308,12 +308,12 @@ async def test_loading_from_storage( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_1_to_1_7( +async def test_migration_from_1_1( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.1 to 1.7.""" + """Test migration from version 1.1.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 1, @@ -332,7 +332,7 @@ async def test_migration_1_1_to_1_7( }, # Invalid entry type { - "config_entries": [None], + "config_entries": ["234567"], "connections": [], "entry_type": "INVALID_VALUE", "id": "invalid-entry-type", @@ -412,7 +412,7 @@ async def test_migration_1_1_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], 
"configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -451,12 +451,12 @@ async def test_migration_1_1_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_2_to_1_7( +async def test_migration_from_1_2( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.2 to 1.7.""" + """Test migration from version 1.2.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 2, @@ -482,7 +482,7 @@ async def test_migration_1_2_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -556,7 +556,7 @@ async def test_migration_1_2_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -585,12 +585,12 @@ async def test_migration_1_2_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_3_to_1_7( +async def test_migration_fom_1_3( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.3 to 1.7.""" + """Test migration from version 1.3.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 3, @@ -616,7 +616,7 @@ async def test_migration_1_3_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -690,7 +690,7 @@ async def test_migration_1_3_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -719,12 +719,12 @@ async def test_migration_1_3_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_4_to_1_7( +async def test_migration_from_1_4( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.4 to 1.7.""" + """Test migration from version 1.4.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 4, @@ -751,7 +751,7 @@ async def test_migration_1_4_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -826,7 +826,7 @@ async def test_migration_1_4_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -855,12 +855,12 @@ async def test_migration_1_4_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_5_to_1_7( +async def test_migration_from_1_5( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.5 to 1.7.""" + """Test migration from version 1.5.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 5, @@ -888,7 +888,7 @@ async def test_migration_1_5_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -964,7 +964,7 @@ async def 
test_migration_1_5_to_1_7( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -993,12 +993,12 @@ async def test_migration_1_5_to_1_7( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_6_to_1_8( +async def test_migration_from_1_6( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.6 to 1.8.""" + """Test migration from version 1.6.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 6, @@ -1027,7 +1027,7 @@ async def test_migration_1_6_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -1104,7 +1104,7 @@ async def test_migration_1_6_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", @@ -1133,12 +1133,12 @@ async def test_migration_1_6_to_1_8( @pytest.mark.parametrize("load_registries", [False]) @pytest.mark.usefixtures("freezer") -async def test_migration_1_7_to_1_8( +async def test_migration_from_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.7 to 1.8.""" + """Test migration from version 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 7, @@ -1168,7 +1168,7 @@ async def test_migration_1_7_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "disabled_by": None, @@ -1246,7 +1246,7 @@ async def test_migration_1_7_to_1_8( }, { "area_id": None, - "config_entries": [None], + "config_entries": ["234567"], "configuration_url": None, "connections": [], "created_at": "1970-01-01T00:00:00+00:00", diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index bada0869ffd27f..2bf441f70fd22b 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -2314,7 +2314,12 @@ async def test_update_capabilities_too_often_cooldown( @pytest.mark.parametrize( - ("property", "default_value", "values"), [("attribution", None, ["abcd", "efgh"])] + ("property", "default_value", "values"), + [ + ("attribution", None, ["abcd", "efgh"]), + ("attribution", None, [True, 1]), + ("attribution", None, [1.0, 1]), + ], ) async def test_cached_entity_properties( hass: HomeAssistant, property: str, default_value: Any, values: Any @@ -2323,22 +2328,30 @@ async def test_cached_entity_properties( ent1 = entity.Entity() ent2 = entity.Entity() assert getattr(ent1, property) == default_value + assert type(getattr(ent1, property)) is type(default_value) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) # Test set setattr(ent1, f"_attr_{property}", values[0]) assert getattr(ent1, property) == values[0] + assert type(getattr(ent1, property)) is type(values[0]) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) # Test update setattr(ent1, f"_attr_{property}", values[1]) assert getattr(ent1, property) == values[1] + assert type(getattr(ent1, property)) is type(values[1]) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) # 
Test delete delattr(ent1, f"_attr_{property}") assert getattr(ent1, property) == default_value + assert type(getattr(ent1, property)) is type(default_value) assert getattr(ent2, property) == default_value + assert type(getattr(ent2, property)) is type(default_value) async def test_cached_entity_property_delete_attr(hass: HomeAssistant) -> None: diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index db83819085bf2c..e80006dff84a74 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -8,6 +8,7 @@ from unittest.mock import ANY, AsyncMock, Mock, patch import pytest +from syrupy.assertion import SnapshotAssertion import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -878,9 +879,9 @@ async def async_setup_entry( assert full_name in hass.config.components assert len(hass.states.async_entity_ids()) == 1 assert len(entity_registry.entities) == 1 - assert ( - entity_registry.entities["test_domain.test1"].config_entry_id == "super-mock-id" - ) + + entity_registry_entry = entity_registry.entities["test_domain.test1"] + assert entity_registry_entry.config_entry_id == "super-mock-id" async def test_setup_entry_platform_not_ready( @@ -1131,7 +1132,9 @@ async def test_add_entity_with_invalid_id( async def test_device_info_called( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test device info is forwarded correctly.""" config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1185,18 +1188,9 @@ async def async_setup_entry( assert len(hass.states.async_entity_ids()) == 2 device = device_registry.async_get_device(identifiers={("hue", "1234")}) - assert device is not None - assert device.identifiers == {("hue", "1234")} - assert device.configuration_url == "http://192.168.0.100/config" - assert device.connections == {(dr.CONNECTION_NETWORK_MAC, "abcd")} - assert device.entry_type is dr.DeviceEntryType.SERVICE - assert device.manufacturer == "test-manuf" - assert device.model == "test-model" - assert device.name == "test-name" + assert device == snapshot + assert device.config_entries == {config_entry.entry_id} assert device.primary_config_entry == config_entry.entry_id - assert device.suggested_area == "Heliport" - assert device.sw_version == "test-sw" - assert device.hw_version == "test-hw" assert device.via_device_id == via.id diff --git a/tests/helpers/test_entity_registry.py b/tests/helpers/test_entity_registry.py index 9b1d68c777781b..97f7e1dcc561f9 100644 --- a/tests/helpers/test_entity_registry.py +++ b/tests/helpers/test_entity_registry.py @@ -653,36 +653,36 @@ async def test_deleted_entity_removing_config_entry_id( entity_registry: er.EntityRegistry, ) -> None: """Test that we update config entry id in registry on deleted entity.""" - mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") + mock_config1 = MockConfigEntry(domain="light", entry_id="mock-id-1") + mock_config2 = MockConfigEntry(domain="light", entry_id="mock-id-2") - entry = entity_registry.async_get_or_create( - "light", "hue", "5678", config_entry=mock_config + entry1 = entity_registry.async_get_or_create( + "light", "hue", "5678", config_entry=mock_config1 ) - assert entry.config_entry_id == "mock-id-1" - entity_registry.async_remove(entry.entity_id) + assert entry1.config_entry_id == "mock-id-1" + entry2 = entity_registry.async_get_or_create( + "light", "hue", "1234", 
config_entry=mock_config2 + ) + assert entry2.config_entry_id == "mock-id-2" + entity_registry.async_remove(entry1.entity_id) + entity_registry.async_remove(entry2.entity_id) assert len(entity_registry.entities) == 0 - assert len(entity_registry.deleted_entities) == 1 - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].config_entry_id - == "mock-id-1" - ) - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].orphaned_timestamp - is None - ) + assert len(entity_registry.deleted_entities) == 2 + deleted_entry1 = entity_registry.deleted_entities[("light", "hue", "5678")] + assert deleted_entry1.config_entry_id == "mock-id-1" + assert deleted_entry1.orphaned_timestamp is None + deleted_entry2 = entity_registry.deleted_entities[("light", "hue", "1234")] + assert deleted_entry2.config_entry_id == "mock-id-2" + assert deleted_entry2.orphaned_timestamp is None entity_registry.async_clear_config_entry("mock-id-1") assert len(entity_registry.entities) == 0 - assert len(entity_registry.deleted_entities) == 1 - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].config_entry_id - is None - ) - assert ( - entity_registry.deleted_entities[("light", "hue", "5678")].orphaned_timestamp - is not None - ) + assert len(entity_registry.deleted_entities) == 2 + deleted_entry1 = entity_registry.deleted_entities[("light", "hue", "5678")] + assert deleted_entry1.config_entry_id is None + assert deleted_entry1.orphaned_timestamp is not None + assert entity_registry.deleted_entities[("light", "hue", "1234")] == deleted_entry2 async def test_removing_area_id(entity_registry: er.EntityRegistry) -> None: @@ -842,6 +842,123 @@ async def test_migration_1_7(hass: HomeAssistant, hass_storage: dict[str, Any]) assert entry.original_device_class == "class_by_integration" +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_1_11( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.11. + + This is the first version which has deleted entities, make sure deleted entities + are updated. 
+ """ + hass_storage[er.STORAGE_KEY] = { + "version": 1, + "minor_version": 11, + "data": { + "entities": [ + { + "aliases": [], + "area_id": None, + "capabilities": {}, + "config_entry_id": None, + "device_id": None, + "disabled_by": None, + "entity_category": None, + "entity_id": "test.entity", + "has_entity_name": False, + "hidden_by": None, + "icon": None, + "id": "12345", + "modified_at": "1970-01-01T00:00:00+00:00", + "name": None, + "options": {}, + "original_device_class": "best_class", + "original_icon": None, + "original_name": None, + "platform": "super_platform", + "supported_features": 0, + "translation_key": None, + "unique_id": "very_unique", + "unit_of_measurement": None, + "device_class": None, + } + ], + "deleted_entities": [ + { + "config_entry_id": None, + "entity_id": "test.deleted_entity", + "id": "23456", + "orphaned_timestamp": None, + "platform": "super_duper_platform", + "unique_id": "very_very_unique", + } + ], + }, + } + + await er.async_load(hass) + registry = er.async_get(hass) + + entry = registry.async_get_or_create("test", "super_platform", "very_unique") + + assert entry.device_class is None + assert entry.original_device_class == "best_class" + + # Check migrated data + await flush_store(registry._store) + assert hass_storage[er.STORAGE_KEY] == { + "version": er.STORAGE_VERSION_MAJOR, + "minor_version": er.STORAGE_VERSION_MINOR, + "key": er.STORAGE_KEY, + "data": { + "entities": [ + { + "aliases": [], + "area_id": None, + "capabilities": {}, + "categories": {}, + "config_entry_id": None, + "created_at": "1970-01-01T00:00:00+00:00", + "device_id": None, + "disabled_by": None, + "entity_category": None, + "entity_id": "test.entity", + "has_entity_name": False, + "hidden_by": None, + "icon": None, + "id": ANY, + "labels": [], + "modified_at": "1970-01-01T00:00:00+00:00", + "name": None, + "options": {}, + "original_device_class": "best_class", + "original_icon": None, + "original_name": None, + "platform": "super_platform", + "previous_unique_id": None, + "supported_features": 0, + "translation_key": None, + "unique_id": "very_unique", + "unit_of_measurement": None, + "device_class": None, + } + ], + "deleted_entities": [ + { + "config_entry_id": None, + "created_at": "1970-01-01T00:00:00+00:00", + "entity_id": "test.deleted_entity", + "id": "23456", + "modified_at": "1970-01-01T00:00:00+00:00", + "orphaned_timestamp": None, + "platform": "super_duper_platform", + "unique_id": "very_very_unique", + } + ], + }, + } + + async def test_update_entity_unique_id(entity_registry: er.EntityRegistry) -> None: """Test entity's unique_id is updated.""" mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") @@ -1030,14 +1147,17 @@ async def test_disabled_by(entity_registry: er.EntityRegistry) -> None: "light", "hue", "5678", disabled_by=er.RegistryEntryDisabler.HASS ) assert entry.disabled_by is er.RegistryEntryDisabler.HASS + assert entry.disabled is True entry = entity_registry.async_get_or_create( "light", "hue", "5678", disabled_by=er.RegistryEntryDisabler.INTEGRATION ) assert entry.disabled_by is er.RegistryEntryDisabler.HASS + assert entry.disabled is True entry2 = entity_registry.async_get_or_create("light", "hue", "1234") assert entry2.disabled_by is None + assert entry2.disabled is False async def test_disabled_by_config_entry_pref( @@ -1064,6 +1184,25 @@ async def test_disabled_by_config_entry_pref( assert entry2.disabled_by is er.RegistryEntryDisabler.USER +async def test_hidden_by(entity_registry: er.EntityRegistry) -> None: + """Test that we 
can hide an entry when we create it.""" + entry = entity_registry.async_get_or_create( + "light", "hue", "5678", hidden_by=er.RegistryEntryHider.USER + ) + assert entry.hidden_by is er.RegistryEntryHider.USER + assert entry.hidden is True + + entry = entity_registry.async_get_or_create( + "light", "hue", "5678", disabled_by=er.RegistryEntryHider.INTEGRATION + ) + assert entry.hidden_by is er.RegistryEntryHider.USER + assert entry.hidden is True + + entry2 = entity_registry.async_get_or_create("light", "hue", "1234") + assert entry2.hidden_by is None + assert entry2.hidden is False + + async def test_restore_states( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index 19f1ef5bb761d0..a45b418c526938 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -1892,10 +1892,10 @@ def specific_run_callback( "time": False, } - hass.states.async_set("binary_sensor.single", "binary_sensor_on") + hass.states.async_set("binary_sensor.single", "on") await hass.async_block_till_done() assert len(specific_runs) == 9 - assert specific_runs[8] == "binary_sensor_on" + assert specific_runs[8] == "on" assert info.listeners == { "all": False, "domains": set(), diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index b3fbb0faaf4faa..a2a4890810b836 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -1,5 +1,6 @@ """Test the frame helper.""" +from typing import Any from unittest.mock import ANY, Mock, patch import pytest @@ -156,6 +157,97 @@ async def test_get_integration_logger_no_integration( assert logger.name == __name__ +@pytest.mark.parametrize( + ("integration_frame_path", "keywords", "expected_error", "expected_log"), + [ + pytest.param( + "homeassistant/test_core", + {}, + True, + 0, + id="core default", + ), + pytest.param( + "homeassistant/components/test_core_integration", + {}, + False, + 1, + id="core integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {}, + False, + 1, + id="custom integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {"custom_integration_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="custom integration ignore", + ), + pytest.param( + "custom_components/test_custom_integration", + {"custom_integration_behavior": frame.ReportBehavior.ERROR}, + True, + 1, + id="custom integration error", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"core_integration_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="core_integration_behavior ignore", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"core_integration_behavior": frame.ReportBehavior.ERROR}, + True, + 1, + id="core_integration_behavior error", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"core_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="core_behavior ignore", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"core_behavior": frame.ReportBehavior.LOG}, + False, + 1, + id="core_behavior log", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report_usage( + caplog: pytest.LogCaptureFixture, + keywords: dict[str, Any], + expected_error: bool, + expected_log: int, +) -> None: + """Test report.""" + + what = "test_report_string" + + errored = False + try: + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report_usage(what, 
**keywords) + except RuntimeError: + errored = True + + assert errored == expected_error + + assert caplog.text.count(what) == expected_log + + @patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_prevent_flooding( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock @@ -247,3 +339,87 @@ async def test_report_error_if_integration( ), ): frame.report("did a bad thing", error_if_integration=True) + + +@pytest.mark.parametrize( + ("integration_frame_path", "keywords", "expected_error", "expected_log"), + [ + pytest.param( + "homeassistant/test_core", + {}, + True, + 0, + id="core default", + ), + pytest.param( + "homeassistant/components/test_core_integration", + {}, + False, + 1, + id="core integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {}, + False, + 1, + id="custom integration default", + ), + pytest.param( + "custom_components/test_integration_frame", + {"log_custom_component_only": True}, + False, + 1, + id="log_custom_component_only with custom integration", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"log_custom_component_only": True}, + False, + 0, + id="log_custom_component_only with core integration", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"error_if_core": False}, + False, + 1, + id="disable error_if_core", + ), + pytest.param( + "custom_components/test_integration_frame", + {"error_if_integration": True}, + True, + 1, + id="error_if_integration with custom integration", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"error_if_integration": True}, + True, + 1, + id="error_if_integration with core integration", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report( + caplog: pytest.LogCaptureFixture, + keywords: dict[str, Any], + expected_error: bool, + expected_log: int, +) -> None: + """Test report.""" + + what = "test_report_string" + + errored = False + try: + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report(what, **keywords) + except RuntimeError: + errored = True + + assert errored == expected_error + + assert caplog.text.count(what) == expected_log diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index 4d14abb98194a8..cd36fe18933453 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -374,11 +374,16 @@ async def test_assist_api_prompt( "beer": {"description": "Number of beers"}, "wine": {}, }, - } + }, + "script_with_no_fields": { + "description": "This is another test script", + "sequence": [], + }, } }, ) async_expose_entity(hass, "conversation", "script.test_script", True) + async_expose_entity(hass, "conversation", "script.script_with_no_fields", True) entry = MockConfigEntry(title=None) entry.add_to_hass(hass) @@ -511,6 +516,10 @@ def create_entity( ) ) exposed_entities_prompt = """An overview of the areas and the devices in this smart home: +- names: script_with_no_fields + domain: script + state: 'off' + description: This is another test script - names: Kitchen domain: light state: 'on' @@ -657,6 +666,10 @@ async def test_script_tool( "extra_field": {"selector": {"area": {}}}, }, }, + "script_with_no_fields": { + "description": "This is another test script", + "sequence": [], + }, "unexposed_script": { "sequence": [], }, @@ -664,6 +677,7 @@ async def test_script_tool( }, ) async_expose_entity(hass, "conversation", "script.test_script", True) + async_expose_entity(hass, "conversation", 
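# A small sketch of the report_usage keyword surface these parametrized cases
# exercise; the message string is illustrative, and the behaviors shown match
# the defaults implied by the cases above (error for core code, a log entry
# for built-in and custom integrations).
from homeassistant.helpers import frame


def _warn_deprecated_call() -> None:
    """Illustrative wrapper showing the ReportBehavior keywords."""
    frame.report_usage(
        "hypothetical deprecated call",
        core_behavior=frame.ReportBehavior.ERROR,  # raise when called from core code
        core_integration_behavior=frame.ReportBehavior.LOG,  # log for built-in integrations
        custom_integration_behavior=frame.ReportBehavior.LOG,  # log for custom integrations
    )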
"script.script_with_no_fields", True) entity_registry.async_update_entity( "script.test_script", name="script name", aliases={"script alias"} @@ -700,7 +714,8 @@ async def test_script_tool( "test_script": ( "This is a test script. Aliases: ['script name', 'script alias']", vol.Schema(schema), - ) + ), + "script_with_no_fields": ("This is another test script", vol.Schema({})), } tool_input = llm.ToolInput( @@ -781,7 +796,8 @@ async def test_script_tool( "test_script": ( "This is a new test script. Aliases: ['script name', 'script alias']", vol.Schema(schema), - ) + ), + "script_with_no_fields": ("This is another test script", vol.Schema({})), } diff --git a/tests/helpers/test_network.py b/tests/helpers/test_network.py index 0787c56219fbff..3064b215f2f338 100644 --- a/tests/helpers/test_network.py +++ b/tests/helpers/test_network.py @@ -8,8 +8,8 @@ from yarl import URL from homeassistant.components import cloud -from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.helpers.network import ( NoURLAvailableError, _get_cloud_url, @@ -727,7 +727,7 @@ async def test_get_current_request_url_with_known_host( @patch( - "homeassistant.components.hassio.is_hassio", + "homeassistant.helpers.network.is_hassio", Mock(return_value={"hostname": "homeassistant"}), ) @patch( diff --git a/tests/helpers/test_schema_config_entry_flow.py b/tests/helpers/test_schema_config_entry_flow.py index 877e3762d3bed1..e67525253bcf78 100644 --- a/tests/helpers/test_schema_config_entry_flow.py +++ b/tests/helpers/test_schema_config_entry_flow.py @@ -648,6 +648,10 @@ class TestFlow(MockSchemaConfigFlowHandler, domain="test"): options_handler = hass.config_entries.options._progress[result["flow_id"]] assert options_handler._common_handler.flow_state == {"idx": None} + # Ensure that self.options and self._common_handler.options refer to the + # same mutable copy of the options + assert options_handler.options is options_handler._common_handler.options + # In step 1, flow state is updated with user input result = await hass.config_entries.options.async_configure( result["flow_id"], {"option1": "blublu"} diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 1bc331401242cd..f67519905a15cf 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -943,18 +943,9 @@ async def test_wait_basic(hass: HomeAssistant, action_type) -> None: assert not script_obj.is_running assert script_obj.last_action is None - if action_type == "template": - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"completed": True, "remaining": None}}, - "variables": {"wait": {"completed": True, "remaining": None}}, - } - ], - } - ) - else: + expected_var = {"completed": True, "remaining": None} + + if action_type == "trigger": expected_trigger = { "alias": None, "attribute": None, @@ -967,23 +958,18 @@ async def test_wait_basic(hass: HomeAssistant, action_type) -> None: "platform": "state", "to_state": ANY, } - assert_action_trace( - { - "0": [ - { - "result": { - "wait": { - "trigger": expected_trigger, - "remaining": None, - } - }, - "variables": { - "wait": {"remaining": None, "trigger": expected_trigger} - }, - } - ], - } - ) + expected_var["trigger"] = expected_trigger + + assert_action_trace( + { + "0": [ + { + "result": {"wait": expected_var}, + "variables": {"wait": expected_var}, + } + ], + } + ) async def test_wait_for_trigger_variables(hass: HomeAssistant) 
-> None: @@ -1059,28 +1045,21 @@ async def test_wait_basic_times_out(hass: HomeAssistant, action_type) -> None: assert timed_out - if action_type == "template": - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"completed": False, "remaining": None}}, - "variables": {"wait": {"completed": False, "remaining": None}}, - } - ], - } - ) - else: - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, - } - ], - } - ) + expected_var = {"completed": False, "remaining": None} + + if action_type == "trigger": + expected_var["trigger"] = None + + assert_action_trace( + { + "0": [ + { + "result": {"wait": expected_var}, + "variables": {"wait": expected_var}, + } + ], + } + ) @pytest.mark.parametrize("action_type", ["template", "trigger"]) @@ -1183,30 +1162,22 @@ async def test_cancel_wait(hass: HomeAssistant, action_type) -> None: assert not script_obj.is_running assert len(events) == 0 - if action_type == "template": - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"completed": False, "remaining": None}}, - "variables": {"wait": {"completed": False, "remaining": None}}, - } - ], - }, - expected_script_execution="cancelled", - ) - else: - assert_action_trace( - { - "0": [ - { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, - } - ], - }, - expected_script_execution="cancelled", - ) + expected_var = {"completed": False, "remaining": None} + + if action_type == "trigger": + expected_var["trigger"] = None + + assert_action_trace( + { + "0": [ + { + "result": {"wait": expected_var}, + "variables": {"wait": expected_var}, + } + ], + }, + expected_script_execution="cancelled", + ) async def test_wait_template_not_schedule(hass: HomeAssistant) -> None: @@ -1294,10 +1265,11 @@ async def test_wait_timeout( assert len(events) == 1 assert "(timeout: 0:00:05)" in caplog.text - if action_type == "template": - variable_wait = {"wait": {"completed": False, "remaining": 0.0}} - else: - variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"completed": False, "remaining": 0.0}} + + if action_type == "trigger": + variable_wait["wait"]["trigger"] = None + expected_trace = { "0": [ { @@ -1345,7 +1317,7 @@ async def test_wait_trigger_with_zero_timeout( assert len(events) == 1 assert "(timeout: 0:00:00)" in caplog.text - variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"completed": False, "trigger": None, "remaining": 0.0}} expected_trace = { "0": [ { @@ -1393,7 +1365,7 @@ async def test_wait_trigger_matches_with_zero_timeout( assert len(events) == 1 assert "(timeout: 0:00:00)" in caplog.text - variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + variable_wait = {"wait": {"completed": False, "trigger": None, "remaining": 0.0}} expected_trace = { "0": [ { @@ -1533,12 +1505,11 @@ async def test_wait_continue_on_timeout( assert not script_obj.is_running assert len(events) == n_events - if action_type == "template": - result_wait = {"wait": {"completed": False, "remaining": 0.0}} - variable_wait = dict(result_wait) - else: - result_wait = {"wait": {"trigger": None, "remaining": 0.0}} - variable_wait = dict(result_wait) + result_wait = {"wait": {"completed": False, "remaining": 0.0}} + if action_type == "trigger": + result_wait["wait"]["trigger"] = None + + variable_wait = dict(result_wait) expected_trace = { "0": [{"result": result_wait, 
"variables": variable_wait}], } @@ -1766,8 +1737,12 @@ async def async_attach_trigger_mock(*args, **kwargs): { "0": [ { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, + "result": { + "wait": {"completed": False, "trigger": None, "remaining": None} + }, + "variables": { + "wait": {"completed": False, "remaining": None, "trigger": None} + }, } ], } @@ -1807,8 +1782,12 @@ async def async_attach_trigger_mock(*args, **kwargs): { "0": [ { - "result": {"wait": {"trigger": None, "remaining": None}}, - "variables": {"wait": {"remaining": None, "trigger": None}}, + "result": { + "wait": {"completed": False, "trigger": None, "remaining": None} + }, + "variables": { + "wait": {"completed": False, "remaining": None, "trigger": None} + }, } ], } @@ -3717,11 +3696,18 @@ async def test_parallel(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { "result": { "wait": { + "completed": True, + "remaining": None, + "trigger": expected_trigger, + } + }, + "variables": { + "wait": { + "completed": True, "remaining": None, "trigger": expected_trigger, } }, - "variables": {"wait": {"remaining": None, "trigger": expected_trigger}}, } ], "0/parallel/1/sequence/0": [ diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index b8da913d4c572f..d0e1aa3434014f 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -347,6 +347,13 @@ def label_mock(hass: HomeAssistant) -> None: platform="test", device_id=device_has_label1.id, ) + entity_with_label1_from_device_and_different_area = er.RegistryEntry( + entity_id="light.with_label1_from_device_diff_area", + unique_id="with_label1_from_device_diff_area", + platform="test", + device_id=device_has_label1.id, + area_id=area_without_labels.id, + ) entity_with_label1_and_label2_from_device = er.RegistryEntry( entity_id="light.with_label1_and_label2_from_device", unique_id="with_label1_and_label2_from_device", @@ -373,6 +380,7 @@ def label_mock(hass: HomeAssistant) -> None: config_entity_with_my_label.entity_id: config_entity_with_my_label, entity_with_label1_and_label2_from_device.entity_id: entity_with_label1_and_label2_from_device, entity_with_label1_from_device.entity_id: entity_with_label1_from_device, + entity_with_label1_from_device_and_different_area.entity_id: entity_with_label1_from_device_and_different_area, entity_with_labels_from_device.entity_id: entity_with_labels_from_device, entity_with_my_label.entity_id: entity_with_my_label, entity_with_no_labels.entity_id: entity_with_no_labels, @@ -754,6 +762,7 @@ async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: assert { "light.with_label1_from_device", + "light.with_label1_from_device_diff_area", "light.with_labels_from_device", "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 339b372f137356..b8c6b5a25af373 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -4549,7 +4549,7 @@ async def test_async_render_to_info_with_wildcard_matching_state( hass.states.async_set("cover.office_window", "closed") hass.states.async_set("cover.office_skylight", "open") hass.states.async_set("cover.x_skylight", "open") - hass.states.async_set("binary_sensor.door", "open") + hass.states.async_set("binary_sensor.door", "on") await hass.async_block_till_done() info = render_to_info(hass, template_complex_str) @@ -4559,7 +4559,7 @@ 
async def test_async_render_to_info_with_wildcard_matching_state( assert info.all_states is True assert info.rate_limit == template.ALL_STATES_RATE_LIMIT - hass.states.async_set("binary_sensor.door", "closed") + hass.states.async_set("binary_sensor.door", "off") info = render_to_info(hass, template_complex_str) assert not info.domains @@ -6564,3 +6564,21 @@ def test_warn_no_hass(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> template.Template("blah", hass) assert message not in caplog.text caplog.clear() + + +async def test_merge_response_not_mutate_original_object( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test the merge_response does not mutate original service response value.""" + + value = '{"calendar.family": {"events": [{"summary": "An event"}]}' + _template = ( + "{% set calendar_response = " + value + "} %}" + "{{ merge_response(calendar_response) }}" + # We should be able to merge the same response again + # as the merge is working on a copy of the original object (response) + "{{ merge_response(calendar_response) }}" + ) + + tpl = template.Template(_template, hass) + assert tpl.async_render() diff --git a/tests/components/template/test_manual_trigger_entity.py b/tests/helpers/test_trigger_template_entity.py similarity index 100% rename from tests/components/template/test_manual_trigger_entity.py rename to tests/helpers/test_trigger_template_entity.py diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index d450d924f1fa03..50da0ab6332e7f 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -18,7 +18,7 @@ ConfigEntryError, ConfigEntryNotReady, ) -from homeassistant.helpers import update_coordinator +from homeassistant.helpers import frame, update_coordinator from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -57,7 +57,9 @@ def get_crd( - hass: HomeAssistant, update_interval: timedelta | None + hass: HomeAssistant, + update_interval: timedelta | None, + config_entry: config_entries.ConfigEntry | None = None, ) -> update_coordinator.DataUpdateCoordinator[int]: """Make coordinator mocks.""" calls = 0 @@ -70,6 +72,7 @@ async def refresh() -> int: return update_coordinator.DataUpdateCoordinator[int]( hass, _LOGGER, + config_entry=config_entry, name="test", update_method=refresh, update_interval=update_interval, @@ -121,8 +124,7 @@ def update_callback(): async def test_shutdown( - hass: HomeAssistant, - crd: update_coordinator.DataUpdateCoordinator[int], + hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] ) -> None: """Test async_shutdown for update coordinator.""" assert crd.data is None @@ -158,8 +160,7 @@ def update_callback(): async def test_shutdown_on_entry_unload( - hass: HomeAssistant, - crd: update_coordinator.DataUpdateCoordinator[int], + hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] ) -> None: """Test shutdown is requested on entry unload.""" entry = MockConfigEntry() @@ -191,8 +192,7 @@ async def _refresh() -> int: async def test_shutdown_on_hass_stop( - hass: HomeAssistant, - crd: update_coordinator.DataUpdateCoordinator[int], + hass: HomeAssistant, crd: update_coordinator.DataUpdateCoordinator[int] ) -> None: """Test shutdown can be shutdown on STOP event.""" calls = 0 @@ -539,8 +539,8 @@ async def test_stop_refresh_on_ha_stop( ["update_method", "setup_method"], ) async def test_async_config_entry_first_refresh_failure( + hass: 
HomeAssistant, err_msg: tuple[Exception, type[Exception], str], - crd: update_coordinator.DataUpdateCoordinator[int], method: str, caplog: pytest.LogCaptureFixture, ) -> None: @@ -550,6 +550,11 @@ async def test_async_config_entry_first_refresh_failure( will be caught by config_entries.async_setup which will log it with a decreasing level of logging once the first message is logged. """ + entry = MockConfigEntry() + entry._async_set_state( + hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None + ) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) setattr(crd, method, AsyncMock(side_effect=err_msg[0])) with pytest.raises(ConfigEntryNotReady): @@ -572,8 +577,8 @@ async def test_async_config_entry_first_refresh_failure( ["update_method", "setup_method"], ) async def test_async_config_entry_first_refresh_failure_passed_through( + hass: HomeAssistant, err_msg: tuple[Exception, type[Exception], str], - crd: update_coordinator.DataUpdateCoordinator[int], method: str, caplog: pytest.LogCaptureFixture, ) -> None: @@ -583,6 +588,11 @@ async def test_async_config_entry_first_refresh_failure_passed_through( will be caught by config_entries.async_setup which will log it with a decreasing level of logging once the first message is logged. """ + entry = MockConfigEntry() + entry._async_set_state( + hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None + ) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) setattr(crd, method, AsyncMock(side_effect=err_msg[0])) with pytest.raises(err_msg[1]): @@ -593,11 +603,13 @@ async def test_async_config_entry_first_refresh_failure_passed_through( assert err_msg[2] not in caplog.text -async def test_async_config_entry_first_refresh_success( - crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture -) -> None: +async def test_async_config_entry_first_refresh_success(hass: HomeAssistant) -> None: """Test first refresh successfully.""" - + entry = MockConfigEntry() + entry._async_set_state( + hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, None + ) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) crd.setup_method = AsyncMock() await crd.async_config_entry_first_refresh() @@ -605,13 +617,69 @@ async def test_async_config_entry_first_refresh_success( crd.setup_method.assert_called_once() +async def test_async_config_entry_first_refresh_invalid_state( + hass: HomeAssistant, +) -> None: + """Test first refresh fails due to invalid state.""" + entry = MockConfigEntry() + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) + crd.setup_method = AsyncMock() + with pytest.raises( + RuntimeError, + match="Detected code that uses `async_config_entry_first_refresh`, which " + "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " + "but it is in state ConfigEntryState.NOT_LOADED. This will stop working " + "in Home Assistant 2025.11. 
Please report this issue.", + ): + await crd.async_config_entry_first_refresh() + + assert crd.last_update_success is True + crd.setup_method.assert_not_called() + + +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_async_config_entry_first_refresh_invalid_state_in_integration( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test first refresh successfully, despite wrong state.""" + entry = MockConfigEntry() + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) + crd.setup_method = AsyncMock() + + await crd.async_config_entry_first_refresh() + assert crd.last_update_success is True + crd.setup_method.assert_called() + assert ( + "Detected that integration 'hue' uses `async_config_entry_first_refresh`, which " + "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " + "but it is in state ConfigEntryState.NOT_LOADED, This will stop working " + "in Home Assistant 2025.11" + ) in caplog.text + + +async def test_async_config_entry_first_refresh_no_entry(hass: HomeAssistant) -> None: + """Test first refresh successfully.""" + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, None) + crd.setup_method = AsyncMock() + with pytest.raises( + RuntimeError, + match="Detected code that uses `async_config_entry_first_refresh`, " + "which is only supported for coordinators with a config entry and will " + "stop working in Home Assistant 2025.11. Please report this issue.", + ): + await crd.async_config_entry_first_refresh() + + assert crd.last_update_success is True + crd.setup_method.assert_not_called() + + async def test_not_schedule_refresh_if_system_option_disable_polling( hass: HomeAssistant, ) -> None: """Test we do not schedule a refresh if disable polling in config entry.""" entry = MockConfigEntry(pref_disable_polling=True) - config_entries.current_entry.set(entry) - crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) crd.async_add_listener(lambda: None) assert crd._unsub_refresh is None @@ -651,7 +719,7 @@ async def test_async_set_update_error( async def test_only_callback_on_change_when_always_update_is_false( - crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture + crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: """Test we do not callback listeners unless something has actually changed when always_update is false.""" update_callback = Mock() @@ -721,7 +789,7 @@ async def _update_method() -> int: async def test_always_callback_when_always_update_is_true( - crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture + crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: """Test we callback listeners even though the data is the same when always_update is True.""" update_callback = Mock() @@ -795,3 +863,38 @@ def listener(): unsub() await crd.async_refresh() assert len(last_update_success_times) == 1 + + +async def test_config_entry(hass: HomeAssistant) -> None: + """Test behavior of coordinator.entry.""" + entry = MockConfigEntry() + + # Default without context should be None + crd = update_coordinator.DataUpdateCoordinator[int](hass, _LOGGER, name="test") + assert crd.config_entry is None + + # Explicit None is OK + crd = update_coordinator.DataUpdateCoordinator[int]( + hass, _LOGGER, name="test", config_entry=None + ) + assert crd.config_entry is None + + # Explicit entry is OK + crd = update_coordinator.DataUpdateCoordinator[int]( + hass, _LOGGER, 
name="test", config_entry=entry + ) + assert crd.config_entry is entry + + # set ContextVar + config_entries.current_entry.set(entry) + + # Default with ContextVar should match the ContextVar + crd = update_coordinator.DataUpdateCoordinator[int](hass, _LOGGER, name="test") + assert crd.config_entry is entry + + # Explicit entry different from ContextVar not recommended, but should work + another_entry = MockConfigEntry() + crd = update_coordinator.DataUpdateCoordinator[int]( + hass, _LOGGER, name="test", config_entry=another_entry + ) + assert crd.config_entry is another_entry diff --git a/tests/script/test_gen_requirements_all.py b/tests/script/test_gen_requirements_all.py index 793b3de63c5649..519a5c21855563 100644 --- a/tests/script/test_gen_requirements_all.py +++ b/tests/script/test_gen_requirements_all.py @@ -1,5 +1,7 @@ """Tests for the gen_requirements_all script.""" +from unittest.mock import patch + from script import gen_requirements_all @@ -23,3 +25,27 @@ def test_include_overrides_subsets() -> None: for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): for req in overrides["include"]: assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL + + +def test_requirement_override_markers() -> None: + """Test override markers are applied to the correct requirements.""" + data = { + "pytest": { + "exclude": set(), + "include": set(), + "markers": {"env-canada": "python_version<'3.13'"}, + } + } + with patch.dict( + gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS, data, clear=True + ): + assert ( + gen_requirements_all.process_action_requirement( + "env-canada==0.7.2", "pytest" + ) + == "env-canada==0.7.2;python_version<'3.13'" + ) + assert ( + gen_requirements_all.process_action_requirement("other==1.0", "pytest") + == "other==1.0" + ) diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index e30b2824af2a97..51e56f4874e272 100644 --- a/tests/snapshots/test_config_entries.ambr +++ b/tests/snapshots/test_config_entries.ambr @@ -21,3 +21,83 @@ 'version': 1, }) # --- +# name: test_unique_id_collision_issues + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.11.0', + 'created': , + 'data': dict({ + 'issue_type': 'config_entry_unique_id_collision', + 'unique_id': 'group_1', + }), + 'dismissed_version': None, + 'domain': 'homeassistant', + 'is_fixable': False, + 'is_persistent': False, + 'issue_domain': 'test2', + 'issue_id': 'config_entry_unique_id_collision_test2_group_1', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'config_entry_unique_id_collision', + 'translation_placeholders': dict({ + 'configure_url': '/config/integrations/integration/test2', + 'domain': 'test2', + 'titles': "'Mock Title', 'Mock Title', 'Mock Title'", + 'unique_id': 'group_1', + }), + }) +# --- +# name: test_unique_id_collision_issues.1 + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.11.0', + 'created': , + 'data': dict({ + 'issue_type': 'config_entry_unique_id_collision', + 'unique_id': 'not_unique', + }), + 'dismissed_version': None, + 'domain': 'homeassistant', + 'is_fixable': False, + 'is_persistent': False, + 'issue_domain': 'test3', + 'issue_id': 'config_entry_unique_id_collision_test3_not_unique', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'config_entry_unique_id_collision_many', + 'translation_placeholders': dict({ + 'configure_url': '/config/integrations/integration/test3', + 'domain': 'test3', + 'number_of_entries': '6', + 
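# A minimal sketch of the coordinator pattern the tests above exercise:
# construct the DataUpdateCoordinator with its config entry and run the first
# refresh while the entry is still being set up. The integration-side names
# (async_setup_entry, _async_fetch, "example") are illustrative placeholders.
from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up from a config entry; the entry is in SETUP_IN_PROGRESS here."""

    async def _async_fetch() -> int:
        return 42  # placeholder update method

    coordinator = DataUpdateCoordinator[int](
        hass,
        _LOGGER,
        config_entry=entry,  # omitting this falls back to the current_entry ContextVar
        name="example",
        update_interval=timedelta(minutes=5),
        update_method=_async_fetch,
    )
    # Only valid for a coordinator with a config entry in SETUP_IN_PROGRESS,
    # which is exactly what the new RuntimeError tests above assert.
    await coordinator.async_config_entry_first_refresh()
    return True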
'title_limit': '5', + 'titles': "'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title'", + 'unique_id': 'not_unique', + }), + }) +# --- +# name: test_unique_id_collision_issues.2 + IssueRegistryItemSnapshot({ + 'active': True, + 'breaks_in_ha_version': '2025.11.0', + 'created': , + 'data': dict({ + 'issue_type': 'config_entry_unique_id_collision', + 'unique_id': 'not_unique', + }), + 'dismissed_version': None, + 'domain': 'homeassistant', + 'is_fixable': False, + 'is_persistent': False, + 'issue_domain': 'test3', + 'issue_id': 'config_entry_unique_id_collision_test3_not_unique', + 'learn_more_url': None, + 'severity': , + 'translation_key': 'config_entry_unique_id_collision', + 'translation_placeholders': dict({ + 'configure_url': '/config/integrations/integration/test3', + 'domain': 'test3', + 'titles': "'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title', 'Mock Title'", + 'unique_id': 'not_unique', + }), + }) +# --- diff --git a/tests/syrupy.py b/tests/syrupy.py index b6f753e6c7f908..a3b3f76306369d 100644 --- a/tests/syrupy.py +++ b/tests/syrupy.py @@ -5,14 +5,22 @@ from contextlib import suppress import dataclasses from enum import IntFlag +import json +import os from pathlib import Path from typing import Any import attr import attrs +import pytest +from syrupy.constants import EXIT_STATUS_FAIL_UNUSED +from syrupy.data import Snapshot, SnapshotCollection, SnapshotCollections from syrupy.extensions.amber import AmberDataSerializer, AmberSnapshotExtension from syrupy.location import PyTestLocation +from syrupy.report import SnapshotReport +from syrupy.session import ItemStatus, SnapshotSession from syrupy.types import PropertyFilter, PropertyMatcher, PropertyPath, SerializableData +from syrupy.utils import is_xdist_controller, is_xdist_worker import voluptuous as vol import voluptuous_serialize @@ -197,7 +205,7 @@ def _serializable_issue_registry_entry( cls, data: ir.IssueEntry ) -> SerializableData: """Prepare a Home Assistant issue registry entry for serialization.""" - return IssueRegistryItemSnapshot(data.to_json() | {"created": ANY}) + return IssueRegistryItemSnapshot(dataclasses.asdict(data) | {"created": ANY}) @classmethod def _serializable_state(cls, data: State) -> SerializableData: @@ -246,3 +254,164 @@ def dirname(cls, *, test_location: PyTestLocation) -> str: """ test_dir = Path(test_location.filepath).parent return str(test_dir.joinpath("snapshots")) + + +# Classes and Methods to override default finish behavior in syrupy +# This is needed to handle the xdist plugin in pytest +# The default implementation does not handle the xdist plugin +# and will not work correctly when running tests in parallel +# with pytest-xdist. 
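# The override_syrupy_finish function further below takes `self: SnapshotSession`,
# so one plausible way to activate it (an assumption for illustration; the
# wiring is not shown in this hunk) is to swap it in for the stock method from
# a conftest.py inside the tests package:
from syrupy.session import SnapshotSession

from .syrupy import override_syrupy_finish

SnapshotSession.finish = override_syrupy_finish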
+# Temporary workaround until it is finalised inside syrupy +# See https://github.com/syrupy-project/syrupy/pull/901 + + +class _FakePytestObject: + """Fake object.""" + + def __init__(self, collected_item: dict[str, str]) -> None: + """Initialise fake object.""" + self.__module__ = collected_item["modulename"] + self.__name__ = collected_item["methodname"] + + +class _FakePytestItem: + """Fake pytest.Item object.""" + + def __init__(self, collected_item: dict[str, str]) -> None: + """Initialise fake pytest.Item object.""" + self.nodeid = collected_item["nodeid"] + self.name = collected_item["name"] + self.path = Path(collected_item["path"]) + self.obj = _FakePytestObject(collected_item) + + +def _serialize_collections(collections: SnapshotCollections) -> dict[str, Any]: + return { + k: [c.name for c in v] for k, v in collections._snapshot_collections.items() + } + + +def _serialize_report( + report: SnapshotReport, + collected_items: set[pytest.Item], + selected_items: dict[str, ItemStatus], +) -> dict[str, Any]: + return { + "discovered": _serialize_collections(report.discovered), + "created": _serialize_collections(report.created), + "failed": _serialize_collections(report.failed), + "matched": _serialize_collections(report.matched), + "updated": _serialize_collections(report.updated), + "used": _serialize_collections(report.used), + "_collected_items": [ + { + "nodeid": c.nodeid, + "name": c.name, + "path": str(c.path), + "modulename": c.obj.__module__, + "methodname": c.obj.__name__, + } + for c in list(collected_items) + ], + "_selected_items": { + key: status.value for key, status in selected_items.items() + }, + } + + +def _merge_serialized_collections( + collections: SnapshotCollections, json_data: dict[str, list[str]] +) -> None: + if not json_data: + return + for location, names in json_data.items(): + snapshot_collection = SnapshotCollection(location=location) + for name in names: + snapshot_collection.add(Snapshot(name)) + collections.update(snapshot_collection) + + +def _merge_serialized_report(report: SnapshotReport, json_data: dict[str, Any]) -> None: + _merge_serialized_collections(report.discovered, json_data["discovered"]) + _merge_serialized_collections(report.created, json_data["created"]) + _merge_serialized_collections(report.failed, json_data["failed"]) + _merge_serialized_collections(report.matched, json_data["matched"]) + _merge_serialized_collections(report.updated, json_data["updated"]) + _merge_serialized_collections(report.used, json_data["used"]) + for collected_item in json_data["_collected_items"]: + custom_item = _FakePytestItem(collected_item) + if not any( + t.nodeid == custom_item.nodeid and t.name == custom_item.nodeid + for t in report.collected_items + ): + report.collected_items.add(custom_item) + for key, selected_item in json_data["_selected_items"].items(): + if key in report.selected_items: + status = ItemStatus(selected_item) + if status != ItemStatus.NOT_RUN: + report.selected_items[key] = status + else: + report.selected_items[key] = ItemStatus(selected_item) + + +def override_syrupy_finish(self: SnapshotSession) -> int: + """Override the finish method to allow for custom handling.""" + exitstatus = 0 + self.flush_snapshot_write_queue() + self.report = SnapshotReport( + base_dir=self.pytest_session.config.rootpath, + collected_items=self._collected_items, + selected_items=self._selected_items, + assertions=self._assertions, + options=self.pytest_session.config.option, + ) + + needs_xdist_merge = self.update_snapshots or bool( + 
self.pytest_session.config.option.include_snapshot_details + ) + + if is_xdist_worker(): + if not needs_xdist_merge: + return exitstatus + with open(".pytest_syrupy_worker_count", "w", encoding="utf-8") as f: + f.write(os.getenv("PYTEST_XDIST_WORKER_COUNT")) + with open( + f".pytest_syrupy_{os.getenv("PYTEST_XDIST_WORKER")}_result", + "w", + encoding="utf-8", + ) as f: + json.dump( + _serialize_report( + self.report, self._collected_items, self._selected_items + ), + f, + indent=2, + ) + return exitstatus + if is_xdist_controller(): + return exitstatus + + if needs_xdist_merge: + worker_count = None + try: + with open(".pytest_syrupy_worker_count", encoding="utf-8") as f: + worker_count = f.read() + os.remove(".pytest_syrupy_worker_count") + except FileNotFoundError: + pass + + if worker_count: + for i in range(int(worker_count)): + with open(f".pytest_syrupy_gw{i}_result", encoding="utf-8") as f: + _merge_serialized_report(self.report, json.load(f)) + os.remove(f".pytest_syrupy_gw{i}_result") + + if self.report.num_unused: + if self.update_snapshots: + self.remove_unused_snapshots( + unused_snapshot_collections=self.report.unused, + used_snapshot_collections=self.report.used, + ) + elif not self.warn_unused_snapshots: + exitstatus |= EXIT_STATUS_FAIL_UNUSED + return exitstatus diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py new file mode 100644 index 00000000000000..44a05c0540e527 --- /dev/null +++ b/tests/test_backup_restore.py @@ -0,0 +1,215 @@ +"""Test methods in backup_restore.""" + +from pathlib import Path +import tarfile +from unittest import mock + +import pytest + +from homeassistant import backup_restore + +from .common import get_test_config_dir + + +@pytest.mark.parametrize( + ("side_effect", "content", "expected"), + [ + (FileNotFoundError, "", None), + (None, "", None), + ( + None, + '{"path": "test"}', + backup_restore.RestoreBackupFileContent(backup_file_path=Path("test")), + ), + ], +) +def test_reading_the_instruction_contents( + side_effect: Exception | None, + content: str, + expected: backup_restore.RestoreBackupFileContent | None, +) -> None: + """Test reading the content of the .HA_RESTORE file.""" + with ( + mock.patch( + "pathlib.Path.read_text", + return_value=content, + side_effect=side_effect, + ), + ): + read_content = backup_restore.restore_backup_file_content( + Path(get_test_config_dir()) + ) + assert read_content == expected + + +def test_restoring_backup_that_does_not_exist() -> None: + """Test restoring a backup that does not exist.""" + backup_file_path = Path(get_test_config_dir("backups", "test")) + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("pathlib.Path.read_text", side_effect=FileNotFoundError), + pytest.raises( + ValueError, match=f"Backup file {backup_file_path} does not exist" + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_restoring_backup_when_instructions_can_not_be_read() -> None: + """Test restoring a backup when instructions can not be read.""" + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=None, + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_restoring_backup_that_is_not_a_file() -> None: + """Test restoring a backup that is not a file.""" + backup_file_path = Path(get_test_config_dir("backups", "test")) + 
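# The restore instructions parsed by the tests above live in a small JSON file
# named .HA_RESTORE in the configuration directory; a sketch of writing one
# (the concrete paths are illustrative):
import json
from pathlib import Path

instructions = {"path": "/config/backups/abc123.tar"}
Path("/config/.HA_RESTORE").write_text(json.dumps(instructions), encoding="utf-8")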
with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("pathlib.Path.exists", return_value=True), + mock.patch("pathlib.Path.is_file", return_value=False), + pytest.raises( + ValueError, match=f"Backup file {backup_file_path} does not exist" + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_aborting_for_older_versions() -> None: + """Test that we abort for older versions.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "9999.99.99"}, "compressed": false}' + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("securetar.SecureTarFile"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), + pytest.raises( + ValueError, + match="You need at least Home Assistant version 9999.99.99 to restore this backup", + ), + ): + assert backup_restore.restore_backup(config_dir) is True + + +def test_removal_of_current_configuration_when_restoring() -> None: + """Test that we are removing the current configuration directory.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + mock_config_dir = [ + {"path": Path(config_dir, ".HA_RESTORE"), "is_file": True}, + {"path": Path(config_dir, ".HA_VERSION"), "is_file": True}, + {"path": Path(config_dir, "backups"), "is_file": False}, + {"path": Path(config_dir, "www"), "is_file": False}, + ] + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}' + + def _patched_path_is_file(path: Path, **kwargs): + return [x for x in mock_config_dir if x["path"] == path][0]["is_file"] + + def _patched_path_is_dir(path: Path, **kwargs): + return not [x for x in mock_config_dir if x["path"] == path][0]["is_file"] + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("securetar.SecureTarFile"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("pathlib.Path.is_file", _patched_path_is_file), + mock.patch("pathlib.Path.is_dir", _patched_path_is_dir), + mock.patch( + "pathlib.Path.iterdir", + return_value=[x["path"] for x in mock_config_dir], + ), + mock.patch("pathlib.Path.unlink") as unlink_mock, + mock.patch("shutil.rmtree") as rmtreemock, + ): + assert backup_restore.restore_backup(config_dir) is True + assert unlink_mock.call_count == 2 + assert ( + rmtreemock.call_count == 1 + ) # We have 2 directories in the config directory, but backups is kept + + removed_directories = {Path(call.args[0]) for call in rmtreemock.mock_calls} + assert removed_directories == {Path(config_dir, "www")} + + +def test_extracting_the_contents_of_a_backup_file() -> None: + """Test extracting the contents of a backup file.""" + 
config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}' + + getmembers_mock = mock.MagicMock( + return_value=[ + tarfile.TarInfo(name="data"), + tarfile.TarInfo(name="data/../test"), + tarfile.TarInfo(name="data/.HA_VERSION"), + tarfile.TarInfo(name="data/.storage"), + tarfile.TarInfo(name="data/www"), + ] + ) + extractall_mock = mock.MagicMock() + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch( + "tarfile.open", + return_value=mock.MagicMock( + getmembers=getmembers_mock, + extractall=extractall_mock, + __iter__=lambda x: iter(getmembers_mock.return_value), + ), + ), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("pathlib.Path.is_file", return_value=False), + mock.patch("pathlib.Path.iterdir", return_value=[]), + ): + assert backup_restore.restore_backup(config_dir) is True + assert getmembers_mock.call_count == 1 + assert extractall_mock.call_count == 2 + + assert { + member.name for member in extractall_mock.mock_calls[-1].kwargs["members"] + } == {".HA_VERSION", ".storage", "www"} diff --git a/tests/test_config.py b/tests/test_config.py index 02f8e1fc078361..c8c5b0811196fc 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -4,62 +4,32 @@ from collections import OrderedDict from collections.abc import Generator import contextlib -import copy import logging import os from pathlib import Path -from typing import Any from unittest import mock from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol -from voluptuous import Invalid, MultipleInvalid import yaml from homeassistant import loader import homeassistant.config as config_util -from homeassistant.const import ( - ATTR_ASSUMED_STATE, - ATTR_FRIENDLY_NAME, - CONF_AUTH_MFA_MODULES, - CONF_AUTH_PROVIDERS, - CONF_CUSTOMIZE, - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_NAME, - CONF_PACKAGES, - __version__, -) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - ConfigSource, - HomeAssistant, - State, -) +from homeassistant.const import CONF_PACKAGES, __version__ +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigValidationError, HomeAssistantError -from homeassistant.helpers import ( - check_config, - config_validation as cv, - issue_registry as ir, -) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers import check_config, config_validation as cv from homeassistant.helpers.typing import ConfigType from homeassistant.loader import Integration, async_get_integration from homeassistant.setup import async_setup_component -from homeassistant.util.unit_system import ( - METRIC_SYSTEM, - US_CUSTOMARY_SYSTEM, - UnitSystem, -) from homeassistant.util.yaml import SECRET_YAML from homeassistant.util.yaml.objects import NodeDictClass from .common import ( MockModule, MockPlatform, - MockUser, get_test_config_dir, mock_integration, mock_platform, @@ -509,104 +479,6 @@ async def test_create_default_config_returns_none_if_write_error( assert mock_print.called -def test_core_config_schema() -> None: - """Test core config 
schema.""" - for value in ( - {"unit_system": "K"}, - {"time_zone": "non-exist"}, - {"latitude": "91"}, - {"longitude": -181}, - {"external_url": "not an url"}, - {"internal_url": "not an url"}, - {"currency", 100}, - {"customize": "bla"}, - {"customize": {"light.sensor": 100}}, - {"customize": {"entity_id": []}}, - {"country": "xx"}, - {"language": "xx"}, - {"radius": -10}, - ): - with pytest.raises(MultipleInvalid): - config_util.CORE_CONFIG_SCHEMA(value) - - config_util.CORE_CONFIG_SCHEMA( - { - "name": "Test name", - "latitude": "-23.45", - "longitude": "123.45", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "unit_system": "metric", - "currency": "USD", - "customize": {"sensor.temperature": {"hidden": True}}, - "country": "SE", - "language": "sv", - "radius": "10", - } - ) - - -def test_core_config_schema_internal_external_warning( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test that we warn for internal/external URL with path.""" - config_util.CORE_CONFIG_SCHEMA( - { - "external_url": "https://www.example.com/bla", - "internal_url": "http://example.local/yo", - } - ) - - assert "Invalid external_url set" in caplog.text - assert "Invalid internal_url set" in caplog.text - - -def test_customize_dict_schema() -> None: - """Test basic customize config validation.""" - values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"}) - - for val in values: - with pytest.raises(MultipleInvalid): - config_util.CUSTOMIZE_DICT_SCHEMA(val) - - assert config_util.CUSTOMIZE_DICT_SCHEMA( - {ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"} - ) == {ATTR_FRIENDLY_NAME: "2", ATTR_ASSUMED_STATE: False} - - -def test_customize_glob_is_ordered() -> None: - """Test that customize_glob preserves order.""" - conf = config_util.CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()}) - assert isinstance(conf["customize_glob"], OrderedDict) - - -async def _compute_state(hass: HomeAssistant, config: dict[str, Any]) -> State | None: - await config_util.async_process_ha_core_config(hass, config) - - entity = Entity() - entity.entity_id = "test.test" - entity.hass = hass - entity.schedule_update_ha_state() - - await hass.async_block_till_done() - - return hass.states.get("test.test") - - -async def test_entity_customization(hass: HomeAssistant) -> None: - """Test entity customization through configuration.""" - config = { - CONF_LATITUDE: 50, - CONF_LONGITUDE: 50, - CONF_NAME: "Test", - CONF_CUSTOMIZE: {"test.test": {"hidden": True}}, - } - - state = await _compute_state(hass, config) - - assert state.attributes["hidden"] - - @patch("homeassistant.config.shutil") @patch("homeassistant.config.os") @patch("homeassistant.config.is_docker_env", return_value=False) @@ -696,361 +568,6 @@ def test_config_upgrade_no_file(hass: HomeAssistant) -> None: assert opened_file.write.call_args == mock.call(__version__) -async def test_loading_configuration_from_storage( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - }, - "key": "core.config", - "version": 1, - "minor_version": 4, - } - await config_util.async_process_ha_core_config( - hass, 
{"allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 55 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert hass.config.external_url == "https://www.example.com" - assert hass.config.internal_url == "http://example.local" - assert hass.config.currency == "EUR" - assert hass.config.country == "SE" - assert hass.config.language == "sv" - assert hass.config.radius == 150 - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.config_source is ConfigSource.STORAGE - - -async def test_loading_configuration_from_storage_with_yaml_only( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core and YAML config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - }, - "key": "core.config", - "version": 1, - } - await config_util.async_process_ha_core_config( - hass, {"media_dirs": {"mymedia": "/usr"}, "allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 55 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"mymedia": "/usr"} - assert hass.config.config_source is ConfigSource.STORAGE - - -async def test_migration_and_updating_configuration( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test updating configuration stores the new configuration.""" - core_data = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "imperial", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "currency": "BTC", - }, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await config_util.async_process_ha_core_config( - hass, {"allowlist_external_dirs": "/etc"} - ) - await hass.config.async_update(latitude=50, currency="USD") - - expected_new_core_data = copy.deepcopy(core_data) - # From async_update above - expected_new_core_data["data"]["latitude"] = 50 - expected_new_core_data["data"]["currency"] = "USD" - # 1.1 -> 1.2 store migration with migrated unit system - expected_new_core_data["data"]["unit_system_v2"] = "us_customary" - # 1.1 -> 1.3 defaults for country and language - expected_new_core_data["data"]["country"] = None - expected_new_core_data["data"]["language"] = "en" - # 1.1 -> 1.4 defaults for zone radius - expected_new_core_data["data"]["radius"] = 100 - # Bumped minor version - expected_new_core_data["minor_version"] = 4 - assert hass_storage["core.config"] == expected_new_core_data - assert hass.config.latitude == 50 - assert hass.config.currency == "USD" - assert hass.config.country is None - assert hass.config.language == "en" - assert hass.config.radius == 100 - - -async def test_override_stored_configuration( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test loading core and YAML 
config onto hass object.""" - hass_storage["core.config"] = { - "data": { - "elevation": 10, - "latitude": 55, - "location_name": "Home", - "longitude": 13, - "time_zone": "Europe/Copenhagen", - "unit_system": "metric", - }, - "key": "core.config", - "version": 1, - } - await config_util.async_process_ha_core_config( - hass, {"latitude": 60, "allowlist_external_dirs": "/etc"} - ) - - assert hass.config.latitude == 60 - assert hass.config.longitude == 13 - assert hass.config.elevation == 10 - assert hass.config.location_name == "Home" - assert hass.config.units is METRIC_SYSTEM - assert hass.config.time_zone == "Europe/Copenhagen" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert hass.config.config_source is ConfigSource.YAML - - -async def test_loading_configuration(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "America/New_York", - "allowlist_external_dirs": "/etc", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "media_dirs": {"mymedia": "/usr"}, - "debug": True, - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - }, - ) - - assert hass.config.latitude == 60 - assert hass.config.longitude == 50 - assert hass.config.elevation == 25 - assert hass.config.location_name == "Huis" - assert hass.config.units is US_CUSTOMARY_SYSTEM - assert hass.config.time_zone == "America/New_York" - assert hass.config.external_url == "https://www.example.com" - assert hass.config.internal_url == "http://example.local" - assert len(hass.config.allowlist_external_dirs) == 3 - assert "/etc" in hass.config.allowlist_external_dirs - assert "/usr" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"mymedia": "/usr"} - assert hass.config.config_source is ConfigSource.YAML - assert hass.config.debug is True - assert hass.config.currency == "EUR" - assert hass.config.country == "SE" - assert hass.config.language == "sv" - assert hass.config.radius == 150 - - -@pytest.mark.parametrize( - ("minor_version", "users", "user_data", "default_language"), - [ - (2, (), {}, "en"), - (2, ({"is_owner": True},), {}, "en"), - ( - 2, - ({"id": "user1", "is_owner": True},), - {"user1": {"language": {"language": "sv"}}}, - "sv", - ), - ( - 2, - ({"id": "user1", "is_owner": False},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - (3, (), {}, "en"), - (3, ({"is_owner": True},), {}, "en"), - ( - 3, - ({"id": "user1", "is_owner": True},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - ( - 3, - ({"id": "user1", "is_owner": False},), - {"user1": {"language": {"language": "sv"}}}, - "en", - ), - ], -) -async def test_language_default( - hass: HomeAssistant, - hass_storage: dict[str, Any], - minor_version, - users, - user_data, - default_language, -) -> None: - """Test language config default to owner user's language during migration. 
- - This should only happen if the core store version < 1.3 - """ - core_data = { - "data": {}, - "key": "core.config", - "version": 1, - "minor_version": minor_version, - } - hass_storage["core.config"] = dict(core_data) - - for user_config in users: - user = MockUser(**user_config).add_to_hass(hass) - if user.id not in user_data: - continue - storage_key = f"frontend.user_data_{user.id}" - hass_storage[storage_key] = { - "key": storage_key, - "version": 1, - "data": user_data[user.id], - } - - await config_util.async_process_ha_core_config( - hass, - {}, - ) - assert hass.config.language == default_language - - -async def test_loading_configuration_default_media_dirs_docker( - hass: HomeAssistant, -) -> None: - """Test loading core config onto hass object.""" - with patch("homeassistant.config.is_docker_env", return_value=True): - await config_util.async_process_ha_core_config( - hass, - { - "name": "Huis", - }, - ) - - assert hass.config.location_name == "Huis" - assert len(hass.config.allowlist_external_dirs) == 2 - assert "/media" in hass.config.allowlist_external_dirs - assert hass.config.media_dirs == {"local": "/media"} - - -async def test_loading_configuration_from_packages(hass: HomeAssistant) -> None: - """Test loading packages config onto hass object config.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 39, - "longitude": -1, - "elevation": 500, - "name": "Huis", - "unit_system": "metric", - "time_zone": "Europe/Madrid", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "packages": { - "package_1": {"wake_on_lan": None}, - "package_2": { - "light": {"platform": "hue"}, - "media_extractor": None, - "sun": None, - }, - }, - }, - ) - - # Empty packages not allowed - with pytest.raises(MultipleInvalid): - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 39, - "longitude": -1, - "elevation": 500, - "name": "Huis", - "unit_system": "metric", - "time_zone": "Europe/Madrid", - "packages": {"empty_package": None}, - }, - ) - - -@pytest.mark.parametrize( - ("unit_system_name", "expected_unit_system"), - [ - ("metric", METRIC_SYSTEM), - ("imperial", US_CUSTOMARY_SYSTEM), - ("us_customary", US_CUSTOMARY_SYSTEM), - ], -) -async def test_loading_configuration_unit_system( - hass: HomeAssistant, unit_system_name: str, expected_unit_system: UnitSystem -) -> None: - """Test backward compatibility when loading core config.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": unit_system_name, - "time_zone": "America/New_York", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - }, - ) - - assert hass.config.units is expected_unit_system - - @patch("homeassistant.helpers.check_config.async_check_ha_config_file") async def test_check_ha_config_file_correct(mock_check, hass: HomeAssistant) -> None: """Check that restart propagates to stop.""" @@ -1302,148 +819,6 @@ async def test_merge_duplicate_keys( assert len(config["input_select"]) == 1 -async def test_merge_customize(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - "customize": {"a.a": {"friendly_name": "A"}}, - "packages": { - "pkg1": {"homeassistant": {"customize": {"b.b": {"friendly_name": "BB"}}}} - }, - } - await 
config_util.async_process_ha_core_config(hass, core_config) - - assert hass.data[config_util.DATA_CUSTOMIZE].get("b.b") == {"friendly_name": "BB"} - - -async def test_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading auth provider config onto hass object.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [ - {"type": "homeassistant"}, - ], - CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp", "id": "second"}], - } - if hasattr(hass, "auth"): - del hass.auth - await config_util.async_process_ha_core_config(hass, core_config) - - assert len(hass.auth.auth_providers) == 1 - assert hass.auth.auth_providers[0].type == "homeassistant" - assert len(hass.auth.auth_mfa_modules) == 2 - assert hass.auth.auth_mfa_modules[0].id == "totp" - assert hass.auth.auth_mfa_modules[1].id == "second" - - -async def test_auth_provider_config_default(hass: HomeAssistant) -> None: - """Test loading default auth provider config.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - } - if hasattr(hass, "auth"): - del hass.auth - await config_util.async_process_ha_core_config(hass, core_config) - - assert len(hass.auth.auth_providers) == 1 - assert hass.auth.auth_providers[0].type == "homeassistant" - assert len(hass.auth.auth_mfa_modules) == 1 - assert hass.auth.auth_mfa_modules[0].id == "totp" - - -async def test_disallowed_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth provider is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [ - { - "type": "insecure_example", - "users": [ - { - "username": "test-user", - "password": "test-pass", - "name": "Test Name", - } - ], - } - ], - } - with pytest.raises(Invalid): - await config_util.async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_duplicated_auth_provider_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth provider is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_PROVIDERS: [{"type": "homeassistant"}, {"type": "homeassistant"}], - } - with pytest.raises(Invalid): - await config_util.async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_auth_mfa_module_config(hass: HomeAssistant) -> None: - """Test loading insecure example auth mfa module is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_MFA_MODULES: [ - { - "type": "insecure_example", - "data": [{"user_id": "mock-user", "pin": "test-pin"}], - } - ], - } - with pytest.raises(Invalid): - await config_util.async_process_ha_core_config(hass, core_config) - - -async def test_disallowed_duplicated_auth_mfa_module_config( - hass: HomeAssistant, -) -> None: - """Test loading insecure example auth mfa module is disallowed.""" - core_config = { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "GMT", - CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp"}], - } - with pytest.raises(Invalid): - await 
config_util.async_process_ha_core_config(hass, core_config) - - async def test_merge_split_component_definition(hass: HomeAssistant) -> None: """Test components with trailing description in packages are merged.""" packages = { @@ -1995,74 +1370,6 @@ def test_identify_config_schema(domain, schema, expected) -> None: ) -async def test_core_config_schema_historic_currency( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test core config schema.""" - await config_util.async_process_ha_core_config(hass, {"currency": "LTT"}) - - issue = issue_registry.async_get_issue("homeassistant", "historic_currency") - assert issue - assert issue.translation_placeholders == {"currency": "LTT"} - - -async def test_core_store_historic_currency( - hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry -) -> None: - """Test core config store.""" - core_data = { - "data": { - "currency": "LTT", - }, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await config_util.async_process_ha_core_config(hass, {}) - - issue_id = "historic_currency" - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert issue - assert issue.translation_placeholders == {"currency": "LTT"} - - await hass.config.async_update(currency="EUR") - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert not issue - - -async def test_core_config_schema_no_country( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test core config schema.""" - await config_util.async_process_ha_core_config(hass, {}) - - issue = issue_registry.async_get_issue("homeassistant", "country_not_configured") - assert issue - - -async def test_core_store_no_country( - hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry -) -> None: - """Test core config store.""" - core_data = { - "data": {}, - "key": "core.config", - "version": 1, - "minor_version": 1, - } - hass_storage["core.config"] = dict(core_data) - await config_util.async_process_ha_core_config(hass, {}) - - issue_id = "country_not_configured" - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert issue - - await hass.config.async_update(country="SE") - issue = issue_registry.async_get_issue("homeassistant", issue_id) - assert not issue - - async def test_safe_mode(hass: HomeAssistant) -> None: """Test safe mode.""" assert config_util.safe_mode_enabled(hass.config.config_dir) is False @@ -2482,30 +1789,3 @@ def _load_platform(self, platform: str) -> MockModule: ("platform_int", "sensor"), ("platform_int2", "sensor"), ] - - -async def test_configuration_legacy_template_is_removed(hass: HomeAssistant) -> None: - """Test loading core config onto hass object.""" - await config_util.async_process_ha_core_config( - hass, - { - "latitude": 60, - "longitude": 50, - "elevation": 25, - "name": "Huis", - "unit_system": "imperial", - "time_zone": "America/New_York", - "allowlist_external_dirs": "/etc", - "external_url": "https://www.example.com", - "internal_url": "http://example.local", - "media_dirs": {"mymedia": "/usr"}, - "legacy_templates": True, - "debug": True, - "currency": "EUR", - "country": "SE", - "language": "sv", - "radius": 150, - }, - ) - - assert not getattr(hass.config, "legacy_templates") diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index db78fb2903ed51..41af8af3f21d2c 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -6,6 
+6,7 @@ from collections.abc import Generator from datetime import timedelta import logging +import re from typing import Any, Self from unittest.mock import ANY, AsyncMock, Mock, patch @@ -16,9 +17,9 @@ from homeassistant import config_entries, data_entry_flow, loader from homeassistant.components import dhcp -from homeassistant.components.hassio import HassioServiceInfo from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + CONF_NAME, EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, @@ -36,10 +37,11 @@ ConfigEntryNotReady, HomeAssistantError, ) -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import entity_registry as er, frame, issue_registry as ir from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.json import json_dumps +from homeassistant.helpers.service_info.hassio import HassioServiceInfo from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.setup import async_set_domains_to_be_loaded, async_setup_component @@ -85,8 +87,27 @@ async def async_step_reconfigure(self, data): """Mock Reauth.""" return await self.async_step_reauth_confirm() + class MockFlowHandler2(config_entries.ConfigFlow): + """Define a second mock flow handler.""" + + VERSION = 1 + + async def async_step_reauth(self, data): + """Mock Reauth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm(self, user_input=None): + """Test reauth confirm step.""" + if user_input is None: + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={CONF_NAME: "Custom title"}, + ) + return self.async_abort(reason="test") + with patch.dict( - config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} + config_entries.HANDLERS, + {"comp": MockFlowHandler, "test": MockFlowHandler, "test2": MockFlowHandler2}, ): yield @@ -989,7 +1010,6 @@ async def test_as_dict(snapshot: SnapshotAssertion) -> None: "_tries", "_setup_again_job", "_supports_options", - "_reconfigure_lock", "supports_reconfigure", } @@ -1158,6 +1178,9 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) + class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1191,7 +1214,11 @@ async def async_step_reauth_confirm(self, user_input): # Start first reauth flow to assert that reconfigure notification fires flow1 = await hass.config_entries.flow.async_init( - "test", context={"source": config_entries.SOURCE_REAUTH} + "test", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, ) await hass.async_block_till_done() @@ -1201,7 +1228,11 @@ async def async_step_reauth_confirm(self, user_input): # Start a second reauth flow so we can finish the first and assert that # the reconfigure notification persists until the second one is complete flow2 = await hass.config_entries.flow.async_init( - "test", context={"source": config_entries.SOURCE_REAUTH} + "test", + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, ) flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {}) @@ -4748,6 +4779,75 @@ async def 
test_reauth( assert len(hass.config_entries.flow.async_progress()) == 1 +@pytest.mark.parametrize( + "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] +) +async def test_reauth_reconfigure_missing_entry( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + source: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the async_reauth_helper.""" + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises( + RuntimeError, + match=f"Detected code that initialises a {source} flow without a link " + "to the config entry. Please report this issue.", + ): + await manager.flow.async_init("test", context={"source": source}) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + + +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +@pytest.mark.parametrize( + "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] +) +async def test_reauth_reconfigure_missing_entry_component( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + source: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the async_reauth_helper.""" + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + await manager.flow.async_init("test", context={"source": source}) + await hass.async_block_till_done() + + # Flow still created, but deprecation logged + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == source + + assert ( + f"Detected that integration 'hue' initialises a {source} flow" + " without a link to the config entry at homeassistant/components" in caplog.text + ) + + async def test_reconfigure( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -4764,67 +4864,68 @@ async def test_reconfigure( await manager.async_setup(entry.entry_id) await hass.async_block_till_done() - flow = hass.config_entries.flow - with patch.object(flow, "async_init", wraps=flow.async_init) as mock_init: - entry.async_start_reconfigure( - hass, - context={"extra_context": "some_extra_context"}, - data={"extra_data": 1234}, + def _async_start_reconfigure(config_entry: MockConfigEntry) -> None: + hass.async_create_task( + manager.flow.async_init( + config_entry.domain, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + }, + ), + f"config entry reconfigure {config_entry.title} " + f"{config_entry.domain} {config_entry.entry_id}", ) - await hass.async_block_till_done() + + _async_start_reconfigure(entry) + await hass.async_block_till_done() flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["context"]["entry_id"] == entry.entry_id assert flows[0]["context"]["source"] == config_entries.SOURCE_RECONFIGURE - assert 
flows[0]["context"]["title_placeholders"] == {"name": "test_title"} - assert flows[0]["context"]["extra_context"] == "some_extra_context" - - assert mock_init.call_args.kwargs["data"]["extra_data"] == 1234 assert entry.entry_id != entry2.entry_id - # Check that we can't start duplicate reconfigure flows - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + # Check that we can start duplicate reconfigure flows + # (may need revisiting) + _async_start_reconfigure(entry) await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 - - # Check that we can't start duplicate reconfigure flows when the context is different - entry.async_start_reconfigure(hass, {"diff": "diff"}) - await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 + assert len(hass.config_entries.flow.async_progress()) == 2 # Check that we can start a reconfigure flow for a different entry - entry2.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + _async_start_reconfigure(entry2) await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 2 + assert len(hass.config_entries.flow.async_progress()) == 3 # Abort all existing flows for flow in hass.config_entries.flow.async_progress(): hass.config_entries.flow.async_abort(flow["flow_id"]) await hass.async_block_till_done() - # Check that we can't start duplicate reconfigure flows + # Check that we can start duplicate reconfigure flows # without blocking between flows - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + # (may need revisiting) + _async_start_reconfigure(entry) + _async_start_reconfigure(entry) + _async_start_reconfigure(entry) + _async_start_reconfigure(entry) await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 + assert len(hass.config_entries.flow.async_progress()) == 4 # Abort all existing flows for flow in hass.config_entries.flow.async_progress(): hass.config_entries.flow.async_abort(flow["flow_id"]) await hass.async_block_till_done() - # Check that we can't start reconfigure flows with active reauth flow + # Check that we can start reconfigure flows with active reauth flow + # (may need revisiting) entry.async_start_reauth(hass, {"extra_context": "some_extra_context"}) await hass.async_block_till_done() assert len(hass.config_entries.flow.async_progress()) == 1 - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + _async_start_reconfigure(entry) await hass.async_block_till_done() - assert len(hass.config_entries.flow.async_progress()) == 1 + assert len(hass.config_entries.flow.async_progress()) == 2 # Abort all existing flows for flow in hass.config_entries.flow.async_progress(): @@ -4832,7 +4933,7 @@ async def test_reconfigure( await hass.async_block_till_done() # Check that we can't start reauth flows with active reconfigure flow - entry.async_start_reconfigure(hass, {"extra_context": "some_extra_context"}) + _async_start_reconfigure(entry) await hass.async_block_till_done() assert len(hass.config_entries.flow.async_progress()) == 1 entry.async_start_reauth(hass, {"extra_context": "some_extra_context"}) @@ -4939,20 +5040,46 @@ async def mock_setup(hass: HomeAssistant, _) 
-> bool: assert "test" in hass.config.components -async def test_options_flow_options_not_mutated() -> None: +@pytest.mark.parametrize( + "integration_frame_path", + ["homeassistant/components/my_integration", "homeassistant.core"], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_options_flow_with_config_entry_core() -> None: + """Test that OptionsFlowWithConfigEntry cannot be used in core.""" + entry = MockConfigEntry( + domain="hue", + data={"first": True}, + options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, + ) + + with pytest.raises(RuntimeError, match="inherits from OptionsFlowWithConfigEntry"): + _ = config_entries.OptionsFlowWithConfigEntry(entry) + + +@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_options_flow_with_config_entry(caplog: pytest.LogCaptureFixture) -> None: """Test that OptionsFlowWithConfigEntry doesn't mutate entry options.""" entry = MockConfigEntry( - domain="test", + domain="hue", data={"first": True}, options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, ) options_flow = config_entries.OptionsFlowWithConfigEntry(entry) + assert caplog.text == "" # No deprecation warning for custom components + + # Ensure available at startup + assert options_flow.config_entry is entry + assert options_flow.options == entry.options - options_flow._options["sub_dict"]["2"] = "two" - options_flow._options["sub_list"].append("two") + options_flow.options["sub_dict"]["2"] = "two" + options_flow.options["sub_list"].append("two") - assert options_flow._options == { + # Ensure it does not mutate the entry options + assert options_flow.options == { "sub_dict": {"1": "one", "2": "two"}, "sub_list": ["one", "two"], } @@ -4980,7 +5107,9 @@ async def async_step_reauth(self, data): config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} ): task = asyncio.create_task( - manager.flow.async_init("test", context={"source": "reauth"}) + manager.flow.async_init( + "test", context={"source": "reauth", "entry_id": "abc"} + ) ) await hass.async_block_till_done() manager.flow.async_shutdown() @@ -5122,6 +5251,7 @@ def test_raise_trying_to_add_same_config_entry_twice( "expected_data", "expected_options", "calls_entry_load_unload", + "raises", ), [ ( @@ -5136,6 +5266,7 @@ def test_raise_trying_to_add_same_config_entry_twice( {"vendor": "data2"}, {"vendor": "options2"}, (2, 1), + None, ), ( { @@ -5149,6 +5280,7 @@ def test_raise_trying_to_add_same_config_entry_twice( {"vendor": "data"}, {"vendor": "options"}, (2, 1), + None, ), ( { @@ -5163,6 +5295,7 @@ def test_raise_trying_to_add_same_config_entry_twice( {"vendor": "data2"}, {"vendor": "options2"}, (2, 1), + None, ), ( { @@ -5177,6 +5310,7 @@ def test_raise_trying_to_add_same_config_entry_twice( {"vendor": "data"}, {"vendor": "options"}, (1, 0), + None, ), ( {}, @@ -5185,6 +5319,7 @@ def test_raise_trying_to_add_same_config_entry_twice( {"vendor": "data"}, {"vendor": "options"}, (2, 1), + None, ), ( {"data": {"buyer": "me"}, "options": {}}, @@ -5193,6 +5328,31 @@ def test_raise_trying_to_add_same_config_entry_twice( {"buyer": "me"}, {}, (2, 1), + None, + ), + ( + {"data_updates": {"buyer": "me"}}, + "Test", + "1234", + {"vendor": "data", "buyer": "me"}, + {"vendor": "options"}, + (2, 1), + None, + ), + ( + { + "unique_id": "5678", + "title": "Updated title", + "data": {"vendor": "data2"}, + "options": {"vendor": "options2"}, + 
"data_updates": {"buyer": "me"}, + }, + "Test", + "1234", + {"vendor": "data"}, + {"vendor": "options"}, + (1, 0), + ValueError, ), ], ids=[ @@ -5202,6 +5362,8 @@ def test_raise_trying_to_add_same_config_entry_twice( "unchanged_entry_no_reload", "no_kwargs", "replace_data", + "update_data", + "update_and_data_raises", ], ) @pytest.mark.parametrize( @@ -5221,6 +5383,7 @@ async def test_update_entry_and_reload( expected_options: dict[str, Any], kwargs: dict[str, Any], calls_entry_load_unload: tuple[int, int], + raises: type[Exception] | None, ) -> None: """Test updating an entry and reloading.""" entry = MockConfigEntry( @@ -5255,11 +5418,15 @@ async def async_step_reconfigure(self, data): """Mock Reconfigure.""" return self.async_update_reload_and_abort(entry, **kwargs) + err: Exception with mock_config_flow("comp", MockFlowHandler): - if source == config_entries.SOURCE_REAUTH: - result = await entry.start_reauth_flow(hass) - elif source == config_entries.SOURCE_RECONFIGURE: - result = await entry.start_reconfigure_flow(hass) + try: + if source == config_entries.SOURCE_REAUTH: + result = await entry.start_reauth_flow(hass) + elif source == config_entries.SOURCE_RECONFIGURE: + result = await entry.start_reconfigure_flow(hass) + except Exception as ex: # noqa: BLE001 + err = ex await hass.async_block_till_done() @@ -5268,18 +5435,21 @@ async def async_step_reconfigure(self, data): assert entry.data == expected_data assert entry.options == expected_options assert entry.state == config_entries.ConfigEntryState.LOADED - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == reason + if raises: + assert isinstance(err, raises) + else: + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == reason # Assert entry was reloaded assert len(comp.async_setup_entry.mock_calls) == calls_entry_load_unload[0] assert len(comp.async_unload_entry.mock_calls) == calls_entry_load_unload[1] @pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) -async def test_unhashable_unique_id( +async def test_unhashable_unique_id_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any ) -> None: - """Test the ConfigEntryItems user dict handles unhashable unique_id.""" + """Test the ConfigEntryItems user dict fails unhashable unique_id.""" entries = config_entries.ConfigEntryItems(hass) entry = config_entries.ConfigEntry( data={}, @@ -5294,23 +5464,97 @@ async def test_unhashable_unique_id( version=1, ) + unique_id_string = re.escape(str(unique_id)) + with pytest.raises( + HomeAssistantError, + match=f"The entry unique id {unique_id_string} is not a string.", + ): + entries[entry.entry_id] = entry + + assert entry.entry_id not in entries + + with pytest.raises( + HomeAssistantError, + match=f"The entry unique id {unique_id_string} is not a string.", + ): + entries.get_entry_by_domain_and_unique_id("test", unique_id) + + +@pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) +async def test_unhashable_unique_id_fails_on_update( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any +) -> None: + """Test the ConfigEntryItems user dict fails non-hashable unique_id on update.""" + entries = config_entries.ConfigEntryItems(hass) + entry = config_entries.ConfigEntry( + data={}, + discovery_keys={}, + domain="test", + entry_id="mock_id", + minor_version=1, + options={}, + source="test", + title="title", + unique_id="123", + version=1, + ) + + entries[entry.entry_id] = entry + assert entry.entry_id in entries + + 
unique_id_string = re.escape(str(unique_id)) + with pytest.raises( + HomeAssistantError, + match=f"The entry unique id {unique_id_string} is not a string.", + ): + entries.update_unique_id(entry, unique_id) + + +async def test_string_unique_id_no_warning( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the ConfigEntryItems user dict string unique id doesn't log warning.""" + entries = config_entries.ConfigEntryItems(hass) + entry = config_entries.ConfigEntry( + data={}, + discovery_keys={}, + domain="test", + entry_id="mock_id", + minor_version=1, + options={}, + source="test", + title="title", + unique_id="123", + version=1, + ) + entries[entry.entry_id] = entry + assert ( - "Config entry 'title' from integration test has an invalid unique_id " - f"'{unique_id!s}'" - ) in caplog.text + "Config entry 'title' from integration test has an invalid unique_id" + ) not in caplog.text assert entry.entry_id in entries assert entries[entry.entry_id] is entry - assert entries.get_entry_by_domain_and_unique_id("test", unique_id) == entry + assert entries.get_entry_by_domain_and_unique_id("test", "123") == entry del entries[entry.entry_id] assert not entries - assert entries.get_entry_by_domain_and_unique_id("test", unique_id) is None + assert entries.get_entry_by_domain_and_unique_id("test", "123") is None -@pytest.mark.parametrize("unique_id", [123]) -async def test_hashable_non_string_unique_id( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, unique_id: Any +@pytest.mark.parametrize( + ("unique_id", "type_name"), + [ + (123, "int"), + (2.3, "float"), + ], +) +async def test_hashable_unique_id( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + unique_id: Any, + type_name: str, ) -> None: """Test the ConfigEntryItems user dict handles hashable non string unique_id.""" entries = config_entries.ConfigEntryItems(hass) @@ -5328,8 +5572,10 @@ async def test_hashable_non_string_unique_id( ) entries[entry.entry_id] = entry + assert ( "Config entry 'title' from integration test has an invalid unique_id" + f" '{unique_id}' of type {type_name} when a string is expected" ) in caplog.text assert entry.entry_id in entries @@ -5340,26 +5586,55 @@ async def test_hashable_non_string_unique_id( assert entries.get_entry_by_domain_and_unique_id("test", unique_id) is None +async def test_no_unique_id_no_warning( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the ConfigEntryItems user dict don't log warning with no unique id.""" + entries = config_entries.ConfigEntryItems(hass) + entry = config_entries.ConfigEntry( + data={}, + discovery_keys={}, + domain="test", + entry_id="mock_id", + minor_version=1, + options={}, + source="test", + title="title", + unique_id=None, + version=1, + ) + + entries[entry.entry_id] = entry + + assert ( + "Config entry 'title' from integration test has an invalid unique_id" + ) not in caplog.text + + assert entry.entry_id in entries + assert entries[entry.entry_id] is entry + + @pytest.mark.parametrize( - ("source", "user_input", "expected_result"), + ("context", "user_input", "expected_result"), [ ( - config_entries.SOURCE_IGNORE, + {"source": config_entries.SOURCE_IGNORE}, {"unique_id": "blah", "title": "blah"}, {"type": data_entry_flow.FlowResultType.CREATE_ENTRY}, ), ( - config_entries.SOURCE_REAUTH, + {"source": config_entries.SOURCE_REAUTH, "entry_id": "1234"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_RECONFIGURE, + {"source": 
config_entries.SOURCE_RECONFIGURE, "entry_id": "1234"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_USER, + {"source": config_entries.SOURCE_USER}, None, { "type": data_entry_flow.FlowResultType.ABORT, @@ -5372,7 +5647,7 @@ async def test_hashable_non_string_unique_id( async def test_starting_config_flow_on_single_config_entry( hass: HomeAssistant, manager: config_entries.ConfigEntries, - source: str, + context: dict[str, Any], user_input: dict, expected_result: dict, ) -> None: @@ -5395,6 +5670,7 @@ async def test_starting_config_flow_on_single_config_entry( entry = MockConfigEntry( domain="comp", unique_id="1234", + entry_id="1234", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5403,6 +5679,7 @@ async def test_starting_config_flow_on_single_config_entry( ignored_entry = MockConfigEntry( domain="comp", unique_id="2345", + entry_id="2345", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5417,7 +5694,7 @@ async def test_starting_config_flow_on_single_config_entry( return_value=integration, ): result = await hass.config_entries.flow.async_init( - "comp", context={"source": source}, data=user_input + "comp", context=context, data=user_input ) for key in expected_result: @@ -5425,34 +5702,42 @@ async def test_starting_config_flow_on_single_config_entry( @pytest.mark.parametrize( - ("source", "user_input", "expected_result"), + ("context", "user_input", "expected_result"), [ ( - config_entries.SOURCE_IGNORE, + {"source": config_entries.SOURCE_IGNORE}, {"unique_id": "blah", "title": "blah"}, {"type": data_entry_flow.FlowResultType.CREATE_ENTRY}, ), ( - config_entries.SOURCE_REAUTH, + {"source": config_entries.SOURCE_REAUTH, "entry_id": "2345"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_RECONFIGURE, + {"source": config_entries.SOURCE_RECONFIGURE, "entry_id": "2345"}, None, {"type": data_entry_flow.FlowResultType.FORM, "step_id": "reauth_confirm"}, ), ( - config_entries.SOURCE_USER, + {"source": config_entries.SOURCE_USER}, None, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "not_implemented"}, ), + ( + {"source": config_entries.SOURCE_ZEROCONF}, + None, + { + "type": data_entry_flow.FlowResultType.ABORT, + "reason": "single_instance_allowed", + }, + ), ], ) async def test_starting_config_flow_on_single_config_entry_2( hass: HomeAssistant, manager: config_entries.ConfigEntries, - source: str, + context: dict[str, Any], user_input: dict, expected_result: dict, ) -> None: @@ -5475,6 +5760,7 @@ async def test_starting_config_flow_on_single_config_entry_2( ignored_entry = MockConfigEntry( domain="comp", unique_id="2345", + entry_id="2345", title="Test", data={"vendor": "data"}, options={"vendor": "options"}, @@ -5489,7 +5775,7 @@ async def test_starting_config_flow_on_single_config_entry_2( return_value=integration, ): result = await hass.config_entries.flow.async_init( - "comp", context={"source": source}, data=user_input + "comp", context=context, data=user_input ) for key in expected_result: @@ -5560,8 +5846,20 @@ async def async_step_user(self, user_input=None): assert result["translation_domain"] == HOMEASSISTANT_DOMAIN +@pytest.mark.parametrize( + ("flow_1_unique_id", "flow_2_unique_id"), + [ + (None, None), + ("very_unique", "very_unique"), + (None, config_entries.DEFAULT_DISCOVERY_UNIQUE_ID), + ("very_unique", config_entries.DEFAULT_DISCOVERY_UNIQUE_ID), + ], +) async def 
test_in_progress_get_canceled_when_entry_is_created( - hass: HomeAssistant, manager: config_entries.ConfigEntries + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + flow_1_unique_id: str | None, + flow_2_unique_id: str | None, ) -> None: """Test that we abort all in progress flows when a new entry is created on a single instance only integration.""" integration = loader.Integration( @@ -5589,9 +5887,18 @@ async def async_step_user(self, user_input=None): if user_input is not None: return self.async_create_entry(title="Test Title", data=user_input) + await self.async_set_unique_id(flow_1_unique_id, raise_on_progress=False) return self.async_show_form(step_id="user") - with ( + async def async_step_zeroconfg(self, user_input=None): + """Test user step.""" + if user_input is not None: + return self.async_create_entry(title="Test Title", data=user_input) + + await self.async_set_unique_id(flow_2_unique_id, raise_on_progress=False) + return self.async_show_form(step_id="user") + + with ( mock_config_flow("comp", TestFlow), patch( "homeassistant.loader.async_get_integration", @@ -6677,6 +6984,73 @@ async def test_reauth_helper_alignment( assert helper_flow_init_data == reauth_flow_init_data +@pytest.mark.parametrize( + ("original_unique_id", "new_unique_id", "reason"), + [ + ("unique", "unique", "success"), + (None, None, "success"), + ("unique", "new", "unique_id_mismatch"), + ("unique", None, "unique_id_mismatch"), + (None, "new", "unique_id_mismatch"), + ], +) +@pytest.mark.parametrize( + "source", + [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE], +) +async def test_abort_if_unique_id_mismatch( + hass: HomeAssistant, + source: str, + original_unique_id: str | None, + new_unique_id: str | None, + reason: str, +) -> None: + """Test to check if_unique_id_mismatch behavior.""" + entry = MockConfigEntry( + title="From config flow", + domain="test", + entry_id="01J915Q6T9F6G5V0QJX6HBC94T", + data={"host": "any", "port": 123}, + unique_id=original_unique_id, + ) + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + return await self._async_step_confirm() + + async def async_step_reauth(self, entry_data): + """Test reauth step.""" + return await self._async_step_confirm() + + async def async_step_reconfigure(self, user_input=None): + """Test reauth step.""" + return await self._async_step_confirm() + + async def _async_step_confirm(self): + """Confirm input.""" + await self.async_set_unique_id(new_unique_id) + self._abort_if_unique_id_mismatch() + return self.async_abort(reason="success") + + with mock_config_flow("test", TestFlow): + if source == config_entries.SOURCE_REAUTH: + result = await entry.start_reauth_flow(hass) + elif source == config_entries.SOURCE_RECONFIGURE: + result = await entry.start_reconfigure_flow(hass) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + + def test_state_not_stored_in_storage() -> None: """Test that state is not stored in storage. 
@@ -6720,3 +7094,520 @@ async def test_state_cache_is_cleared_on_entry_disable(hass: HomeAssistant) -> N
     )
     loaded = json_loads(json_dumps(entry.as_json_fragment))
     assert loaded["disabled_by"] == "user"
+
+
+@pytest.mark.parametrize(
+    ("original_unique_id", "new_unique_id", "count"),
+    [
+        ("unique", "unique", 1),
+        ("unique", "new", 2),
+        ("unique", None, 2),
+        (None, "unique", 2),
+    ],
+)
+@pytest.mark.parametrize(
+    "source",
+    [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE],
+)
+async def test_create_entry_reauth_reconfigure(
+    hass: HomeAssistant,
+    source: str,
+    original_unique_id: str | None,
+    new_unique_id: str | None,
+    count: int,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test to highlight unexpected behavior on create_entry."""
+    entry = MockConfigEntry(
+        title="From config flow",
+        domain="test",
+        entry_id="01J915Q6T9F6G5V0QJX6HBC94T",
+        data={"host": "any", "port": 123},
+        unique_id=original_unique_id,
+    )
+    entry.add_to_hass(hass)
+
+    mock_setup_entry = AsyncMock(return_value=True)
+
+    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
+    mock_platform(hass, "test.config_flow", None)
+
+    class TestFlow(config_entries.ConfigFlow):
+        VERSION = 1
+
+        async def async_step_user(self, user_input=None):
+            """Test user step."""
+            return await self._async_step_confirm()
+
+        async def async_step_reauth(self, entry_data):
+            """Test reauth step."""
+            return await self._async_step_confirm()
+
+        async def async_step_reconfigure(self, user_input=None):
+            """Test reconfigure step."""
+            return await self._async_step_confirm()
+
+        async def _async_step_confirm(self):
+            """Confirm input."""
+            await self.async_set_unique_id(new_unique_id)
+            return self.async_create_entry(
+                title="From config flow",
+                data={"token": "supersecret"},
+            )
+
+    assert len(hass.config_entries.async_entries("test")) == 1
+
+    with mock_config_flow("test", TestFlow):
+        result = await getattr(entry, f"start_{source}_flow")(hass)
+        await hass.async_block_till_done()
+        assert result["type"] is FlowResultType.CREATE_ENTRY
+
+    entries = hass.config_entries.async_entries("test")
+    assert len(entries) == count
+    if count == 1:
+        # Show that the previous entry got binned and recreated
+        assert entries[0].entry_id != entry.entry_id
+
+    assert (
+        f"Detected {source} config flow creating a new entry, when it is expected "
+        "to update an existing entry and abort. This will stop working in "
+        "2025.11, please create a bug report at https://github.com/home"
+        "-assistant/core/issues?q=is%3Aopen+is%3Aissue+"
+        "label%3A%22integration%3A+test%22"
+    ) in caplog.text
+
+
+async def test_async_update_entry_unique_id_collision(
+    hass: HomeAssistant,
+    manager: config_entries.ConfigEntries,
+    caplog: pytest.LogCaptureFixture,
+    issue_registry: ir.IssueRegistry,
+) -> None:
+    """Test we warn when async_update_entry creates a unique_id collision.
+
+    Also test an issue registry issue is created.
+    """
+    assert len(issue_registry.issues) == 0
+
+    entry1 = MockConfigEntry(domain="test", unique_id=None)
+    entry2 = MockConfigEntry(domain="test", unique_id="not none")
+    entry3 = MockConfigEntry(domain="test", unique_id="very unique")
+    entry4 = MockConfigEntry(domain="test", unique_id="also very unique")
+    entry1.add_to_manager(manager)
+    entry2.add_to_manager(manager)
+    entry3.add_to_manager(manager)
+    entry4.add_to_manager(manager)
+
+    manager.async_update_entry(entry2, unique_id=None)
+    assert len(issue_registry.issues) == 0
+    assert len(caplog.record_tuples) == 0
+
+    manager.async_update_entry(entry4, unique_id="very unique")
+    assert len(issue_registry.issues) == 1
+    assert len(caplog.record_tuples) == 1
+
+    assert (
+        "Unique id of config entry 'Mock Title' from integration test changed to "
+        "'very unique' which is already in use"
+    ) in caplog.text
+
+    issue_id = "config_entry_unique_id_collision_test_very unique"
+    assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id)
+
+
+@pytest.mark.parametrize("domain", ["flipr"])
+async def test_async_update_entry_unique_id_collision_allowed_domain(
+    hass: HomeAssistant,
+    manager: config_entries.ConfigEntries,
+    caplog: pytest.LogCaptureFixture,
+    issue_registry: ir.IssueRegistry,
+    domain: str,
+) -> None:
+    """Test we warn when async_update_entry creates a unique_id collision.
+
+    This tests we don't warn and don't create issues for domains which have
+    their own migration path.
+    """
+    assert len(issue_registry.issues) == 0
+
+    entry1 = MockConfigEntry(domain=domain, unique_id=None)
+    entry2 = MockConfigEntry(domain=domain, unique_id="not none")
+    entry3 = MockConfigEntry(domain=domain, unique_id="very unique")
+    entry4 = MockConfigEntry(domain=domain, unique_id="also very unique")
+    entry1.add_to_manager(manager)
+    entry2.add_to_manager(manager)
+    entry3.add_to_manager(manager)
+    entry4.add_to_manager(manager)
+
+    manager.async_update_entry(entry2, unique_id=None)
+    assert len(issue_registry.issues) == 0
+    assert len(caplog.record_tuples) == 0
+
+    manager.async_update_entry(entry4, unique_id="very unique")
+    assert len(issue_registry.issues) == 0
+    assert len(caplog.record_tuples) == 0
+
+    assert ("already in use") not in caplog.text
+
+
+async def test_unique_id_collision_issues(
+    hass: HomeAssistant,
+    manager: config_entries.ConfigEntries,
+    caplog: pytest.LogCaptureFixture,
+    issue_registry: ir.IssueRegistry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test issue registry issues are created and removed on unique id collision."""
+    assert len(issue_registry.issues) == 0
+
+    mock_setup_entry = AsyncMock(return_value=True)
+    for i in range(3):
+        mock_integration(
+            hass, MockModule(f"test{i+1}", async_setup_entry=mock_setup_entry)
+        )
+        mock_platform(hass, f"test{i+1}.config_flow", None)
+
+    test2_group_1: list[MockConfigEntry] = []
+    test2_group_2: list[MockConfigEntry] = []
+    test3: list[MockConfigEntry] = []
+    for _ in range(3):
+        await manager.async_add(MockConfigEntry(domain="test1", unique_id=None))
+        test2_group_1.append(MockConfigEntry(domain="test2", unique_id="group_1"))
+        test2_group_2.append(MockConfigEntry(domain="test2", unique_id="group_2"))
+        await manager.async_add(test2_group_1[-1])
+        await manager.async_add(test2_group_2[-1])
+    for _ in range(6):
+        test3.append(MockConfigEntry(domain="test3", unique_id="not_unique"))
+        await manager.async_add(test3[-1])
+    # Add an ignored config entry
+    await manager.async_add(
+        MockConfigEntry(
+            domain="test2", unique_id="group_1", source=config_entries.SOURCE_IGNORE
+        )
+    )
+
+    # Check we get one issue for domain test2 and one issue for domain test3
+    assert len(issue_registry.issues) == 2
+    issue_id = "config_entry_unique_id_collision_test2_group_1"
+    assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot
+    issue_id = "config_entry_unique_id_collision_test3_not_unique"
+    assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot
+
+    # Remove one config entry for domain test3, the translations should be updated
+    await manager.async_remove(test3[0].entry_id)
+    assert set(issue_registry.issues) == {
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"),
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test3_not_unique"),
+    }
+    assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) == snapshot
+
+    # Remove all but two config entries for domain test3
+    for i in range(3):
+        await manager.async_remove(test3[1 + i].entry_id)
+    assert set(issue_registry.issues) == {
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"),
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test3_not_unique"),
+    }
+
+    # Remove the last test3 duplicate, the issue is cleared
+    await manager.async_remove(test3[-1].entry_id)
+    assert set(issue_registry.issues) == {
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"),
+    }
+
+    await manager.async_remove(test2_group_1[0].entry_id)
+    assert set(issue_registry.issues) == {
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_1"),
+    }
+
+    # Remove the last test2 group1 duplicate, a new issue is created
+    await manager.async_remove(test2_group_1[1].entry_id)
+    assert set(issue_registry.issues) == {
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_2"),
+    }
+
+    await manager.async_remove(test2_group_2[0].entry_id)
+    assert set(issue_registry.issues) == {
+        (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_2"),
+    }
+
+    # Remove the last test2 group2 duplicate, the issue is cleared
+    await manager.async_remove(test2_group_2[1].entry_id)
+    assert not issue_registry.issues
+
+
+async def test_context_no_leak(hass: HomeAssistant) -> None:
+    """Test that config entry context does not leak.
+
+    Unlikely to happen in real world, but occurs often in tests.
+ """ + + connected_future = asyncio.Future() + bg_tasks = [] + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Mock setup entry.""" + + async def _async_set_runtime_data(): + # Show that config_entries.current_entry is preserved for child tasks + await connected_future + entry.runtime_data = config_entries.current_entry.get() + + bg_tasks.append(hass.loop.create_task(_async_set_runtime_data())) + + return True + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Mock unload entry.""" + return True + + mock_integration( + hass, + MockModule( + "comp", + async_setup_entry=async_setup_entry, + async_unload_entry=async_unload_entry, + ), + ) + mock_platform(hass, "comp.config_flow", None) + + entry1 = MockConfigEntry(domain="comp") + entry1.add_to_hass(hass) + + await hass.config_entries.async_setup(entry1.entry_id) + assert entry1.state is config_entries.ConfigEntryState.LOADED + assert config_entries.current_entry.get() is None + + # Load an existing config entry + entry2 = MockConfigEntry(domain="comp") + entry2.add_to_hass(hass) + await hass.config_entries.async_setup(entry2.entry_id) + assert entry2.state is config_entries.ConfigEntryState.LOADED + assert config_entries.current_entry.get() is None + + # Add a new config entry (eg. from config flow) + entry3 = MockConfigEntry(domain="comp") + await hass.config_entries.async_add(entry3) + assert entry3.state is config_entries.ConfigEntryState.LOADED + assert config_entries.current_entry.get() is None + + for entry in (entry1, entry2, entry3): + assert entry.state is config_entries.ConfigEntryState.LOADED + assert not hasattr(entry, "runtime_data") + assert config_entries.current_entry.get() is None + + connected_future.set_result(None) + await asyncio.gather(*bg_tasks) + + for entry in (entry1, entry2, entry3): + assert entry.state is config_entries.ConfigEntryState.LOADED + assert entry.runtime_data is entry + assert config_entries.current_entry.get() is None + + +async def test_options_flow_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test _config_entry_id and config_entry properties in options flow.""" + original_entry = MockConfigEntry(domain="test", data={}) + original_entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + + class _OptionsFlow(config_entries.OptionsFlow): + """Test flow.""" + + def __init__(self) -> None: + """Test initialisation.""" + try: + self.init_entry_id = self._config_entry_id + except ValueError as err: + self.init_entry_id = err + try: + self.init_entry = self.config_entry + except ValueError as err: + self.init_entry = err + + async def async_step_init(self, user_input=None): + """Test user step.""" + errors = {} + if user_input is not None: + if user_input.get("abort"): + return self.async_abort(reason="abort") + + errors["entry_id"] = self._config_entry_id + try: + errors["entry"] = self.config_entry + except config_entries.UnknownEntry as err: + errors["entry"] = err + + return self.async_show_form(step_id="init", errors=errors) + + return _OptionsFlow() + + with mock_config_flow("test", TestFlow): + result = await 
hass.config_entries.options.async_init(original_entry.entry_id) + + options_flow = hass.config_entries.options._progress.get(result["flow_id"]) + assert isinstance(options_flow, config_entries.OptionsFlow) + assert options_flow.handler == original_entry.entry_id + assert isinstance(options_flow.init_entry_id, ValueError) + assert ( + str(options_flow.init_entry_id) + == "The config entry id is not available during initialisation" + ) + assert isinstance(options_flow.init_entry, ValueError) + assert ( + str(options_flow.init_entry) + == "The config entry is not available during initialisation" + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] == {} + + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"]["entry_id"] == original_entry.entry_id + assert result["errors"]["entry"] is original_entry + + # Bad handler - not linked to a config entry + options_flow.handler = "123" + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"]["entry_id"] == "123" + assert isinstance(result["errors"]["entry"], config_entries.UnknownEntry) + # Reset handler + options_flow.handler = original_entry.entry_id + + result = await hass.config_entries.options.async_configure( + result["flow_id"], {"abort": True} + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "abort" + + +@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_options_flow_deprecated_config_entry_setter( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that setting config_entry explicitly still works.""" + original_entry = MockConfigEntry(domain="my_integration", data={}) + original_entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration( + hass, MockModule("my_integration", async_setup_entry=mock_setup_entry) + ) + mock_platform(hass, "my_integration.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + + class _OptionsFlow(config_entries.OptionsFlow): + """Test flow.""" + + def __init__(self, entry) -> None: + """Test initialisation.""" + self.config_entry = entry + + async def async_step_init(self, user_input=None): + """Test user step.""" + errors = {} + if user_input is not None: + if user_input.get("abort"): + return self.async_abort(reason="abort") + + errors["entry_id"] = self._config_entry_id + try: + errors["entry"] = self.config_entry + except config_entries.UnknownEntry as err: + errors["entry"] = err + + return self.async_show_form(step_id="init", errors=errors) + + return _OptionsFlow(config_entry) + + with mock_config_flow("my_integration", TestFlow): + result = await hass.config_entries.options.async_init(original_entry.entry_id) + + options_flow = hass.config_entries.options._progress.get(result["flow_id"]) + assert options_flow.config_entry is original_entry + + assert ( + "Detected 
that custom integration 'my_integration' sets option flow "
+        "config_entry explicitly, which is deprecated and will stop working "
+        "in 2025.12" in caplog.text
+    )
+
+
+async def test_add_description_placeholder_automatically(
+    hass: HomeAssistant,
+    manager: config_entries.ConfigEntries,
+) -> None:
+    """Test entry title is added automatically to reauth flows' description placeholder."""
+
+    entry = MockConfigEntry(title="test_title", domain="test")
+
+    mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed())
+    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
+    mock_platform(hass, "test.config_flow", None)
+
+    entry.add_to_hass(hass)
+    await manager.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+
+    flows = hass.config_entries.flow.async_progress_by_handler("test")
+    assert len(flows) == 1
+
+    result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], None)
+    assert result["type"] == FlowResultType.FORM
+    assert result["description_placeholders"] == {"name": "test_title"}
+
+
+async def test_add_description_placeholder_automatically_not_overwrites(
+    hass: HomeAssistant,
+    manager: config_entries.ConfigEntries,
+) -> None:
+    """Test entry title is not added automatically to reauth flows when a custom name exists."""
+
+    entry = MockConfigEntry(title="test_title", domain="test2")
+
+    mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed())
+    mock_integration(hass, MockModule("test2", async_setup_entry=mock_setup_entry))
+    mock_platform(hass, "test2.config_flow", None)
+
+    entry.add_to_hass(hass)
+    await manager.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+
+    flows = hass.config_entries.flow.async_progress_by_handler("test2")
+    assert len(flows) == 1
+
+    result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], None)
+    assert result["type"] == FlowResultType.FORM
+    assert result["description_placeholders"] == {"name": "Custom title"}
diff --git a/tests/test_const.py b/tests/test_const.py
index a370d0f28cd2ef..87a14ecfe9c312 100644
--- a/tests/test_const.py
+++ b/tests/test_const.py
@@ -1,13 +1,17 @@
 """Test const module."""
 
 from enum import Enum
+import logging
+import sys
+from unittest.mock import Mock, patch
 
 import pytest
 
 from homeassistant import const
-from homeassistant.components import lock, sensor
+from homeassistant.components import alarm_control_panel, lock, sensor
 
 from .common import (
+    extract_stack_to_frame,
     help_test_all,
     import_and_test_deprecated_constant,
     import_and_test_deprecated_constant_enum,
@@ -62,7 +66,14 @@ def test_all() -> None:
         "DEVICE_CLASS_",
     )
     + _create_tuples(const.UnitOfApparentPower, "POWER_")
-    + _create_tuples(const.UnitOfPower, "POWER_")
+    + _create_tuples(
+        [
+            const.UnitOfPower.WATT,
+            const.UnitOfPower.KILO_WATT,
+            const.UnitOfPower.BTU_PER_HOUR,
+        ],
+        "POWER_",
+    )
     + _create_tuples(
         [
             const.UnitOfEnergy.KILO_WATT_HOUR,
@@ -212,3 +223,110 @@ def test_deprecated_constants_lock(
     import_and_test_deprecated_constant_enum(
         caplog, const, enum, constant_prefix, remove_in_version
     )
+
+
+def _create_tuples_alarm_states(
+    enum: type[Enum], constant_prefix: str, remove_in_version: str
+) -> list[tuple[Enum, str]]:
+    return [
+        (enum_field, constant_prefix, remove_in_version)
+        for enum_field in enum
+        if enum_field
+        not in [
+            lock.LockState.OPEN,
+            lock.LockState.OPENING,
+        ]
+    ]
+
+
+@pytest.mark.parametrize(
+    ("enum", "constant_prefix", "remove_in_version"),
+    _create_tuples_alarm_states(
+        alarm_control_panel.AlarmControlPanelState, "STATE_ALARM_", "2025.11"
+    ),
+)
+def test_deprecated_constants_alarm(
+    caplog: pytest.LogCaptureFixture,
+    enum: Enum,
+    constant_prefix: str,
+    remove_in_version: str,
+) -> None:
+    """Test deprecated constants."""
+    import_and_test_deprecated_constant_enum(
+        caplog, const, enum, constant_prefix, remove_in_version
+    )
+
+
+def test_deprecated_unit_of_conductivity_alias() -> None:
+    """Test UnitOfConductivity deprecation."""
+
+    # Test the deprecated members are aliases
+    assert set(const.UnitOfConductivity) == {"S/cm", "µS/cm", "mS/cm"}
+
+
+def test_deprecated_unit_of_conductivity_members(
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test UnitOfConductivity deprecation."""
+
+    module_name = "config.custom_components.hue.light"
+    filename = f"/home/paulus/{module_name.replace('.', '/')}.py"
+
+    with (
+        patch.dict(sys.modules, {module_name: Mock(__file__=filename)}),
+        patch(
+            "homeassistant.helpers.frame.linecache.getline",
+            return_value="await session.close()",
+        ),
+        patch(
+            "homeassistant.helpers.frame.get_current_frame",
+            return_value=extract_stack_to_frame(
+                [
+                    Mock(
+                        filename="/home/paulus/homeassistant/core.py",
+                        lineno="23",
+                        line="do_something()",
+                    ),
+                    Mock(
+                        filename=filename,
+                        lineno="23",
+                        line="await session.close()",
+                    ),
+                    Mock(
+                        filename="/home/paulus/aiohue/lights.py",
+                        lineno="2",
+                        line="something()",
+                    ),
+                ]
+            ),
+        ),
+    ):
+        const.UnitOfConductivity.SIEMENS  # noqa: B018
+        const.UnitOfConductivity.MICROSIEMENS  # noqa: B018
+        const.UnitOfConductivity.MILLISIEMENS  # noqa: B018
+
+    assert len(caplog.record_tuples) == 3
+
+    def deprecation_message(member: str, replacement: str) -> str:
+        return (
+            f"UnitOfConductivity.{member} was used from hue, this is a deprecated enum "
+            "member which will be removed in HA Core 2025.11.0. Use UnitOfConductivity."
+ f"{replacement} instead, please report it to the author of the 'hue' custom" + " integration" + ) + + assert ( + const.__name__, + logging.WARNING, + deprecation_message("SIEMENS", "SIEMENS_PER_CM"), + ) in caplog.record_tuples + assert ( + const.__name__, + logging.WARNING, + deprecation_message("MICROSIEMENS", "MICROSIEMENS_PER_CM"), + ) in caplog.record_tuples + assert ( + const.__name__, + logging.WARNING, + deprecation_message("MILLISIEMENS", "MILLISIEMENS_PER_CM"), + ) in caplog.record_tuples diff --git a/tests/test_core.py b/tests/test_core.py index 9f19a372634f65..67ed99daa09ac1 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -9,13 +9,11 @@ import gc import logging import os -from pathlib import Path import re -from tempfile import TemporaryDirectory import threading import time from typing import Any -from unittest.mock import MagicMock, Mock, PropertyMock, patch +from unittest.mock import MagicMock, patch from freezegun import freeze_time import pytest @@ -24,7 +22,6 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, - CONF_UNIT_SYSTEM, EVENT_CALL_SERVICE, EVENT_CORE_CONFIG_UPDATE, EVENT_HOMEASSISTANT_CLOSE, @@ -37,7 +34,6 @@ EVENT_STATE_CHANGED, EVENT_STATE_REPORTED, MATCH_ALL, - __version__, ) import homeassistant.core as ha from homeassistant.core import ( @@ -52,6 +48,7 @@ callback, get_release_channel, ) +from homeassistant.core_config import Config from homeassistant.exceptions import ( HomeAssistantError, InvalidEntityFormatError, @@ -65,12 +62,12 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from homeassistant.util.read_only_dict import ReadOnlyDict -from homeassistant.util.unit_system import METRIC_SYSTEM from .common import ( async_capture_events, async_mock_service, help_test_all, + import_and_test_deprecated_alias, import_and_test_deprecated_constant_enum, ) @@ -1918,173 +1915,6 @@ def service_handler(call: ServiceCall) -> ServiceResponse: assert response_data == expected_response_data -async def test_config_defaults() -> None: - """Test config defaults.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - assert config.hass is hass - assert config.latitude == 0 - assert config.longitude == 0 - assert config.elevation == 0 - assert config.location_name == "Home" - assert config.time_zone == "UTC" - assert config.internal_url is None - assert config.external_url is None - assert config.config_source is ha.ConfigSource.DEFAULT - assert config.skip_pip is False - assert config.skip_pip_packages == [] - assert config.components == set() - assert config.api is None - assert config.config_dir == "/test/ha-config" - assert config.allowlist_external_dirs == set() - assert config.allowlist_external_urls == set() - assert config.media_dirs == {} - assert config.recovery_mode is False - assert config.legacy_templates is False - assert config.currency == "EUR" - assert config.country is None - assert config.language == "en" - assert config.radius == 100 - - -async def test_config_path_with_file() -> None: - """Test get_config_path method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - assert config.path("test.conf") == "/test/ha-config/test.conf" - - -async def test_config_path_with_dir_and_file() -> None: - """Test get_config_path method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf" - - -async def test_config_as_dict() -> None: - 
"""Test as dict.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - type(config.hass.state).value = PropertyMock(return_value="RUNNING") - expected = { - "latitude": 0, - "longitude": 0, - "elevation": 0, - CONF_UNIT_SYSTEM: METRIC_SYSTEM.as_dict(), - "location_name": "Home", - "time_zone": "UTC", - "components": [], - "config_dir": "/test/ha-config", - "whitelist_external_dirs": [], - "allowlist_external_dirs": [], - "allowlist_external_urls": [], - "version": __version__, - "config_source": ha.ConfigSource.DEFAULT, - "recovery_mode": False, - "state": "RUNNING", - "external_url": None, - "internal_url": None, - "currency": "EUR", - "country": None, - "language": "en", - "safe_mode": False, - "debug": False, - "radius": 100, - } - - assert expected == config.as_dict() - - -async def test_config_is_allowed_path() -> None: - """Test is_allowed_path method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - with TemporaryDirectory() as tmp_dir: - # The created dir is in /tmp. This is a symlink on OS X - # causing this test to fail unless we resolve path first. - config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} - - test_file = os.path.join(tmp_dir, "test.jpg") - await asyncio.get_running_loop().run_in_executor( - None, Path(test_file).write_text, "test" - ) - - valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] - for path in valid: - assert config.is_allowed_path(path) - - config.allowlist_external_dirs = {"/home", "/var"} - - invalid = [ - "/hass/config/secure", - "/etc/passwd", - "/root/secure_file", - "/var/../etc/passwd", - test_file, - ] - for path in invalid: - assert not config.is_allowed_path(path) - - with pytest.raises(AssertionError): - config.is_allowed_path(None) - - -async def test_config_is_allowed_external_url() -> None: - """Test is_allowed_external_url method.""" - hass = Mock() - hass.data = {} - config = ha.Config(hass, "/test/ha-config") - config.allowlist_external_urls = [ - "http://x.com/", - "https://y.com/bla/", - "https://z.com/images/1.jpg/", - ] - - valid = [ - "http://x.com/1.jpg", - "http://x.com", - "https://y.com/bla/", - "https://y.com/bla/2.png", - "https://z.com/images/1.jpg", - ] - for url in valid: - assert config.is_allowed_external_url(url) - - invalid = [ - "https://a.co", - "https://y.com/bla_wrong", - "https://y.com/bla/../image.jpg", - "https://z.com/images", - ] - for url in invalid: - assert not config.is_allowed_external_url(url) - - -async def test_event_on_update(hass: HomeAssistant) -> None: - """Test that event is fired on update.""" - events = async_capture_events(hass, EVENT_CORE_CONFIG_UPDATE) - - assert hass.config.latitude != 12 - - await hass.config.async_update(latitude=12) - await hass.async_block_till_done() - - assert hass.config.latitude == 12 - assert len(events) == 1 - assert events[0].data == {"latitude": 12} - - -async def test_bad_timezone_raises_value_error(hass: HomeAssistant) -> None: - """Test bad timezone raises ValueError.""" - with pytest.raises(ValueError): - await hass.config.async_update(time_zone="not_a_timezone") - - async def test_start_taking_too_long(caplog: pytest.LogCaptureFixture) -> None: """Test when async_start takes too long.""" hass = ha.HomeAssistant("/test/ha-config") @@ -2299,53 +2129,6 @@ def test_valid_domain() -> None: assert ha.valid_domain(valid), valid -async def test_additional_data_in_core_config( - hass: HomeAssistant, hass_storage: dict[str, Any] -) -> None: - """Test that we can handle additional 
data in core configuration.""" - config = ha.Config(hass, "/test/ha-config") - config.async_initialize() - hass_storage[ha.CORE_STORAGE_KEY] = { - "version": 1, - "data": {"location_name": "Test Name", "additional_valid_key": "value"}, - } - await config.async_load() - assert config.location_name == "Test Name" - - -async def test_incorrect_internal_external_url( - hass: HomeAssistant, hass_storage: dict[str, Any], caplog: pytest.LogCaptureFixture -) -> None: - """Test that we warn when detecting invalid internal/external url.""" - config = ha.Config(hass, "/test/ha-config") - config.async_initialize() - - hass_storage[ha.CORE_STORAGE_KEY] = { - "version": 1, - "data": { - "internal_url": None, - "external_url": None, - }, - } - await config.async_load() - assert "Invalid external_url set" not in caplog.text - assert "Invalid internal_url set" not in caplog.text - - config = ha.Config(hass, "/test/ha-config") - config.async_initialize() - - hass_storage[ha.CORE_STORAGE_KEY] = { - "version": 1, - "data": { - "internal_url": "https://community.home-assistant.io/profile", - "external_url": "https://www.home-assistant.io/blue", - }, - } - await config.async_load() - assert "Invalid external_url set" in caplog.text - assert "Invalid internal_url set" in caplog.text - - async def test_start_events(hass: HomeAssistant) -> None: """Test events fired when starting Home Assistant.""" hass.state = ha.CoreState.not_running @@ -3213,6 +2996,11 @@ def test_deprecated_constants( import_and_test_deprecated_constant_enum(caplog, ha, enum, "SOURCE_", "2025.1") +def test_deprecated_config(caplog: pytest.LogCaptureFixture) -> None: + """Test deprecated Config class.""" + import_and_test_deprecated_alias(caplog, ha, "Config", Config, "2025.11") + + def test_one_time_listener_repr(hass: HomeAssistant) -> None: """Test one time listener repr.""" @@ -3462,28 +3250,6 @@ async def _test(event: ha.Event): ) in caplog.text -async def test_top_level_components(hass: HomeAssistant) -> None: - """Test top level components are updated when components change.""" - hass.config.components.add("homeassistant") - assert hass.config.components == {"homeassistant"} - assert hass.config.top_level_components == {"homeassistant"} - hass.config.components.add("homeassistant.scene") - assert hass.config.components == {"homeassistant", "homeassistant.scene"} - assert hass.config.top_level_components == {"homeassistant"} - hass.config.components.remove("homeassistant") - assert hass.config.components == {"homeassistant.scene"} - assert hass.config.top_level_components == set() - with pytest.raises(ValueError): - hass.config.components.remove("homeassistant.scene") - with pytest.raises(NotImplementedError): - hass.config.components.discard("homeassistant") - - -async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: - """Test debug mode defaults to off.""" - assert not hass.config.debug - - async def test_async_fire_thread_safety(hass: HomeAssistant) -> None: """Test async_fire thread safety.""" events = async_capture_events(hass, "test_event") @@ -3550,19 +3316,6 @@ async def test_thread_safety_message(hass: HomeAssistant) -> None: await hass.async_add_executor_job(hass.verify_event_loop_thread, "test") -async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: - """Test set_time_zone is deprecated.""" - with pytest.raises( - RuntimeError, - match=re.escape( - "Detected code that set the time zone using set_time_zone instead of " - "async_set_time_zone which will stop working in Home Assistant 2025.6. 
" - "Please report this issue.", - ), - ): - await hass.config.set_time_zone("America/New_York") - - async def test_async_set_updates_last_reported(hass: HomeAssistant) -> None: """Test async_set method updates last_reported AND last_reported_timestamp.""" hass.states.async_set("light.bowl", "on", {}) diff --git a/tests/test_core_config.py b/tests/test_core_config.py new file mode 100644 index 00000000000000..3e0c0999ad32fb --- /dev/null +++ b/tests/test_core_config.py @@ -0,0 +1,1083 @@ +"""Test core_config.""" + +import asyncio +from collections import OrderedDict +import copy +import os +from pathlib import Path +import re +from tempfile import TemporaryDirectory +from typing import Any +from unittest.mock import Mock, PropertyMock, patch + +import pytest +from voluptuous import Invalid, MultipleInvalid +from webrtc_models import RTCConfiguration, RTCIceServer + +from homeassistant.const import ( + ATTR_ASSUMED_STATE, + ATTR_FRIENDLY_NAME, + CONF_AUTH_MFA_MODULES, + CONF_AUTH_PROVIDERS, + CONF_CUSTOMIZE, + CONF_LATITUDE, + CONF_LONGITUDE, + CONF_NAME, + CONF_UNIT_SYSTEM, + EVENT_CORE_CONFIG_UPDATE, + __version__, +) +from homeassistant.core import HomeAssistant, State +from homeassistant.core_config import ( + _CUSTOMIZE_DICT_SCHEMA, + CORE_CONFIG_SCHEMA, + CORE_STORAGE_KEY, + DATA_CUSTOMIZE, + Config, + ConfigSource, + _validate_stun_or_turn_url, + async_process_ha_core_config, +) +from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.entity import Entity +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) + +from .common import MockUser, async_capture_events + + +def test_core_config_schema() -> None: + """Test core config schema.""" + for value in ( + {"unit_system": "K"}, + {"time_zone": "non-exist"}, + {"latitude": "91"}, + {"longitude": -181}, + {"external_url": "not an url"}, + {"internal_url": "not an url"}, + {"currency", 100}, + {"customize": "bla"}, + {"customize": {"light.sensor": 100}}, + {"customize": {"entity_id": []}}, + {"country": "xx"}, + {"language": "xx"}, + {"radius": -10}, + {"webrtc": "bla"}, + {"webrtc": {}}, + ): + with pytest.raises(MultipleInvalid): + CORE_CONFIG_SCHEMA(value) + + CORE_CONFIG_SCHEMA( + { + "name": "Test name", + "latitude": "-23.45", + "longitude": "123.45", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "unit_system": "metric", + "currency": "USD", + "customize": {"sensor.temperature": {"hidden": True}}, + "country": "SE", + "language": "sv", + "radius": "10", + "webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, + } + ) + + +def test_core_config_schema_internal_external_warning( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that we warn for internal/external URL with path.""" + CORE_CONFIG_SCHEMA( + { + "external_url": "https://www.example.com/bla", + "internal_url": "http://example.local/yo", + } + ) + + assert "Invalid external_url set" in caplog.text + assert "Invalid internal_url set" in caplog.text + + +def test_customize_dict_schema() -> None: + """Test basic customize config validation.""" + values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"}) + + for val in values: + with pytest.raises(MultipleInvalid): + _CUSTOMIZE_DICT_SCHEMA(val) + + assert _CUSTOMIZE_DICT_SCHEMA({ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"}) == { + ATTR_FRIENDLY_NAME: "2", + ATTR_ASSUMED_STATE: False, + } + + +def test_webrtc_schema() -> None: + """Test webrtc config validation.""" + 
invalid_webrtc_configs = ( + "bla", + {}, + {"ice_servers": [], "unknown_key": 123}, + {"ice_servers": [{}]}, + {"ice_servers": [{"invalid_key": 123}]}, + ) + + valid_webrtc_configs = ( + ( + {"ice_servers": []}, + {"ice_servers": []}, + ), + ( + {"ice_servers": {"url": "stun:custom_stun_server:3478"}}, + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + ), + ( + {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + ), + ( + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + {"ice_servers": [{"url": ["stun:custom_stun_server:3478"]}]}, + ), + ( + { + "ice_servers": [ + { + "url": ["stun:custom_stun_server:3478"], + "username": "bla", + "credential": "hunter2", + } + ] + }, + { + "ice_servers": [ + { + "url": ["stun:custom_stun_server:3478"], + "username": "bla", + "credential": "hunter2", + } + ] + }, + ), + ) + + for config in invalid_webrtc_configs: + with pytest.raises(MultipleInvalid): + CORE_CONFIG_SCHEMA({"webrtc": config}) + + for config, validated_webrtc in valid_webrtc_configs: + validated = CORE_CONFIG_SCHEMA({"webrtc": config}) + assert validated["webrtc"] == validated_webrtc + + +def test_validate_stun_or_turn_url() -> None: + """Test _validate_stun_or_turn_url.""" + invalid_urls = ( + "custom_stun_server", + "custom_stun_server:3478", + "bum:custom_stun_server:3478" "http://blah.com:80", + ) + + valid_urls = ( + "stun:custom_stun_server:3478", + "turn:custom_stun_server:3478", + "stuns:custom_stun_server:3478", + "turns:custom_stun_server:3478", + # The validator does not reject urls with path + "stun:custom_stun_server:3478/path", + "turn:custom_stun_server:3478/path", + "stuns:custom_stun_server:3478/path", + "turns:custom_stun_server:3478/path", + # The validator allows any query + "stun:custom_stun_server:3478?query", + "turn:custom_stun_server:3478?query", + "stuns:custom_stun_server:3478?query", + "turns:custom_stun_server:3478?query", + ) + + for url in invalid_urls: + with pytest.raises(Invalid): + _validate_stun_or_turn_url(url) + + for url in valid_urls: + assert _validate_stun_or_turn_url(url) == url + + +def test_customize_glob_is_ordered() -> None: + """Test that customize_glob preserves order.""" + conf = CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()}) + assert isinstance(conf["customize_glob"], OrderedDict) + + +async def _compute_state(hass: HomeAssistant, config: dict[str, Any]) -> State | None: + await async_process_ha_core_config(hass, config) + + entity = Entity() + entity.entity_id = "test.test" + entity.hass = hass + entity.schedule_update_ha_state() + + await hass.async_block_till_done() + + return hass.states.get("test.test") + + +async def test_entity_customization(hass: HomeAssistant) -> None: + """Test entity customization through configuration.""" + config = { + CONF_LATITUDE: 50, + CONF_LONGITUDE: 50, + CONF_NAME: "Test", + CONF_CUSTOMIZE: {"test.test": {"hidden": True}}, + } + + state = await _compute_state(hass, config) + + assert state.attributes["hidden"] + + +async def test_loading_configuration_from_storage( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "currency": "EUR", + "country": "SE", + 
"language": "sv", + "radius": 150, + }, + "key": "core.config", + "version": 1, + "minor_version": 4, + } + await async_process_ha_core_config(hass, {"allowlist_external_dirs": "/etc"}) + + assert hass.config.latitude == 55 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert hass.config.external_url == "https://www.example.com" + assert hass.config.internal_url == "http://example.local" + assert hass.config.currency == "EUR" + assert hass.config.country == "SE" + assert hass.config.language == "sv" + assert hass.config.radius == 150 + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.config_source is ConfigSource.STORAGE + + +async def test_loading_configuration_from_storage_with_yaml_only( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core and YAML config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + }, + "key": "core.config", + "version": 1, + } + await async_process_ha_core_config( + hass, {"media_dirs": {"mymedia": "/usr"}, "allowlist_external_dirs": "/etc"} + ) + + assert hass.config.latitude == 55 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"mymedia": "/usr"} + assert hass.config.config_source is ConfigSource.STORAGE + + +async def test_migration_and_updating_configuration( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test updating configuration stores the new configuration.""" + core_data = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "imperial", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "currency": "BTC", + }, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await async_process_ha_core_config(hass, {"allowlist_external_dirs": "/etc"}) + await hass.config.async_update(latitude=50, currency="USD") + + expected_new_core_data = copy.deepcopy(core_data) + # From async_update above + expected_new_core_data["data"]["latitude"] = 50 + expected_new_core_data["data"]["currency"] = "USD" + # 1.1 -> 1.2 store migration with migrated unit system + expected_new_core_data["data"]["unit_system_v2"] = "us_customary" + # 1.1 -> 1.3 defaults for country and language + expected_new_core_data["data"]["country"] = None + expected_new_core_data["data"]["language"] = "en" + # 1.1 -> 1.4 defaults for zone radius + expected_new_core_data["data"]["radius"] = 100 + # Bumped minor version + expected_new_core_data["minor_version"] = 4 + assert hass_storage["core.config"] == expected_new_core_data + assert hass.config.latitude == 50 + assert hass.config.currency == "USD" + assert hass.config.country is None + assert hass.config.language == "en" + assert hass.config.radius == 100 + + +async def 
test_override_stored_configuration( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test loading core and YAML config onto hass object.""" + hass_storage["core.config"] = { + "data": { + "elevation": 10, + "latitude": 55, + "location_name": "Home", + "longitude": 13, + "time_zone": "Europe/Copenhagen", + "unit_system": "metric", + }, + "key": "core.config", + "version": 1, + } + await async_process_ha_core_config( + hass, {"latitude": 60, "allowlist_external_dirs": "/etc"} + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 13 + assert hass.config.elevation == 10 + assert hass.config.location_name == "Home" + assert hass.config.units is METRIC_SYSTEM + assert hass.config.time_zone == "Europe/Copenhagen" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert hass.config.config_source is ConfigSource.YAML + + +async def test_loading_configuration(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "America/New_York", + "allowlist_external_dirs": "/etc", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "media_dirs": {"mymedia": "/usr"}, + "debug": True, + "currency": "EUR", + "country": "SE", + "language": "sv", + "radius": 150, + "webrtc": {"ice_servers": [{"url": "stun:custom_stun_server:3478"}]}, + }, + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 50 + assert hass.config.elevation == 25 + assert hass.config.location_name == "Huis" + assert hass.config.units is US_CUSTOMARY_SYSTEM + assert hass.config.time_zone == "America/New_York" + assert hass.config.external_url == "https://www.example.com" + assert hass.config.internal_url == "http://example.local" + assert len(hass.config.allowlist_external_dirs) == 3 + assert "/etc" in hass.config.allowlist_external_dirs + assert "/usr" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"mymedia": "/usr"} + assert hass.config.config_source is ConfigSource.YAML + assert hass.config.debug is True + assert hass.config.currency == "EUR" + assert hass.config.country == "SE" + assert hass.config.language == "sv" + assert hass.config.radius == 150 + assert hass.config.webrtc == RTCConfiguration( + [RTCIceServer(urls=["stun:custom_stun_server:3478"])] + ) + + +@pytest.mark.parametrize( + ("minor_version", "users", "user_data", "default_language"), + [ + (2, (), {}, "en"), + (2, ({"is_owner": True},), {}, "en"), + ( + 2, + ({"id": "user1", "is_owner": True},), + {"user1": {"language": {"language": "sv"}}}, + "sv", + ), + ( + 2, + ({"id": "user1", "is_owner": False},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + (3, (), {}, "en"), + (3, ({"is_owner": True},), {}, "en"), + ( + 3, + ({"id": "user1", "is_owner": True},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + ( + 3, + ({"id": "user1", "is_owner": False},), + {"user1": {"language": {"language": "sv"}}}, + "en", + ), + ], +) +async def test_language_default( + hass: HomeAssistant, + hass_storage: dict[str, Any], + minor_version, + users, + user_data, + default_language, +) -> None: + """Test language config default to owner user's language during migration. 
+ + This should only happen if the core store version < 1.3 + """ + core_data = { + "data": {}, + "key": "core.config", + "version": 1, + "minor_version": minor_version, + } + hass_storage["core.config"] = dict(core_data) + + for user_config in users: + user = MockUser(**user_config).add_to_hass(hass) + if user.id not in user_data: + continue + storage_key = f"frontend.user_data_{user.id}" + hass_storage[storage_key] = { + "key": storage_key, + "version": 1, + "data": user_data[user.id], + } + + await async_process_ha_core_config( + hass, + {}, + ) + assert hass.config.language == default_language + + +async def test_loading_configuration_default_media_dirs_docker( + hass: HomeAssistant, +) -> None: + """Test loading core config onto hass object.""" + with patch("homeassistant.core_config.is_docker_env", return_value=True): + await async_process_ha_core_config( + hass, + { + "name": "Huis", + }, + ) + + assert hass.config.location_name == "Huis" + assert len(hass.config.allowlist_external_dirs) == 2 + assert "/media" in hass.config.allowlist_external_dirs + assert hass.config.media_dirs == {"local": "/media"} + + +async def test_loading_configuration_from_packages(hass: HomeAssistant) -> None: + """Test loading packages config onto hass object config.""" + await async_process_ha_core_config( + hass, + { + "latitude": 39, + "longitude": -1, + "elevation": 500, + "name": "Huis", + "unit_system": "metric", + "time_zone": "Europe/Madrid", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "packages": { + "package_1": {"wake_on_lan": None}, + "package_2": { + "light": {"platform": "hue"}, + "media_extractor": None, + "sun": None, + }, + }, + }, + ) + + # Empty packages not allowed + with pytest.raises(MultipleInvalid): + await async_process_ha_core_config( + hass, + { + "latitude": 39, + "longitude": -1, + "elevation": 500, + "name": "Huis", + "unit_system": "metric", + "time_zone": "Europe/Madrid", + "packages": {"empty_package": None}, + }, + ) + + +@pytest.mark.parametrize( + ("unit_system_name", "expected_unit_system"), + [ + ("metric", METRIC_SYSTEM), + ("imperial", US_CUSTOMARY_SYSTEM), + ("us_customary", US_CUSTOMARY_SYSTEM), + ], +) +async def test_loading_configuration_unit_system( + hass: HomeAssistant, unit_system_name: str, expected_unit_system: UnitSystem +) -> None: + """Test backward compatibility when loading core config.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": unit_system_name, + "time_zone": "America/New_York", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + }, + ) + + assert hass.config.units is expected_unit_system + + +async def test_merge_customize(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + "customize": {"a.a": {"friendly_name": "A"}}, + "packages": { + "pkg1": {"homeassistant": {"customize": {"b.b": {"friendly_name": "BB"}}}} + }, + } + await async_process_ha_core_config(hass, core_config) + + assert hass.data[DATA_CUSTOMIZE].get("b.b") == {"friendly_name": "BB"} + + +async def test_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading auth provider config onto hass object.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + 
"time_zone": "GMT", + CONF_AUTH_PROVIDERS: [ + {"type": "homeassistant"}, + ], + CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp", "id": "second"}], + } + if hasattr(hass, "auth"): + del hass.auth + await async_process_ha_core_config(hass, core_config) + + assert len(hass.auth.auth_providers) == 1 + assert hass.auth.auth_providers[0].type == "homeassistant" + assert len(hass.auth.auth_mfa_modules) == 2 + assert hass.auth.auth_mfa_modules[0].id == "totp" + assert hass.auth.auth_mfa_modules[1].id == "second" + + +async def test_auth_provider_config_default(hass: HomeAssistant) -> None: + """Test loading default auth provider config.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + } + if hasattr(hass, "auth"): + del hass.auth + await async_process_ha_core_config(hass, core_config) + + assert len(hass.auth.auth_providers) == 1 + assert hass.auth.auth_providers[0].type == "homeassistant" + assert len(hass.auth.auth_mfa_modules) == 1 + assert hass.auth.auth_mfa_modules[0].id == "totp" + + +async def test_disallowed_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth provider is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_PROVIDERS: [ + { + "type": "insecure_example", + "users": [ + { + "username": "test-user", + "password": "test-pass", + "name": "Test Name", + } + ], + } + ], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_duplicated_auth_provider_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth provider is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_PROVIDERS: [{"type": "homeassistant"}, {"type": "homeassistant"}], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_auth_mfa_module_config(hass: HomeAssistant) -> None: + """Test loading insecure example auth mfa module is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_MFA_MODULES: [ + { + "type": "insecure_example", + "data": [{"user_id": "mock-user", "pin": "test-pin"}], + } + ], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_disallowed_duplicated_auth_mfa_module_config( + hass: HomeAssistant, +) -> None: + """Test loading insecure example auth mfa module is disallowed.""" + core_config = { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "GMT", + CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp"}], + } + with pytest.raises(Invalid): + await async_process_ha_core_config(hass, core_config) + + +async def test_core_config_schema_historic_currency( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await async_process_ha_core_config(hass, {"currency": "LTT"}) + + issue = issue_registry.async_get_issue("homeassistant", "historic_currency") + assert issue + assert issue.translation_placeholders == {"currency": "LTT"} + + +async def test_core_store_historic_currency( + hass: 
HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry +) -> None: + """Test core config store.""" + core_data = { + "data": { + "currency": "LTT", + }, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await async_process_ha_core_config(hass, {}) + + issue_id = "historic_currency" + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert issue + assert issue.translation_placeholders == {"currency": "LTT"} + + await hass.config.async_update(currency="EUR") + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert not issue + + +async def test_core_config_schema_no_country( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await async_process_ha_core_config(hass, {}) + + issue = issue_registry.async_get_issue("homeassistant", "country_not_configured") + assert issue + + +async def test_core_store_no_country( + hass: HomeAssistant, hass_storage: dict[str, Any], issue_registry: ir.IssueRegistry +) -> None: + """Test core config store.""" + core_data = { + "data": {}, + "key": "core.config", + "version": 1, + "minor_version": 1, + } + hass_storage["core.config"] = dict(core_data) + await async_process_ha_core_config(hass, {}) + + issue_id = "country_not_configured" + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert issue + + await hass.config.async_update(country="SE") + issue = issue_registry.async_get_issue("homeassistant", issue_id) + assert not issue + + +async def test_configuration_legacy_template_is_removed(hass: HomeAssistant) -> None: + """Test loading core config onto hass object.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Huis", + "unit_system": "imperial", + "time_zone": "America/New_York", + "allowlist_external_dirs": "/etc", + "external_url": "https://www.example.com", + "internal_url": "http://example.local", + "media_dirs": {"mymedia": "/usr"}, + "legacy_templates": True, + "debug": True, + "currency": "EUR", + "country": "SE", + "language": "sv", + "radius": 150, + }, + ) + + assert not getattr(hass.config, "legacy_templates") + + +async def test_config_defaults() -> None: + """Test config defaults.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + assert config.hass is hass + assert config.latitude == 0 + assert config.longitude == 0 + assert config.elevation == 0 + assert config.location_name == "Home" + assert config.time_zone == "UTC" + assert config.internal_url is None + assert config.external_url is None + assert config.config_source is ConfigSource.DEFAULT + assert config.skip_pip is False + assert config.skip_pip_packages == [] + assert config.components == set() + assert config.api is None + assert config.config_dir == "/test/ha-config" + assert config.allowlist_external_dirs == set() + assert config.allowlist_external_urls == set() + assert config.media_dirs == {} + assert config.recovery_mode is False + assert config.legacy_templates is False + assert config.currency == "EUR" + assert config.country is None + assert config.language == "en" + assert config.radius == 100 + + +async def test_config_path_with_file() -> None: + """Test get_config_path method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + assert config.path("test.conf") == "/test/ha-config/test.conf" + + +async def test_config_path_with_dir_and_file() -> None: + 
"""Test get_config_path method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf" + + +async def test_config_as_dict() -> None: + """Test as dict.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + type(config.hass.state).value = PropertyMock(return_value="RUNNING") + expected = { + "latitude": 0, + "longitude": 0, + "elevation": 0, + CONF_UNIT_SYSTEM: METRIC_SYSTEM.as_dict(), + "location_name": "Home", + "time_zone": "UTC", + "components": [], + "config_dir": "/test/ha-config", + "whitelist_external_dirs": [], + "allowlist_external_dirs": [], + "allowlist_external_urls": [], + "version": __version__, + "config_source": ConfigSource.DEFAULT, + "recovery_mode": False, + "state": "RUNNING", + "external_url": None, + "internal_url": None, + "currency": "EUR", + "country": None, + "language": "en", + "safe_mode": False, + "debug": False, + "radius": 100, + } + + assert expected == config.as_dict() + + +async def test_config_is_allowed_path() -> None: + """Test is_allowed_path method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + with TemporaryDirectory() as tmp_dir: + # The created dir is in /tmp. This is a symlink on OS X + # causing this test to fail unless we resolve path first. + config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} + + test_file = os.path.join(tmp_dir, "test.jpg") + await asyncio.get_running_loop().run_in_executor( + None, Path(test_file).write_text, "test" + ) + + valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] + for path in valid: + assert config.is_allowed_path(path) + + config.allowlist_external_dirs = {"/home", "/var"} + + invalid = [ + "/hass/config/secure", + "/etc/passwd", + "/root/secure_file", + "/var/../etc/passwd", + test_file, + ] + for path in invalid: + assert not config.is_allowed_path(path) + + with pytest.raises(AssertionError): + config.is_allowed_path(None) + + +async def test_config_is_allowed_external_url() -> None: + """Test is_allowed_external_url method.""" + hass = Mock() + hass.data = {} + config = Config(hass, "/test/ha-config") + config.allowlist_external_urls = [ + "http://x.com/", + "https://y.com/bla/", + "https://z.com/images/1.jpg/", + ] + + valid = [ + "http://x.com/1.jpg", + "http://x.com", + "https://y.com/bla/", + "https://y.com/bla/2.png", + "https://z.com/images/1.jpg", + ] + for url in valid: + assert config.is_allowed_external_url(url) + + invalid = [ + "https://a.co", + "https://y.com/bla_wrong", + "https://y.com/bla/../image.jpg", + "https://z.com/images", + ] + for url in invalid: + assert not config.is_allowed_external_url(url) + + +async def test_event_on_update(hass: HomeAssistant) -> None: + """Test that event is fired on update.""" + events = async_capture_events(hass, EVENT_CORE_CONFIG_UPDATE) + + assert hass.config.latitude != 12 + + await hass.config.async_update(latitude=12) + await hass.async_block_till_done() + + assert hass.config.latitude == 12 + assert len(events) == 1 + assert events[0].data == {"latitude": 12} + + +async def test_bad_timezone_raises_value_error(hass: HomeAssistant) -> None: + """Test bad timezone raises ValueError.""" + with pytest.raises(ValueError): + await hass.config.async_update(time_zone="not_a_timezone") + + +async def test_additional_data_in_core_config( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test that we can handle additional data in core configuration.""" + config 
= Config(hass, "/test/ha-config") + config.async_initialize() + hass_storage[CORE_STORAGE_KEY] = { + "version": 1, + "data": {"location_name": "Test Name", "additional_valid_key": "value"}, + } + await config.async_load() + assert config.location_name == "Test Name" + + +async def test_incorrect_internal_external_url( + hass: HomeAssistant, hass_storage: dict[str, Any], caplog: pytest.LogCaptureFixture +) -> None: + """Test that we warn when detecting invalid internal/external url.""" + config = Config(hass, "/test/ha-config") + config.async_initialize() + + hass_storage[CORE_STORAGE_KEY] = { + "version": 1, + "data": { + "internal_url": None, + "external_url": None, + }, + } + await config.async_load() + assert "Invalid external_url set" not in caplog.text + assert "Invalid internal_url set" not in caplog.text + + config = Config(hass, "/test/ha-config") + config.async_initialize() + + hass_storage[CORE_STORAGE_KEY] = { + "version": 1, + "data": { + "internal_url": "https://community.home-assistant.io/profile", + "external_url": "https://www.home-assistant.io/blue", + }, + } + await config.async_load() + assert "Invalid external_url set" in caplog.text + assert "Invalid internal_url set" in caplog.text + + +async def test_top_level_components(hass: HomeAssistant) -> None: + """Test top level components are updated when components change.""" + hass.config.components.add("homeassistant") + assert hass.config.components == {"homeassistant"} + assert hass.config.top_level_components == {"homeassistant"} + hass.config.components.add("homeassistant.scene") + assert hass.config.components == {"homeassistant", "homeassistant.scene"} + assert hass.config.top_level_components == {"homeassistant"} + hass.config.components.remove("homeassistant") + assert hass.config.components == {"homeassistant.scene"} + assert hass.config.top_level_components == set() + with pytest.raises(ValueError): + hass.config.components.remove("homeassistant.scene") + with pytest.raises(NotImplementedError): + hass.config.components.discard("homeassistant") + + +async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: + """Test debug mode defaults to off.""" + assert not hass.config.debug + + +async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: + """Test set_time_zone is deprecated.""" + with pytest.raises( + RuntimeError, + match=re.escape( + "Detected code that set the time zone using set_time_zone instead of " + "async_set_time_zone which will stop working in Home Assistant 2025.6. 
" + "Please report this issue.", + ), + ): + await hass.config.set_time_zone("America/New_York") diff --git a/tests/test_loader.py b/tests/test_loader.py index 01305dde002f22..57d3d6fa832530 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -6,7 +6,7 @@ import sys import threading from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock, patch from awesomeversion import AwesomeVersion import pytest @@ -583,6 +583,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.dependencies == ["test-dep"] assert integration.requirements == ["test-req==1.0.0"] assert integration.is_built_in is True + assert integration.overwrites_built_in is False assert integration.version == "1.0.0" integration = loader.Integration( @@ -597,6 +598,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: }, ) assert integration.is_built_in is False + assert integration.overwrites_built_in is True assert integration.homekit is None assert integration.zeroconf is None assert integration.dhcp is None @@ -619,6 +621,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: }, ) assert integration.is_built_in is False + assert integration.overwrites_built_in is True assert integration.homekit is None assert integration.zeroconf == [{"type": "_hue._tcp.local.", "name": "hue*"}] assert integration.dhcp is None @@ -815,7 +818,7 @@ async def test_get_custom_components(hass: HomeAssistant) -> None: test_1_integration = _get_test_integration(hass, "test_1", False) test_2_integration = _get_test_integration(hass, "test_2", True) - name = "homeassistant.loader._async_get_custom_components" + name = "homeassistant.loader._get_custom_components" with patch(name) as mock_get: mock_get.return_value = { "test_1": test_1_integration, @@ -828,6 +831,29 @@ async def test_get_custom_components(hass: HomeAssistant) -> None: mock_get.assert_called_once_with(hass) +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_custom_component_overwriting_core(hass: HomeAssistant) -> None: + """Test loading a custom component that overwrites a core component.""" + # First load the core 'light' component + core_light = await loader.async_get_integration(hass, "light") + assert core_light.is_built_in is True + + # create a mock custom 'light' component + mock_integration( + hass, + MockModule("light", partial_manifest={"version": "1.0.0"}), + built_in=False, + ) + + # Try to load the 'light' component again + custom_light = await loader.async_get_integration(hass, "light") + + # Assert that we got the custom component instead of the core one + assert custom_light.is_built_in is False + assert custom_light.overwrites_built_in is True + assert custom_light.version == "1.0.0" + + async def test_get_config_flows(hass: HomeAssistant) -> None: """Verify that custom components with config_flow are available.""" test_1_integration = _get_test_integration(hass, "test_1", False) @@ -1269,26 +1295,29 @@ async def test_config_folder_not_in_path() -> None: import tests.testing_config.check_config_not_in_path # noqa: F401 +@pytest.mark.parametrize( + ("integration_frame_path", "expected"), + [ + pytest.param( + "custom_components/test_integration_frame", True, id="custom integration" + ), + pytest.param( + "homeassistant/components/test_integration_frame", + False, + id="core integration", + ), + pytest.param("homeassistant/test_integration_frame", False, id="core"), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") 
+@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_hass_components_use_reported( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + expected: bool, ) -> None: - """Test that use of hass.components is reported.""" - mock_integration_frame.filename = ( - "/home/paulus/homeassistant/custom_components/demo/light.py" - ) - integration_frame = frame.IntegrationFrame( - custom_integration=True, - frame=mock_integration_frame, - integration="test_integration_frame", - module="custom_components.test_integration_frame", - relative_filename="custom_components/test_integration_frame/__init__.py", - ) - + """Test whether use of hass.components is reported.""" with ( - patch( - "homeassistant.helpers.frame.get_integration_frame", - return_value=integration_frame, - ), patch( "homeassistant.components.http.start_http_server_and_save_config", return_value=None, @@ -1296,10 +1325,11 @@ async def test_hass_components_use_reported( ): await hass.components.http.start_http_server_and_save_config(hass, [], None) - assert ( + reported = ( "Detected that custom integration 'test_integration_frame'" " accesses hass.components.http. This is deprecated" ) in caplog.text + assert reported == expected async def test_async_get_component_preloads_config_and_config_flow( @@ -1961,24 +1991,29 @@ async def test_has_services(hass: HomeAssistant) -> None: assert integration.has_services is True +@pytest.mark.parametrize( + ("integration_frame_path", "expected"), + [ + pytest.param( + "custom_components/test_integration_frame", True, id="custom integration" + ), + pytest.param( + "homeassistant/components/test_integration_frame", + False, + id="core integration", + ), + pytest.param("homeassistant/test_integration_frame", False, id="core"), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_hass_helpers_use_reported( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + expected: bool, ) -> None: - """Test that use of hass.components is reported.""" - integration_frame = frame.IntegrationFrame( - custom_integration=True, - frame=mock_integration_frame, - integration="test_integration_frame", - module="custom_components.test_integration_frame", - relative_filename="custom_components/test_integration_frame/__init__.py", - ) - + """Test whether use of hass.helpers is reported.""" with ( - patch.object(frame, "_REPORTED_INTEGRATIONS", new=set()), - patch( - "homeassistant.helpers.frame.get_integration_frame", - return_value=integration_frame, - ), patch( "homeassistant.helpers.aiohttp_client.async_get_clientsession", return_value=None, @@ -1986,10 +2021,11 @@ async def test_hass_helpers_use_reported( ): hass.helpers.aiohttp_client.async_get_clientsession() - assert ( + reported = ( "Detected that custom integration 'test_integration_frame' " "accesses hass.helpers.aiohttp_client. 
This is deprecated" ) in caplog.text + assert reported == expected async def test_manifest_json_fragment_round_trip(hass: HomeAssistant) -> None: diff --git a/tests/test_main.py b/tests/test_main.py index 080787311a01ee..d32ca59a846f38 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -3,7 +3,7 @@ from unittest.mock import PropertyMock, patch from homeassistant import __main__ as main -from homeassistant.const import REQUIRED_PYTHON_VER +from homeassistant.const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE @patch("sys.exit") @@ -86,3 +86,13 @@ def parse_args(*args): assert mock_exit.called is False args = parse_args("--skip-pip", "--skip-pip-packages", "foo") assert mock_exit.called is True + + +def test_restart_after_backup_restore() -> None: + """Test restarting if we restored a backup.""" + with ( + patch("sys.argv", ["python"]), + patch("homeassistant.__main__.restore_backup", return_value=True), + ): + exit_code = main.main() + assert exit_code == RESTART_EXIT_CODE diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 2885fa30036862..191e1b7368cc66 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -585,7 +585,8 @@ async def test_discovery_requirements_mqtt(hass: HomeAssistant) -> None: ) as mock_process: await async_get_integration_with_requirements(hass, "mqtt_comp") - assert len(mock_process.mock_calls) == 1 + assert len(mock_process.mock_calls) == 2 + # one for mqtt and one for hassio assert mock_process.mock_calls[0][1][1] == mqtt.requirements diff --git a/tests/util/test_package.py b/tests/util/test_package.py index 1015225491471c..b7497d620cd2b7 100644 --- a/tests/util/test_package.py +++ b/tests/util/test_package.py @@ -84,14 +84,18 @@ async def communicate(input=None): return async_popen -@pytest.mark.usefixtures("mock_sys", "mock_venv") -def test_install(mock_popen: MagicMock, mock_env_copy: MagicMock) -> None: +@pytest.mark.usefixtures("mock_venv") +def test_install( + mock_popen: MagicMock, mock_env_copy: MagicMock, mock_sys: MagicMock +) -> None: """Test an install attempt on a package that doesn't exist.""" env = mock_env_copy() assert package.install_package(TEST_NEW_REQ, False) assert mock_popen.call_count == 2 assert mock_popen.mock_calls[0] == call( [ + mock_sys.executable, + "-m", "uv", "pip", "install", @@ -109,8 +113,10 @@ def test_install(mock_popen: MagicMock, mock_env_copy: MagicMock) -> None: assert mock_popen.return_value.communicate.call_count == 1 -@pytest.mark.usefixtures("mock_sys", "mock_venv") -def test_install_with_timeout(mock_popen: MagicMock, mock_env_copy: MagicMock) -> None: +@pytest.mark.usefixtures("mock_venv") +def test_install_with_timeout( + mock_popen: MagicMock, mock_env_copy: MagicMock, mock_sys: MagicMock +) -> None: """Test an install attempt on a package that doesn't exist with a timeout set.""" env = mock_env_copy() assert package.install_package(TEST_NEW_REQ, False, timeout=10) @@ -118,6 +124,8 @@ def test_install_with_timeout(mock_popen: MagicMock, mock_env_copy: MagicMock) - env["HTTP_TIMEOUT"] = "10" assert mock_popen.mock_calls[0] == call( [ + mock_sys.executable, + "-m", "uv", "pip", "install", @@ -135,14 +143,16 @@ def test_install_with_timeout(mock_popen: MagicMock, mock_env_copy: MagicMock) - assert mock_popen.return_value.communicate.call_count == 1 -@pytest.mark.usefixtures("mock_sys", "mock_venv") -def test_install_upgrade(mock_popen, mock_env_copy) -> None: +@pytest.mark.usefixtures("mock_venv") +def test_install_upgrade(mock_popen, mock_env_copy, mock_sys) -> None: 
"""Test an upgrade attempt on a package.""" env = mock_env_copy() assert package.install_package(TEST_NEW_REQ) assert mock_popen.call_count == 2 assert mock_popen.mock_calls[0] == call( [ + mock_sys.executable, + "-m", "uv", "pip", "install", @@ -183,6 +193,8 @@ def test_install_target( mock_venv.return_value = is_venv mock_sys.platform = "linux" args = [ + mock_sys.executable, + "-m", "uv", "pip", "install", @@ -226,6 +238,8 @@ def test_install_pip_compatibility_no_workaround( mock_venv.return_value = in_venv mock_sys.platform = "linux" args = [ + mock_sys.executable, + "-m", "uv", "pip", "install", @@ -257,6 +271,8 @@ def test_install_pip_compatibility_use_workaround( mock_sys.executable = python site_dir = "/site_dir" args = [ + mock_sys.executable, + "-m", "uv", "pip", "install", @@ -292,8 +308,8 @@ def test_install_error(caplog: pytest.LogCaptureFixture, mock_popen) -> None: assert record.levelname == "ERROR" -@pytest.mark.usefixtures("mock_sys", "mock_venv") -def test_install_constraint(mock_popen, mock_env_copy) -> None: +@pytest.mark.usefixtures("mock_venv") +def test_install_constraint(mock_popen, mock_env_copy, mock_sys) -> None: """Test install with constraint file on not installed package.""" env = mock_env_copy() constraints = "constraints_file.txt" @@ -301,6 +317,8 @@ def test_install_constraint(mock_popen, mock_env_copy) -> None: assert mock_popen.call_count == 2 assert mock_popen.mock_calls[0] == call( [ + mock_sys.executable, + "-m", "uv", "pip", "install", diff --git a/tests/util/test_timeout.py b/tests/util/test_timeout.py index 1c4b06d99b4533..5e8261c4c02b79 100644 --- a/tests/util/test_timeout.py +++ b/tests/util/test_timeout.py @@ -146,6 +146,62 @@ async def test_simple_global_timeout_freeze_with_executor_job( await hass.async_add_executor_job(time.sleep, 0.3) +async def test_simple_global_timeout_does_not_leak_upward( + hass: HomeAssistant, +) -> None: + """Test a global timeout does not leak upward.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + async with timeout.async_timeout(0.1): + cancelling_inside_timeout = current_task.cancelling() + await asyncio.sleep(0.3) + + assert cancelling_inside_timeout == 0 + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + +async def test_simple_global_timeout_does_swallow_cancellation( + hass: HomeAssistant, +) -> None: + """Test a global timeout does not swallow cancellation.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + async def task_with_timeout() -> None: + nonlocal cancelling_inside_timeout + new_task = asyncio.current_task() + assert new_task is not None + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + cancelling_inside_timeout = new_task.cancelling() + async with timeout.async_timeout(0.1): + await asyncio.sleep(0.3) + + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + task = asyncio.create_task(task_with_timeout()) + await asyncio.sleep(0) + task.cancel() + assert task.cancelling() == 1 + + assert cancelling_inside_timeout == 0 + # Cancellation should not leak into the current task + assert current_task.cancelling() == 0 + # Cancellation should not be swallowed if the task is cancelled + # and it also times out + await 
asyncio.sleep(0) + with pytest.raises(asyncio.CancelledError): + await task + assert task.cancelling() == 1 + + async def test_simple_global_timeout_freeze_reset() -> None: """Test a simple global timeout freeze reset.""" timeout = TimeoutManager() @@ -166,6 +222,62 @@ async def test_simple_zone_timeout() -> None: await asyncio.sleep(0.3) +async def test_simple_zone_timeout_does_not_leak_upward( + hass: HomeAssistant, +) -> None: + """Test a zone timeout does not leak upward.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + async with timeout.async_timeout(0.1, "test"): + cancelling_inside_timeout = current_task.cancelling() + await asyncio.sleep(0.3) + + assert cancelling_inside_timeout == 0 + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + +async def test_simple_zone_timeout_does_swallow_cancellation( + hass: HomeAssistant, +) -> None: + """Test a zone timeout does not swallow cancellation.""" + timeout = TimeoutManager() + current_task = asyncio.current_task() + assert current_task is not None + cancelling_inside_timeout = None + + async def task_with_timeout() -> None: + nonlocal cancelling_inside_timeout + new_task = asyncio.current_task() + assert new_task is not None + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + async with timeout.async_timeout(0.1, "test"): + cancelling_inside_timeout = current_task.cancelling() + await asyncio.sleep(0.3) + + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + task = asyncio.create_task(task_with_timeout()) + await asyncio.sleep(0) + task.cancel() + assert task.cancelling() == 1 + + # Cancellation should not leak into the current task + assert cancelling_inside_timeout == 0 + assert current_task.cancelling() == 0 + # Cancellation should not be swallowed if the task is cancelled + # and it also times out + await asyncio.sleep(0) + with pytest.raises(asyncio.CancelledError): + await task + assert task.cancelling() == 1 + + async def test_multiple_zone_timeout() -> None: """Test a simple zone timeout.""" timeout = TimeoutManager() @@ -327,7 +439,7 @@ async def test_simple_zone_timeout_freeze_without_timeout_exeption() -> None: await asyncio.sleep(0.4) -async def test_simple_zone_timeout_zone_with_timeout_exeption() -> None: +async def test_simple_zone_timeout_zone_with_timeout_exception() -> None: """Test a simple zone timeout freeze on a zone that does not have a timeout set.""" timeout = TimeoutManager() diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 630c3d556f16ea..a57cdde821fc8e 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -11,6 +11,7 @@ CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -32,6 +33,7 @@ from homeassistant.util import unit_conversion from homeassistant.util.unit_conversion import ( BaseUnitConverter, + BloodGlugoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -59,6 +61,7 @@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { converter: sorted(converter.VALID_UNITS, key=lambda x: (x is None, x)) for converter in ( + BloodGlugoseConcentrationConverter, 
ConductivityConverter, DataRateConverter, DistanceConverter, @@ -80,9 +83,14 @@ # Dict containing all converters with a corresponding unit ratio. _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, float]] = { + BloodGlugoseConcentrationConverter: ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + 18, + ), ConductivityConverter: ( - UnitOfConductivity.MICROSIEMENS, - UnitOfConductivity.MILLISIEMENS, + UnitOfConductivity.MICROSIEMENS_PER_CM, + UnitOfConductivity.MILLISIEMENS_PER_CM, 1000, ), DataRateConverter: ( @@ -130,13 +138,99 @@ _CONVERTED_VALUE: dict[ type[BaseUnitConverter], list[tuple[float, str | None, float, str | None]] ] = { + BloodGlugoseConcentrationConverter: [ + ( + 90, + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + 5, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + ), + ( + 1, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + 18, + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + ), + ], ConductivityConverter: [ + # Deprecated to deprecated (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), (5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS), (5, UnitOfConductivity.MILLISIEMENS, 5e3, UnitOfConductivity.MICROSIEMENS), (5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS), (5e6, UnitOfConductivity.MICROSIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), (5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS), + # Deprecated to new + (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS_PER_CM), + (5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS_PER_CM), + ( + 5, + UnitOfConductivity.MILLISIEMENS, + 5e3, + UnitOfConductivity.MICROSIEMENS_PER_CM, + ), + (5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS_PER_CM), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS, + 5e3, + UnitOfConductivity.MILLISIEMENS_PER_CM, + ), + (5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS_PER_CM), + # New to deprecated + (5, UnitOfConductivity.SIEMENS_PER_CM, 5e3, UnitOfConductivity.MILLISIEMENS), + (5, UnitOfConductivity.SIEMENS_PER_CM, 5e6, UnitOfConductivity.MICROSIEMENS), + ( + 5, + UnitOfConductivity.MILLISIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MICROSIEMENS, + ), + (5, UnitOfConductivity.MILLISIEMENS_PER_CM, 5e-3, UnitOfConductivity.SIEMENS), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MILLISIEMENS, + ), + (5e6, UnitOfConductivity.MICROSIEMENS_PER_CM, 5, UnitOfConductivity.SIEMENS), + # New to new + ( + 5, + UnitOfConductivity.SIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MILLISIEMENS_PER_CM, + ), + ( + 5, + UnitOfConductivity.SIEMENS_PER_CM, + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + ), + ( + 5, + UnitOfConductivity.MILLISIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MICROSIEMENS_PER_CM, + ), + ( + 5, + UnitOfConductivity.MILLISIEMENS_PER_CM, + 5e-3, + UnitOfConductivity.SIEMENS_PER_CM, + ), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + 5e3, + UnitOfConductivity.MILLISIEMENS_PER_CM, + ), + ( + 5e6, + UnitOfConductivity.MICROSIEMENS_PER_CM, + 5, + UnitOfConductivity.SIEMENS_PER_CM, + ), ], DataRateConverter: [ (8e3, UnitOfDataRate.BITS_PER_SECOND, 8, UnitOfDataRate.KILOBITS_PER_SECOND), @@ -285,10 +379,16 @@ EnergyConverter: [ (10, UnitOfEnergy.WATT_HOUR, 0.01, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00001, UnitOfEnergy.MEGA_WATT_HOUR), + (10, 
UnitOfEnergy.WATT_HOUR, 0.00000001, UnitOfEnergy.GIGA_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 0.00000000001, UnitOfEnergy.TERA_WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 10000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 0.01, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e6, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e9, UnitOfEnergy.WATT_HOUR), + (10, UnitOfEnergy.TERA_WATT_HOUR, 10e9, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.TERA_WATT_HOUR, 10e12, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.GIGA_JOULE, 2777.78, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.GIGA_JOULE, 2.77778, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_JOULE, 2.77778, UnitOfEnergy.KILO_WATT_HOUR), @@ -367,6 +467,9 @@ ], PowerConverter: [ (10, UnitOfPower.KILO_WATT, 10000, UnitOfPower.WATT), + (10, UnitOfPower.MEGA_WATT, 10e6, UnitOfPower.WATT), + (10, UnitOfPower.GIGA_WATT, 10e9, UnitOfPower.WATT), + (10, UnitOfPower.TERA_WATT, 10e12, UnitOfPower.WATT), (10, UnitOfPower.WATT, 0.01, UnitOfPower.KILO_WATT), ], PressureConverter: [ diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index dbd7f1d2e99b13..12a7eca5f9d82b 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -6,7 +6,6 @@ import os import pathlib from typing import Any -import unittest from unittest.mock import Mock, patch import pytest @@ -19,7 +18,7 @@ from homeassistant.util import yaml from homeassistant.util.yaml import loader as yaml_loader -from tests.common import extract_stack_to_frame, get_test_config_dir, patch_yaml_files +from tests.common import extract_stack_to_frame @pytest.fixture(params=["enable_c_loader", "disable_c_loader"]) @@ -396,145 +395,6 @@ def test_dump_unicode() -> None: assert yaml.dump({"a": None, "b": "привет"}) == "a:\nb: привет\n" -FILES = {} - - -def load_yaml(fname, string, secrets=None): - """Write a string to file and return the parsed yaml.""" - FILES[fname] = string - with patch_yaml_files(FILES): - return load_yaml_config_file(fname, secrets) - - -class TestSecrets(unittest.TestCase): - """Test the secrets parameter in the yaml utility.""" - - def setUp(self): - """Create & load secrets file.""" - config_dir = get_test_config_dir() - self._yaml_path = os.path.join(config_dir, YAML_CONFIG_FILE) - self._secret_path = os.path.join(config_dir, yaml.SECRET_YAML) - self._sub_folder_path = os.path.join(config_dir, "subFolder") - self._unrelated_path = os.path.join(config_dir, "unrelated") - - load_yaml( - self._secret_path, - ( - "http_pw: pwhttp\n" - "comp1_un: un1\n" - "comp1_pw: pw1\n" - "stale_pw: not_used\n" - "logger: debug\n" - ), - ) - self._yaml = load_yaml( - self._yaml_path, - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - yaml_loader.Secrets(config_dir), - ) - - def tearDown(self): - """Clean up secrets.""" - FILES.clear() - - def test_secrets_from_yaml(self): - """Did secrets load ok.""" - expected = {"api_password": "pwhttp"} - assert expected == self._yaml["http"] - - expected = {"username": "un1", "password": "pw1"} - assert expected == self._yaml["component"] - - def test_secrets_from_parent_folder(self): - """Test loading secrets from parent folder.""" - expected = {"api_password": "pwhttp"} - self._yaml = load_yaml( - 
os.path.join(self._sub_folder_path, "sub.yaml"), - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - yaml_loader.Secrets(get_test_config_dir()), - ) - - assert expected == self._yaml["http"] - - def test_secret_overrides_parent(self): - """Test loading current directory secret overrides the parent.""" - expected = {"api_password": "override"} - load_yaml( - os.path.join(self._sub_folder_path, yaml.SECRET_YAML), "http_pw: override" - ) - self._yaml = load_yaml( - os.path.join(self._sub_folder_path, "sub.yaml"), - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - yaml_loader.Secrets(get_test_config_dir()), - ) - - assert expected == self._yaml["http"] - - def test_secrets_from_unrelated_fails(self): - """Test loading secrets from unrelated folder fails.""" - load_yaml(os.path.join(self._unrelated_path, yaml.SECRET_YAML), "test: failure") - with pytest.raises(HomeAssistantError): - load_yaml( - os.path.join(self._sub_folder_path, "sub.yaml"), - "http:\n api_password: !secret test", - ) - - def test_secrets_logger_removed(self): - """Ensure logger: debug was removed.""" - with pytest.raises(HomeAssistantError): - load_yaml(self._yaml_path, "api_password: !secret logger") - - @patch("homeassistant.util.yaml.loader._LOGGER.error") - def test_bad_logger_value(self, mock_error): - """Ensure logger: debug was removed.""" - load_yaml(self._secret_path, "logger: info\npw: abc") - load_yaml( - self._yaml_path, - "api_password: !secret pw", - yaml_loader.Secrets(get_test_config_dir()), - ) - assert mock_error.call_count == 1, "Expected an error about logger: value" - - def test_secrets_are_not_dict(self): - """Did secrets handle non-dict file.""" - FILES[self._secret_path] = ( - "- http_pw: pwhttp\n comp1_un: un1\n comp1_pw: pw1\n" - ) - with pytest.raises(HomeAssistantError): - load_yaml( - self._yaml_path, - ( - "http:\n" - " api_password: !secret http_pw\n" - "component:\n" - " username: !secret comp1_un\n" - " password: !secret comp1_pw\n" - "" - ), - ) - - @pytest.mark.parametrize("hass_config_yaml", ['key: [1, "2", 3]']) @pytest.mark.usefixtures("try_both_dumpers", "mock_hass_config_yaml") def test_representing_yaml_loaded_data() -> None: @@ -634,31 +494,6 @@ def mock_integration_frame() -> Generator[Mock]: yield correct_frame -@pytest.mark.parametrize( - ("loader_class", "message"), - [ - (yaml.loader.SafeLoader, "'SafeLoader' instead of 'FastSafeLoader'"), - ( - yaml.loader.SafeLineLoader, - "'SafeLineLoader' instead of 'PythonSafeLoader'", - ), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_deprecated_loaders( - caplog: pytest.LogCaptureFixture, - loader_class: type, - message: str, -) -> None: - """Test instantiating the deprecated yaml loaders logs a warning.""" - with ( - pytest.raises(TypeError), - patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()), - ): - loader_class() - assert (f"Detected that integration 'hue' uses deprecated {message}") in caplog.text - - @pytest.mark.usefixtures("try_both_loaders") def test_string_annotated() -> None: """Test strings are annotated with file + line.""" diff --git a/tests/util/yaml/test_secrets.py b/tests/util/yaml/test_secrets.py new file mode 100644 index 00000000000000..35b5ae319c41da --- /dev/null +++ b/tests/util/yaml/test_secrets.py @@ -0,0 +1,185 @@ +"""Test Home Assistant secret substitution in YAML 
files.""" + +from dataclasses import dataclass +import logging +from pathlib import Path + +import pytest + +from homeassistant.config import YAML_CONFIG_FILE, load_yaml_config_file +from homeassistant.exceptions import HomeAssistantError +from homeassistant.util import yaml +from homeassistant.util.yaml import loader as yaml_loader + +from tests.common import get_test_config_dir, patch_yaml_files + + +@dataclass(frozen=True) +class YamlFile: + """Represents a .yaml file used for testing.""" + + path: Path + contents: str + + +def load_config_file(config_file_path: Path, files: list[YamlFile]): + """Patch secret files and return the loaded config file.""" + patch_files = {x.path.as_posix(): x.contents for x in files} + with patch_yaml_files(patch_files): + return load_yaml_config_file( + config_file_path.as_posix(), + yaml_loader.Secrets(Path(get_test_config_dir())), + ) + + +@pytest.fixture +def filepaths() -> dict[str, Path]: + """Return a dictionary of filepaths for testing.""" + config_dir = Path(get_test_config_dir()) + return { + "config": config_dir, + "sub_folder": config_dir / "subFolder", + "unrelated": config_dir / "unrelated", + } + + +@pytest.fixture +def default_config(filepaths: dict[str, Path]) -> YamlFile: + """Return the default config file for testing.""" + return YamlFile( + path=filepaths["config"] / YAML_CONFIG_FILE, + contents=( + "http:\n" + " api_password: !secret http_pw\n" + "component:\n" + " username: !secret comp1_un\n" + " password: !secret comp1_pw\n" + "" + ), + ) + + +@pytest.fixture +def default_secrets(filepaths: dict[str, Path]) -> YamlFile: + """Return the default secrets file for testing.""" + return YamlFile( + path=filepaths["config"] / yaml.SECRET_YAML, + contents=( + "http_pw: pwhttp\n" + "comp1_un: un1\n" + "comp1_pw: pw1\n" + "stale_pw: not_used\n" + "logger: debug\n" + ), + ) + + +def test_secrets_from_yaml(default_config: YamlFile, default_secrets: YamlFile) -> None: + """Did secrets load ok.""" + loaded_file = load_config_file( + default_config.path, [default_config, default_secrets] + ) + expected = {"api_password": "pwhttp"} + assert expected == loaded_file["http"] + + expected = {"username": "un1", "password": "pw1"} + assert expected == loaded_file["component"] + + +def test_secrets_from_parent_folder( + filepaths: dict[str, Path], + default_config: YamlFile, + default_secrets: YamlFile, +) -> None: + """Test loading secrets from parent folder.""" + config_file = YamlFile( + path=filepaths["sub_folder"] / "sub.yaml", + contents=default_config.contents, + ) + loaded_file = load_config_file(config_file.path, [config_file, default_secrets]) + expected = {"api_password": "pwhttp"} + + assert expected == loaded_file["http"] + + +def test_secret_overrides_parent( + filepaths: dict[str, Path], + default_config: YamlFile, + default_secrets: YamlFile, +) -> None: + """Test loading current directory secret overrides the parent.""" + config_file = YamlFile( + path=filepaths["sub_folder"] / "sub.yaml", contents=default_config.contents + ) + sub_secrets = YamlFile( + path=filepaths["sub_folder"] / yaml.SECRET_YAML, contents="http_pw: override" + ) + + loaded_file = load_config_file( + config_file.path, [config_file, default_secrets, sub_secrets] + ) + + expected = {"api_password": "override"} + assert loaded_file["http"] == expected + + +def test_secrets_from_unrelated_fails( + filepaths: dict[str, Path], + default_secrets: YamlFile, +) -> None: + """Test loading secrets from unrelated folder fails.""" + config_file = YamlFile( + 
path=filepaths["sub_folder"] / "sub.yaml", + contents="http:\n api_password: !secret test", + ) + unrelated_secrets = YamlFile( + path=filepaths["unrelated"] / yaml.SECRET_YAML, contents="test: failure" + ) + with pytest.raises(HomeAssistantError, match="Secret test not defined"): + load_config_file( + config_file.path, [config_file, default_secrets, unrelated_secrets] + ) + + +def test_secrets_logger_removed( + filepaths: dict[str, Path], + default_secrets: YamlFile, +) -> None: + """Ensure logger: debug gets removed from secrets file once logger is configured.""" + config_file = YamlFile( + path=filepaths["config"] / YAML_CONFIG_FILE, + contents="api_password: !secret logger", + ) + with pytest.raises(HomeAssistantError, match="Secret logger not defined"): + load_config_file(config_file.path, [config_file, default_secrets]) + + +def test_bad_logger_value( + caplog: pytest.LogCaptureFixture, filepaths: dict[str, Path] +) -> None: + """Ensure only logger: debug is allowed in secret file.""" + config_file = YamlFile( + path=filepaths["config"] / YAML_CONFIG_FILE, contents="api_password: !secret pw" + ) + secrets_file = YamlFile( + path=filepaths["config"] / yaml.SECRET_YAML, contents="logger: info\npw: abc" + ) + with caplog.at_level(logging.ERROR): + load_config_file(config_file.path, [config_file, secrets_file]) + assert ( + "Error in secrets.yaml: 'logger: debug' expected, but 'logger: info' found" + in caplog.messages + ) + + +def test_secrets_are_not_dict( + filepaths: dict[str, Path], + default_config: YamlFile, +) -> None: + """Did secrets handle non-dict file.""" + non_dict_secrets = YamlFile( + path=filepaths["config"] / yaml.SECRET_YAML, + contents="- http_pw: pwhttp\n comp1_un: un1\n comp1_pw: pw1\n", + ) + with pytest.raises(HomeAssistantError, match="Secrets is not a dictionary"): + load_config_file(default_config.path, [default_config, non_dict_secrets])
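
For orientation, the new tests/util/yaml/test_secrets.py above replaces the removed unittest-style TestSecrets class with plain fixture-based tests built around the YamlFile dataclass and the load_config_file helper. The sketch below is not part of the diff; it is a hypothetical extra case showing how those pieces compose. The test name and the "does_not_exist" secret key are invented for illustration, and the snippet assumes it lives in the same module so it can reuse the YamlFile class, the load_config_file helper, and the filepaths/default_secrets fixtures defined above; the expected error message mirrors the "Secret ... not defined" assertions already used in the new file.

from pathlib import Path

import pytest

from homeassistant.config import YAML_CONFIG_FILE
from homeassistant.exceptions import HomeAssistantError


def test_missing_secret_raises(
    filepaths: dict[str, Path],
    default_secrets: YamlFile,
) -> None:
    """Referencing a secret that is not in secrets.yaml should fail (illustrative sketch)."""
    # Config file that points at a secret key absent from default_secrets.
    config_file = YamlFile(
        path=filepaths["config"] / YAML_CONFIG_FILE,
        contents="api_password: !secret does_not_exist",
    )
    # Loading should raise, matching the error text pattern used by the tests above.
    with pytest.raises(HomeAssistantError, match="Secret does_not_exist not defined"):
        load_config_file(config_file.path, [config_file, default_secrets])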