Merge branch 'dev' into Xiaomi-MCCGQ02HL

Jesse Hills 2024-06-26 15:47:09 +12:00 committed by GitHub
commit af4cf00f8b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5271 changed files with 232752 additions and 30806 deletions

@ -5,9 +5,12 @@ Checks: >-
-altera-*,
-android-*,
-boost-*,
-bugprone-easily-swappable-parameters,
-bugprone-implicit-widening-of-multiplication-result,
-bugprone-narrowing-conversions,
-bugprone-signed-char-misuse,
-cert-dcl50-cpp,
-cert-err33-c,
-cert-err58-cpp,
-cert-oop57-cpp,
-cert-str34-c,
@ -15,6 +18,7 @@ Checks: >-
-clang-analyzer-osx.*,
-clang-diagnostic-delete-abstract-non-virtual-dtor,
-clang-diagnostic-delete-non-abstract-non-virtual-dtor,
-clang-diagnostic-ignored-optimization-argument,
-clang-diagnostic-shadow-field,
-clang-diagnostic-unused-const-variable,
-clang-diagnostic-unused-parameter,
@ -25,6 +29,7 @@ Checks: >-
-cppcoreguidelines-macro-usage,
-cppcoreguidelines-narrowing-conversions,
-cppcoreguidelines-non-private-member-variables-in-classes,
-cppcoreguidelines-prefer-member-initializer,
-cppcoreguidelines-pro-bounds-array-to-pointer-decay,
-cppcoreguidelines-pro-bounds-constant-array-index,
-cppcoreguidelines-pro-bounds-pointer-arithmetic,
@ -36,6 +41,7 @@ Checks: >-
-cppcoreguidelines-pro-type-union-access,
-cppcoreguidelines-pro-type-vararg,
-cppcoreguidelines-special-member-functions,
-cppcoreguidelines-virtual-class-destructor,
-fuchsia-multiple-inheritance,
-fuchsia-overloaded-operator,
-fuchsia-statically-constructed-objects,
@ -68,6 +74,7 @@ Checks: >-
-modernize-use-nodiscard,
-mpi-*,
-objc-*,
-readability-container-data-pointer,
-readability-convert-member-functions-to-static,
-readability-else-after-return,
-readability-function-cognitive-complexity,
@ -82,8 +89,6 @@ WarningsAsErrors: '*'
AnalyzeTemporaryDtors: false
FormatStyle: google
CheckOptions:
- key: google-readability-braces-around-statements.ShortStatementLines
value: '1'
- key: google-readability-function-size.StatementThreshold
value: '800'
- key: google-runtime-int.TypeSuffix
@ -158,3 +163,9 @@ CheckOptions:
value: ''
- key: readability-qualified-auto.AddConstToQualified
value: 0
- key: readability-identifier-length.MinimumVariableNameLength
value: 0
- key: readability-identifier-length.MinimumParameterNameLength
value: 0
- key: readability-identifier-length.MinimumLoopCounterNameLength
value: 0

@ -1,15 +1,29 @@
{
"name": "ESPHome Dev",
"image": "esphome/esphome-lint:dev",
"postCreateCommand": [
"script/devcontainer-post-create"
],
"image": "ghcr.io/esphome/esphome-lint:dev",
"postCreateCommand": ["script/devcontainer-post-create"],
"containerEnv": {
"DEVCONTAINER": "1",
"PIP_BREAK_SYSTEM_PACKAGES": "1",
"PIP_ROOT_USER_ACTION": "ignore"
},
"runArgs": [
"--privileged",
"-e",
"ESPHOME_DASHBOARD_USE_PING=1"
// uncomment and edit the path in order to pass through local USB serial to the container
// , "--device=/dev/ttyACM0"
],
"appPort": 6052,
// if you are using avahi in the host device, uncomment these to allow the
// devcontainer to find devices via mdns
//"mounts": [
// "type=bind,source=/dev/bus/usb,target=/dev/bus/usb",
// "type=bind,source=/var/run/dbus,target=/var/run/dbus",
// "type=bind,source=/var/run/avahi-daemon/socket,target=/var/run/avahi-daemon/socket"
//],
"customizations": {
"vscode": {
"extensions": [
// python
"ms-python.python",
@ -19,7 +33,7 @@
// cpp
"ms-vscode.cpptools",
// editorconfig
"editorconfig.editorconfig",
"editorconfig.editorconfig"
],
"settings": {
"python.languageServer": "Pylance",
@ -35,6 +49,8 @@
"yaml.customTags": [
"!secret scalar",
"!lambda scalar",
"!extend scalar",
"!remove scalar",
"!include_dir_named scalar",
"!include_dir_list scalar",
"!include_dir_merge_list scalar",
@ -51,6 +67,8 @@
"files.associations": {
"**/.vscode/*.json": "jsonc"
},
"C_Cpp.clang_format_path": "/usr/bin/clang-format-11",
"C_Cpp.clang_format_path": "/usr/bin/clang-format-13"
}
}
}
}

@ -25,10 +25,9 @@ indent_size = 2
[*.{yaml,yml}]
indent_style = space
indent_size = 2
quote_type = single
quote_type = double
# JSON
[*.json]
indent_style = space
indent_size = 2

42
.flake8 Normal file
@ -0,0 +1,42 @@
[flake8]
max-line-length = 120
# Following 4 for black compatibility
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# TODO fix flake8
# D100 Missing docstring in public module
# D101 Missing docstring in public class
# D102 Missing docstring in public method
# D103 Missing docstring in public function
# D104 Missing docstring in public package
# D105 Missing docstring in magic method
# D107 Missing docstring in __init__
# D200 One-line docstring should fit on one line with quotes
# D205 1 blank line required between summary line and description
# D209 Multi-line docstring closing quotes should be on a separate line
# D400 First line should end with a period
# D401 First line should be in imperative mood
ignore =
E501,
W503,
E203,
D202,
D100,
D101,
D102,
D103,
D104,
D105,
D107,
D200,
D205,
D209,
D400,
D401,
exclude = api_pb2.py

1
.gitattributes vendored
@ -1,2 +1,3 @@
# Normalize line endings to LF in the repository
* text eol=lf
*.png binary

1
.github/FUNDING.yml vendored
@ -1,3 +1,4 @@
---
# These are supported funding model platforms
custom: https://www.nabucasa.com

@ -1,3 +1,4 @@
---
blank_issues_enabled: false
contact_links:
- name: Issue Tracker
@ -5,7 +6,10 @@ contact_links:
about: Please create bug reports in the dedicated issue tracker.
- name: Feature Request Tracker
url: https://github.com/esphome/feature-requests
about: Please create feature requests in the dedicated feature request tracker.
about: |
Please create feature requests in the dedicated feature request tracker.
- name: Frequently Asked Question
url: https://esphome.io/guides/faq.html
about: Please view the FAQ for common questions and what to include in a bug report.
about: |
Please view the FAQ for common questions and what
to include in a bug report.

@ -1,6 +1,6 @@
# What does this implement/fix?
Quick description and explanation of changes
<!-- Quick description and explanation of changes -->
## Types of changes
@ -18,6 +18,9 @@ Quick description and explanation of changes
- [ ] ESP32
- [ ] ESP32 IDF
- [ ] ESP8266
- [ ] RP2040
- [ ] BK72xx
- [ ] RTL87xx
## Example entry for `config.yaml`:
<!--

91
.github/actions/build-image/action.yaml vendored Normal file
@ -0,0 +1,91 @@
name: Build Image
inputs:
platform:
description: "Platform to build for"
required: true
example: "linux/amd64"
target:
description: "Target to build"
required: true
example: "docker"
baseimg:
description: "Base image type"
required: true
example: "docker"
suffix:
description: "Suffix to add to tags"
required: true
version:
description: "Version to build"
required: true
example: "2023.12.0"
runs:
using: "composite"
steps:
- name: Generate short tags
id: tags
shell: bash
run: |
output=$(docker/generate_tags.py \
--tag "${{ inputs.version }}" \
--suffix "${{ inputs.suffix }}")
echo $output
for l in $output; do
echo $l >> $GITHUB_OUTPUT
done
# set cache-to only if dev branch
- id: cache-to
shell: bash
run: |-
if [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
echo "value=type=gha,mode=max" >> $GITHUB_OUTPUT
else
echo "value=" >> $GITHUB_OUTPUT
fi
- name: Build and push to ghcr by digest
id: build-ghcr
uses: docker/build-push-action@v6.1.0
with:
context: .
file: ./docker/Dockerfile
platforms: ${{ inputs.platform }}
target: ${{ inputs.target }}
cache-from: type=gha
cache-to: ${{ steps.cache-to.outputs.value }}
build-args: |
BASEIMGTYPE=${{ inputs.baseimg }}
BUILD_VERSION=${{ inputs.version }}
outputs: |
type=image,name=ghcr.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
- name: Export ghcr digests
shell: bash
run: |
mkdir -p /tmp/digests/${{ inputs.target }}/ghcr
digest="${{ steps.build-ghcr.outputs.digest }}"
touch "/tmp/digests/${{ inputs.target }}/ghcr/${digest#sha256:}"
- name: Build and push to dockerhub by digest
id: build-dockerhub
uses: docker/build-push-action@v6.1.0
with:
context: .
file: ./docker/Dockerfile
platforms: ${{ inputs.platform }}
target: ${{ inputs.target }}
cache-from: type=gha
cache-to: ${{ steps.cache-to.outputs.value }}
build-args: |
BASEIMGTYPE=${{ inputs.baseimg }}
BUILD_VERSION=${{ inputs.version }}
outputs: |
type=image,name=docker.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
- name: Export dockerhub digests
shell: bash
run: |
mkdir -p /tmp/digests/${{ inputs.target }}/dockerhub
digest="${{ steps.build-dockerhub.outputs.digest }}"
touch "/tmp/digests/${{ inputs.target }}/dockerhub/${digest#sha256:}"

@ -0,0 +1,47 @@
name: Restore Python
inputs:
python-version:
description: Python version to restore
required: true
type: string
cache-key:
description: Cache key to use
required: true
type: string
outputs:
python-version:
description: Python version restored
value: ${{ steps.python.outputs.python-version }}
runs:
using: "composite"
steps:
- name: Set up Python ${{ inputs.python-version }}
id: python
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ inputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
with:
path: venv
# yamllint disable-line rule:line-length
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ inputs.cache-key }}
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os != 'Windows'
shell: bash
run: |
python -m venv venv
source venv/bin/activate
python --version
pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
pip install -e .
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os == 'Windows'
shell: bash
run: |
python -m venv venv
./venv/Scripts/activate
python --version
pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
pip install -e .

@ -1,9 +1,25 @@
---
version: 2
updates:
- package-ecosystem: "pip"
- package-ecosystem: pip
directory: "/"
schedule:
interval: "daily"
interval: daily
ignore:
# Hypothesis is only used for testing and is updated quite often
- dependency-name: hypothesis
- package-ecosystem: github-actions
directory: "/"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: github-actions
directory: "/.github/actions/build-image"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: github-actions
directory: "/.github/actions/restore-python"
schedule:
interval: daily
open-pull-requests-limit: 10

80
.github/workflows/ci-api-proto.yml vendored Normal file
@ -0,0 +1,80 @@
name: API Proto CI
on:
pull_request:
paths:
- "esphome/components/api/api.proto"
- "esphome/components/api/api_pb2.cpp"
- "esphome/components/api/api_pb2.h"
- "esphome/components/api/api_pb2_service.cpp"
- "esphome/components/api/api_pb2_service.h"
- "script/api_protobuf/api_protobuf.py"
- ".github/workflows/ci-api-proto.yml"
permissions:
contents: read
pull-requests: write
jobs:
check:
name: Check generated files
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4.1.7
- name: Set up Python
uses: actions/setup-python@v5.1.0
with:
python-version: "3.11"
- name: Install apt dependencies
run: |
sudo apt update
sudo apt-cache show protobuf-compiler
sudo apt install -y protobuf-compiler
protoc --version
- name: Install python dependencies
run: pip install aioesphomeapi -c requirements.txt -r requirements_dev.txt
- name: Generate files
run: script/api_protobuf/api_protobuf.py
- name: Check for changes
run: |
if ! git diff --quiet; then
echo "## Job Failed" | tee -a $GITHUB_STEP_SUMMARY
echo "You have altered the generated proto files but they do not match what is expected." | tee -a $GITHUB_STEP_SUMMARY
echo "Please run 'script/api_protobuf/api_protobuf.py' and commit the changes." | tee -a $GITHUB_STEP_SUMMARY
exit 1
fi
- if: failure()
name: Review PR
uses: actions/github-script@v7.0.1
with:
script: |
await github.rest.pulls.createReview({
pull_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
event: 'REQUEST_CHANGES',
body: 'You have altered the generated proto files but they do not match what is expected.\nPlease run "script/api_protobuf/api_protobuf.py" and commit the changes.'
})
- if: success()
name: Dismiss review
uses: actions/github-script@v7.0.1
with:
script: |
let reviews = await github.rest.pulls.listReviews({
pull_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo
});
for (let review of reviews.data) {
if (review.user.login === 'github-actions[bot]' && review.state === 'CHANGES_REQUESTED') {
await github.rest.pulls.dismissReview({
pull_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
review_id: review.id,
message: 'Files now match the expected proto files.'
});
}
}
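
The check in this workflow is just "regenerate and diff"; one way to reproduce it locally before pushing, using the same commands the job runs (assumes protoc and the pinned Python requirements are installed):

pip install aioesphomeapi -c requirements.txt -r requirements_dev.txt
script/api_protobuf/api_protobuf.py   # regenerate the api_pb2* sources
git diff --exit-code                  # non-zero exit means the generated files are stale and need committing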

@ -1,44 +1,54 @@
---
name: CI for docker images
# Only run when docker paths change
on:
push:
branches: [dev, beta, release]
paths:
- 'docker/**'
- '.github/workflows/**'
- 'requirements*.txt'
- 'platformio.ini'
- "docker/**"
- ".github/workflows/ci-docker.yml"
- "requirements*.txt"
- "platformio.ini"
- "script/platformio_install_deps.py"
pull_request:
paths:
- 'docker/**'
- '.github/workflows/**'
- 'requirements*.txt'
- 'platformio.ini'
- "docker/**"
- ".github/workflows/ci-docker.yml"
- "requirements*.txt"
- "platformio.ini"
- "script/platformio_install_deps.py"
permissions:
contents: read
packages: read
concurrency:
# yamllint disable-line rule:line-length
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
check-docker:
name: Build docker containers
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
arch: [amd64, armv7, aarch64]
build_type: ["ha-addon", "docker", "lint"]
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4.1.7
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5.1.0
with:
python-version: '3.9'
python-version: "3.9"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3.0.0
- name: Set TAG
run: |

@ -1,3 +1,4 @@
---
name: CI
on:
@ -5,50 +6,331 @@ on:
branches: [dev, beta, release]
pull_request:
paths:
- "**"
- "!.github/workflows/*.yml"
- ".github/workflows/ci.yml"
- "!.yamllint"
- "!.github/dependabot.yml"
merge_group:
permissions:
contents: read
env:
DEFAULT_PYTHON: "3.9"
PYUPGRADE_TARGET: "--py39-plus"
concurrency:
# yamllint disable-line rule:line-length
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
ci:
name: ${{ matrix.name }}
common:
name: Create common environment
runs-on: ubuntu-latest
outputs:
cache-key: ${{ steps.cache-key.outputs.key }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Generate cache-key
id: cache-key
run: echo key="${{ hashFiles('requirements.txt', 'requirements_optional.txt', 'requirements_test.txt') }}" >> $GITHUB_OUTPUT
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
with:
path: venv
# yamllint disable-line rule:line-length
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ steps.cache-key.outputs.key }}
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
python -m venv venv
. venv/bin/activate
python --version
pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
pip install -e .
black:
name: Check black
runs-on: ubuntu-latest
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run black
run: |
. venv/bin/activate
black --verbose esphome tests
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
flake8:
name: Check flake8
runs-on: ubuntu-latest
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run flake8
run: |
. venv/bin/activate
flake8 esphome
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
pylint:
name: Check pylint
runs-on: ubuntu-latest
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run pylint
run: |
. venv/bin/activate
pylint -f parseable --persistent=n esphome
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
pyupgrade:
name: Check pyupgrade
runs-on: ubuntu-latest
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run pyupgrade
run: |
. venv/bin/activate
pyupgrade ${{ env.PYUPGRADE_TARGET }} `find esphome -name "*.py" -type f`
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
ci-custom:
name: Run script/ci-custom
runs-on: ubuntu-latest
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Register matcher
run: echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
- name: Run script/ci-custom
run: |
. venv/bin/activate
script/ci-custom.py
script/build_codeowners.py --check
pytest:
name: Run pytest
strategy:
fail-fast: false
matrix:
python-version:
- "3.9"
- "3.10"
- "3.11"
- "3.12"
os:
- ubuntu-latest
- macOS-latest
- windows-latest
exclude:
# Minimize CI resource usage
# by only running the Python version
# used for docker images on Windows and macOS
- python-version: "3.12"
os: windows-latest
- python-version: "3.10"
os: windows-latest
- python-version: "3.9"
os: windows-latest
- python-version: "3.12"
os: macOS-latest
- python-version: "3.10"
os: macOS-latest
- python-version: "3.9"
os: macOS-latest
runs-on: ${{ matrix.os }}
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ matrix.python-version }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Register matcher
run: echo "::add-matcher::.github/workflows/matchers/pytest.json"
- name: Run pytest
if: matrix.os == 'windows-latest'
run: |
./venv/Scripts/activate
pytest -vv --cov-report=xml --tb=native tests
- name: Run pytest
if: matrix.os == 'ubuntu-latest' || matrix.os == 'macOS-latest'
run: |
. venv/bin/activate
pytest -vv --cov-report=xml --tb=native tests
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
clang-format:
name: Check clang-format
runs-on: ubuntu-latest
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Install clang-format
run: |
. venv/bin/activate
pip install clang-format -c requirements_dev.txt
- name: Run clang-format
run: |
. venv/bin/activate
script/clang-format -i
git diff-index --quiet HEAD --
- name: Suggested changes
run: script/ci-suggest-changes
if: always()
compile-tests-list:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Find all YAML test files
id: set-matrix
run: echo "matrix=$(ls tests/test*.yaml | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT
validate-tests:
name: Validate YAML test ${{ matrix.file }}
runs-on: ubuntu-latest
needs:
- common
- compile-tests-list
strategy:
fail-fast: false
matrix:
file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run esphome config ${{ matrix.file }}
run: |
. venv/bin/activate
esphome config ${{ matrix.file }}
compile-tests:
name: Run YAML test ${{ matrix.file }}
runs-on: ubuntu-latest
needs:
- common
- black
- ci-custom
- clang-format
- flake8
- pylint
- pytest
- pyupgrade
- compile-tests-list
- validate-tests
strategy:
fail-fast: false
max-parallel: 2
matrix:
file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Run esphome compile ${{ matrix.file }}
run: |
. venv/bin/activate
esphome compile ${{ matrix.file }}
clang-tidy:
name: ${{ matrix.name }}
runs-on: ubuntu-latest
needs:
- common
- black
- ci-custom
- clang-format
- flake8
- pylint
- pytest
- pyupgrade
strategy:
fail-fast: false
max-parallel: 2
matrix:
include:
- id: ci-custom
name: Run script/ci-custom
- id: lint-python
name: Run script/lint-python
- id: test
file: tests/test1.yaml
name: Test tests/test1.yaml
pio_cache_key: test1
- id: test
file: tests/test2.yaml
name: Test tests/test2.yaml
pio_cache_key: test2
- id: test
file: tests/test3.yaml
name: Test tests/test3.yaml
pio_cache_key: test3
- id: test
file: tests/test4.yaml
name: Test tests/test4.yaml
pio_cache_key: test4
- id: test
file: tests/test5.yaml
name: Test tests/test5.yaml
pio_cache_key: test5
- id: pytest
name: Run pytest
- id: clang-format
name: Run script/clang-format
- id: clang-tidy
name: Run script/clang-tidy for ESP8266
options: --environment esp8266-arduino-tidy --grep USE_ESP8266
@ -75,92 +357,214 @@ jobs:
pio_cache_key: tidyesp32-idf
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
id: python
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: '3.8'
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache virtualenv
uses: actions/cache@v2
with:
path: .venv
key: venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements*.txt') }}
restore-keys: |
venv-${{ steps.python.outputs.python-version }}-
- name: Set up virtualenv
run: |
python -m venv .venv
source .venv/bin/activate
pip install -U pip
pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
pip install -e .
echo "$GITHUB_WORKSPACE/.venv/bin" >> $GITHUB_PATH
echo "VIRTUAL_ENV=$GITHUB_WORKSPACE/.venv" >> $GITHUB_ENV
# Use per check platformio cache because checks use different parts
- name: Cache platformio
uses: actions/cache@v2
if: github.ref == 'refs/heads/dev'
uses: actions/cache@v4.0.2
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
if: matrix.id == 'test' || matrix.id == 'clang-tidy'
key: platformio-${{ matrix.pio_cache_key }}
- name: Install clang tools
run: |
sudo apt-get install \
clang-format-11 \
clang-tidy-11
if: matrix.id == 'clang-tidy' || matrix.id == 'clang-format'
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@v4.0.2
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}
- name: Install clang-tidy
run: sudo apt-get install clang-tidy-14
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
echo "::add-matcher::.github/workflows/matchers/python.json"
echo "::add-matcher::.github/workflows/matchers/pytest.json"
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
- name: Lint Custom
- name: Run 'pio run --list-targets -e esp32-idf-tidy'
if: matrix.name == 'Run script/clang-tidy for ESP32 IDF'
run: |
script/ci-custom.py
script/build_codeowners.py --check
if: matrix.id == 'ci-custom'
- name: Lint Python
run: script/lint-python
if: matrix.id == 'lint-python'
- run: esphome compile ${{ matrix.file }}
if: matrix.id == 'test'
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
- name: Run pytest
run: |
pytest -vv --tb=native tests
if: matrix.id == 'pytest'
# Also run git-diff-index so that the step is marked as failed on formatting errors,
# since clang-format doesn't do anything but change files if -i is passed.
- name: Run clang-format
run: |
script/clang-format -i
git diff-index --quiet HEAD --
if: matrix.id == 'clang-format'
. venv/bin/activate
mkdir -p .temp
pio run --list-targets -e esp32-idf-tidy
- name: Run clang-tidy
run: |
. venv/bin/activate
script/clang-tidy --all-headers --fix ${{ matrix.options }}
if: matrix.id == 'clang-tidy'
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
- name: Suggested changes
run: script/ci-suggest-changes
if: always() && (matrix.id == 'clang-tidy' || matrix.id == 'clang-format')
# yamllint disable-line rule:line-length
if: always()
list-components:
runs-on: ubuntu-latest
needs:
- common
if: github.event_name == 'pull_request'
outputs:
components: ${{ steps.list-components.outputs.components }}
count: ${{ steps.list-components.outputs.count }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
with:
# Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
fetch-depth: 500
- name: Get target branch
id: target-branch
run: |
echo "branch=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT
- name: Fetch ${{ steps.target-branch.outputs.branch }} branch
run: |
git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/${{ steps.target-branch.outputs.branch }}:refs/remotes/origin/${{ steps.target-branch.outputs.branch }}
git merge-base refs/remotes/origin/${{ steps.target-branch.outputs.branch }} HEAD
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Find changed components
id: list-components
run: |
. venv/bin/activate
components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }})
output_components=$(echo "$components" | jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))')
count=$(echo "$output_components" | jq length)
echo "components=$output_components" >> $GITHUB_OUTPUT
echo "count=$count" >> $GITHUB_OUTPUT
echo "$count Components:"
echo "$output_components" | jq
test-build-components:
name: Component test ${{ matrix.file }}
runs-on: ubuntu-latest
needs:
- common
- list-components
if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) > 0 && fromJSON(needs.list-components.outputs.count) < 100
strategy:
fail-fast: false
max-parallel: 2
matrix:
file: ${{ fromJson(needs.list-components.outputs.components) }}
steps:
- name: Install dependencies
run: sudo apt-get install libsodium-dev libsdl2-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: test_build_components -e config -c ${{ matrix.file }}
run: |
. venv/bin/activate
./script/test_build_components -e config -c ${{ matrix.file }}
- name: test_build_components -e compile -c ${{ matrix.file }}
run: |
. venv/bin/activate
./script/test_build_components -e compile -c ${{ matrix.file }}
test-build-components-splitter:
name: Split components for testing into 20 groups maximum
runs-on: ubuntu-latest
needs:
- common
- list-components
if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
outputs:
matrix: ${{ steps.split.outputs.components }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Split components into 20 groups
id: split
run: |
components=$(echo '${{ needs.list-components.outputs.components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
echo "components=$components" >> $GITHUB_OUTPUT
test-build-components-split:
name: Test split components
runs-on: ubuntu-latest
needs:
- common
- list-components
- test-build-components-splitter
if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
strategy:
fail-fast: false
max-parallel: 4
matrix:
components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
steps:
- name: List components
run: echo ${{ matrix.components }}
- name: Install dependencies
run: sudo apt-get install libsodium-dev libsdl2-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Validate config
run: |
. venv/bin/activate
for component in ${{ matrix.components }}; do
./script/test_build_components -e config -c $component
done
- name: Compile config
run: |
. venv/bin/activate
for component in ${{ matrix.components }}; do
./script/test_build_components -e compile -c $component
done
ci-status:
name: CI Status
runs-on: ubuntu-latest
needs:
- common
- black
- ci-custom
- clang-format
- flake8
- pylint
- pytest
- pyupgrade
- compile-tests
- clang-tidy
- list-components
- test-build-components
- test-build-components-splitter
- test-build-components-split
if: always()
steps:
- name: Success
if: ${{ !(contains(needs.*.result, 'failure')) }}
run: exit 0
- name: Failure
if: ${{ contains(needs.*.result, 'failure') }}
env:
JSON_DOC: ${{ toJSON(needs) }}
run: |
echo $JSON_DOC | jq
exit 1
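
A note on the test-build-components-splitter job above: the pipeline leans on jq's _nwise builtin to pack the shuffled component list into space-separated groups of at most 20. A standalone sketch with an invented component list (the real input is the JSON array produced by script/list-components.py in the list-components job):

# hypothetical list-components output
components='["bme280","dht","wifi","api","logger"]'
# one name per line, shuffled, then grouped into strings of up to 20 names each
echo "$components" | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]'
# example result -- a single group here, ordering varies because of shuf:
# ["api wifi bme280 logger dht"]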

@ -1,8 +1,9 @@
---
name: Lock
on:
schedule:
- cron: '30 0 * * *'
- cron: "30 0 * * *"
workflow_dispatch:
permissions:
@ -16,7 +17,7 @@ jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v3
- uses: dessant/lock-threads@v5.0.1
with:
pr-inactive-days: "1"
pr-lock-reason: ""

24
.github/workflows/needs-docs.yml vendored Normal file
@ -0,0 +1,24 @@
name: Needs Docs
on:
pull_request:
types: [labeled, unlabeled]
jobs:
check:
name: Check
runs-on: ubuntu-latest
steps:
- name: Check for needs-docs label
uses: actions/github-script@v7.0.1
with:
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number
});
const needsDocs = labels.find(label => label.name === 'needs-docs');
if (needsDocs) {
core.setFailed('Pull request needs docs');
}

@ -1,3 +1,4 @@
---
name: Publish Release
on:
@ -16,44 +17,58 @@ jobs:
runs-on: ubuntu-latest
outputs:
tag: ${{ steps.tag.outputs.tag }}
branch_build: ${{ steps.tag.outputs.branch_build }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4.1.7
- name: Get tag
id: tag
# yamllint disable rule:line-length
run: |
if [[ "$GITHUB_EVENT_NAME" = "release" ]]; then
TAG="${GITHUB_REF#refs/tags/}"
if [[ "${{ github.event_name }}" = "release" ]]; then
TAG="${{ github.event.release.tag_name}}"
BRANCH_BUILD="false"
else
TAG=$(cat esphome/const.py | sed -n -E "s/^__version__\s+=\s+\"(.+)\"$/\1/p")
today="$(date --utc '+%Y%m%d')"
TAG="${TAG}${today}"
BRANCH=${GITHUB_REF#refs/heads/}
if [[ "$BRANCH" != "dev" ]]; then
TAG="${TAG}-${BRANCH}"
BRANCH_BUILD="true"
else
BRANCH_BUILD="false"
fi
echo "::set-output name=tag::${TAG}"
fi
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "branch_build=${BRANCH_BUILD}" >> $GITHUB_OUTPUT
# yamllint enable rule:line-length
deploy-pypi:
name: Build and publish to PyPi
if: github.repository == 'esphome/esphome' && github.event_name == 'release'
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4.1.7
- name: Set up Python
uses: actions/setup-python@v1
uses: actions/setup-python@v5.1.0
with:
python-version: '3.x'
python-version: "3.x"
- name: Set up python environment
run: |
script/setup
pip install setuptools wheel twine
- name: Build
run: python setup.py sdist bdist_wheel
- name: Upload
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
run: twine upload dist/*
ESPHOME_NO_VENV: 1
run: script/setup
- name: Build
run: |-
pip3 install build
python3 -m build
- name: Publish
uses: pypa/gh-action-pypi-publish@v1.9.0
deploy-docker:
name: Build and publish docker containers
name: Build ESPHome ${{ matrix.platform }}
if: github.repository == 'esphome/esphome'
permissions:
contents: read
@ -61,94 +76,171 @@ jobs:
runs-on: ubuntu-latest
needs: [init]
strategy:
fail-fast: false
matrix:
arch: [amd64, armv7, aarch64]
build_type: ["ha-addon", "docker", "lint"]
platform:
- linux/amd64
- linux/arm/v7
- linux/arm64
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4.1.7
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5.1.0
with:
python-version: '3.9'
python-version: "3.9"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
if: matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3.0.0
- name: Log in to docker hub
uses: docker/login-action@v1
uses: docker/login-action@v3.2.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
uses: docker/login-action@v1
uses: docker/login-action@v3.2.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
run: |
docker/build.py \
--tag "${{ needs.init.outputs.tag }}" \
--arch "${{ matrix.arch }}" \
--build-type "${{ matrix.build_type }}" \
build \
--push
- name: Build docker
uses: ./.github/actions/build-image
with:
platform: ${{ matrix.platform }}
target: docker
baseimg: docker
suffix: ""
version: ${{ needs.init.outputs.tag }}
deploy-docker-manifest:
- name: Build ha-addon
uses: ./.github/actions/build-image
with:
platform: ${{ matrix.platform }}
target: hassio
baseimg: hassio
suffix: "hassio"
version: ${{ needs.init.outputs.tag }}
- name: Build lint
uses: ./.github/actions/build-image
with:
platform: ${{ matrix.platform }}
target: lint
baseimg: docker
suffix: lint
version: ${{ needs.init.outputs.tag }}
- name: Sanitize platform name
id: sanitize
run: |
echo "${{ matrix.platform }}" | sed 's|/|-|g' > /tmp/platform
echo name=$(cat /tmp/platform) >> $GITHUB_OUTPUT
- name: Upload digests
uses: actions/upload-artifact@v4.3.3
with:
name: digests-${{ steps.sanitize.outputs.name }}
path: /tmp/digests
retention-days: 1
deploy-manifest:
name: Publish ESPHome ${{ matrix.image.title }} to ${{ matrix.registry }}
runs-on: ubuntu-latest
needs:
- init
- deploy-docker
if: github.repository == 'esphome/esphome'
permissions:
contents: read
packages: write
runs-on: ubuntu-latest
needs: [init, deploy-docker]
strategy:
fail-fast: false
matrix:
build_type: ["ha-addon", "docker", "lint"]
image:
- title: "ha-addon"
target: "hassio"
suffix: "hassio"
- title: "docker"
target: "docker"
suffix: ""
- title: "lint"
target: "lint"
suffix: "lint"
registry:
- ghcr
- dockerhub
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
- uses: actions/checkout@v4.1.7
- name: Download digests
uses: actions/download-artifact@v4.1.7
with:
python-version: '3.9'
- name: Enable experimental manifest support
run: |
mkdir -p ~/.docker
echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
pattern: digests-*
path: /tmp/digests
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.3.0
- name: Log in to docker hub
uses: docker/login-action@v1
if: matrix.registry == 'dockerhub'
uses: docker/login-action@v3.2.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
uses: docker/login-action@v1
if: matrix.registry == 'ghcr'
uses: docker/login-action@v3.2.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Run manifest
- name: Generate short tags
id: tags
run: |
docker/build.py \
output=$(docker/generate_tags.py \
--tag "${{ needs.init.outputs.tag }}" \
--build-type "${{ matrix.build_type }}" \
manifest
--suffix "${{ matrix.image.suffix }}" \
--registry "${{ matrix.registry }}")
echo $output
for l in $output; do
echo $l >> $GITHUB_OUTPUT
done
- name: Create manifest list and push
working-directory: /tmp/digests/${{ matrix.image.target }}/${{ matrix.registry }}
run: |
docker buildx imagetools create $(jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "${{ steps.tags.outputs.tags}}") \
$(printf '${{ steps.tags.outputs.image }}@sha256:%s ' *)
deploy-ha-addon-repo:
if: github.repository == 'esphome/esphome' && github.event_name == 'release'
if: github.repository == 'esphome/esphome' && needs.init.outputs.branch_build == 'false'
runs-on: ubuntu-latest
needs: [deploy-docker]
needs:
- init
- deploy-manifest
steps:
- env:
TOKEN: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
run: |
TAG="${GITHUB_REF#refs/tags/}"
curl \
-u ":$TOKEN" \
-X POST \
-H "Accept: application/vnd.github.v3+json" \
https://api.github.com/repos/esphome/home-assistant-addon/actions/workflows/bump-version.yml/dispatches \
-d "{\"ref\":\"main\",\"inputs\":{\"version\":\"$TAG\"}}"
- name: Trigger Workflow
uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
script: |
let description = "ESPHome";
if (context.eventName == "release") {
description = ${{ toJSON(github.event.release.body) }};
}
github.rest.actions.createWorkflowDispatch({
owner: "esphome",
repo: "home-assistant-addon",
workflow_id: "bump-version.yml",
ref: "main",
inputs: {
version: "${{ needs.init.outputs.tag }}",
content: description
}
})
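
The "Create manifest list and push" step above is dense: the jq -Rcnr filter turns the comma-separated tag list into repeated -t flags, and the printf expands the digest file names collected by the build jobs into image@sha256:<digest> references. A sketch of the jq half with invented tags (the real list comes from docker/generate_tags.py):

# hypothetical steps.tags.outputs.tags value
tags="ghcr.io/esphome/esphome:2024.6.0,ghcr.io/esphome/esphome:latest"
jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "$tags"
# -> -t ghcr.io/esphome/esphome:2024.6.0 -t ghcr.io/esphome/esphome:latest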

@ -1,8 +1,9 @@
---
name: Stale
on:
schedule:
- cron: '30 0 * * *'
- cron: "30 0 * * *"
workflow_dispatch:
permissions:
@ -16,7 +17,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v4
- uses: actions/stale@v9.0.0
with:
days-before-pr-stale: 90
days-before-pr-close: 7
@ -24,18 +25,19 @@ jobs:
days-before-issue-close: -1
remove-stale-when-updated: true
stale-pr-label: "stale"
exempt-pr-labels: "no-stale"
exempt-pr-labels: "not-stale"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
Thank you for your contributions.
# Use stale to automatically close issues with a reference to the issue tracker
# Use stale to automatically close issues with a
# reference to the issue tracker
close-issues:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v4
- uses: actions/stale@v9.0.0
with:
days-before-pr-stale: -1
days-before-pr-close: -1

@ -0,0 +1,48 @@
---
name: Synchronise Device Classes from Home Assistant
on:
workflow_dispatch:
schedule:
- cron: "45 6 * * *"
jobs:
sync:
name: Sync Device Classes
runs-on: ubuntu-latest
if: github.repository == 'esphome/esphome'
steps:
- name: Checkout
uses: actions/checkout@v4.1.7
- name: Checkout Home Assistant
uses: actions/checkout@v4.1.7
with:
repository: home-assistant/core
path: lib/home-assistant
- name: Setup Python
uses: actions/setup-python@v5.1.0
with:
python-version: 3.12
- name: Install Home Assistant
run: |
python -m pip install --upgrade pip
pip install -e lib/home-assistant
- name: Sync
run: |
python ./script/sync-device_class.py
- name: Commit changes
uses: peter-evans/create-pull-request@v6.1.0
with:
commit-message: "Synchronise Device Classes from Home Assistant"
committer: esphomebot <esphome@nabucasa.com>
author: esphomebot <esphome@nabucasa.com>
branch: sync/device-classes
delete-branch: true
title: "Synchronise Device Classes from Home Assistant"
body-path: .github/PULL_REQUEST_TEMPLATE.md
token: ${{ secrets.DEVICE_CLASS_SYNC_TOKEN }}

25
.github/workflows/yaml-lint.yml vendored Normal file
@ -0,0 +1,25 @@
---
name: YAML lint
on:
push:
branches: [dev, beta, release]
paths:
- "**.yaml"
- "**.yml"
pull_request:
paths:
- "**.yaml"
- "**.yml"
jobs:
yamllint:
name: yamllint
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Run yamllint
uses: frenck/action-yamllint@v1.5.0
with:
strict: true

10
.gitignore vendored
@ -13,6 +13,12 @@ __pycache__/
# Intellij Idea
.idea
# Eclipse
.project
.cproject
.pydevproject
.settings/
# Vim
*.swp
@ -128,3 +134,7 @@ tests/.esphome/
sdkconfig.*
!sdkconfig.defaults
.tests/
/components

@ -1,6 +0,0 @@
ports:
- port: 6052
onOpen: open-preview
tasks:
- before: pyenv local $(pyenv version | grep '^3\.' | cut -d ' ' -f 1) && script/setup
command: python -m esphome dashboard config

@ -1,16 +1,17 @@
---
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/ambv/black
rev: 22.3.0
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.4.2
hooks:
- id: black
args:
- --safe
- --quiet
files: ^((esphome|script|tests)/.+)?[^/]+\.py$
- repo: https://gitlab.com/pycqa/flake8
rev: 4.0.1
- repo: https://github.com/PyCQA/flake8
rev: 6.1.0
hooks:
- id: flake8
additional_dependencies:
@ -26,7 +27,23 @@ repos:
- --branch=release
- --branch=beta
- repo: https://github.com/asottile/pyupgrade
rev: v2.31.1
rev: v3.15.2
hooks:
- id: pyupgrade
args: [--py38-plus]
args: [--py39-plus]
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.35.1
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v13.0.1
hooks:
- id: clang-format
types_or: [c, c++]
- repo: local
hooks:
- id: pylint
name: pylint
entry: script/run-in-env.sh pylint
language: script
types: [python]
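
These hooks run through the standard pre-commit tooling; the commands below are the stock CLI (nothing specific to this change) for exercising the config locally:

pip install pre-commit
pre-commit install           # register the git hook; black, flake8, pyupgrade, yamllint, clang-format and pylint then run on commit
pre-commit run --all-files   # or run every hook across the whole tree once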

33
.vscode/tasks.json vendored
@ -2,15 +2,24 @@
"version": "2.0.0",
"tasks": [
{
"label": "run",
"label": "Run Dashboard",
"type": "shell",
"command": "python3 -m esphome dashboard config/",
"command": "${command:python.interpreterPath}",
"args": [
"-m",
"esphome",
"dashboard",
"config/"
],
"problemMatcher": []
},
{
"label": "clang-tidy",
"type": "shell",
"command": "./script/clang-tidy",
"command": "${command:python.interpreterPath}",
"args": [
"./script/clang-tidy"
],
"problemMatcher": [
{
"owner": "clang-tidy",
@ -27,6 +36,24 @@
]
}
]
},
{
"label": "Generate proto files",
"type": "shell",
"command": "${command:python.interpreterPath}",
"args": [
"./script/api_protobuf/api_protobuf.py"
],
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"reveal": "never",
"close": true,
"panel": "new"
},
"problemMatcher": []
}
]
}

19
.yamllint Normal file
@ -0,0 +1,19 @@
---
extends: default
ignore-from-file: .gitignore
rules:
document-start: disable
empty-lines:
level: error
max: 1
max-start: 0
max-end: 1
indentation:
level: error
spaces: 2
indent-sequences: true
check-multi-line-strings: false
line-length: disable
truthy: disable

@ -6,101 +6,220 @@
# the integration's code owner is automatically notified.
# Core Code
setup.py @esphome/core
pyproject.toml @esphome/core
esphome/*.py @esphome/core
esphome/core/* @esphome/core
# Integrations
esphome/components/a01nyub/* @MrSuicideParrot
esphome/components/a02yyuw/* @TH-Braemer
esphome/components/absolute_humidity/* @DAVe3283
esphome/components/ac_dimmer/* @glmnet
esphome/components/adc/* @esphome/core
esphome/components/adc128s102/* @DeerMaximum
esphome/components/addressable_light/* @justfalter
esphome/components/ade7880/* @kpfleming
esphome/components/ade7953/* @angelnu
esphome/components/ade7953_i2c/* @angelnu
esphome/components/ade7953_spi/* @angelnu
esphome/components/ads1118/* @solomondg1
esphome/components/ags10/* @mak-42
esphome/components/airthings_ble/* @jeromelaban
esphome/components/airthings_wave_base/* @jeromelaban @kpfleming @ncareau
esphome/components/airthings_wave_mini/* @ncareau
esphome/components/airthings_wave_plus/* @jeromelaban
esphome/components/alarm_control_panel/* @grahambrown11 @hwstar
esphome/components/alpha3/* @jan-hofmeier
esphome/components/am2315c/* @swoboda1337
esphome/components/am43/* @buxtronix
esphome/components/am43/cover/* @buxtronix
esphome/components/am43/sensor/* @buxtronix
esphome/components/analog_threshold/* @ianchi
esphome/components/animation/* @syndlex
esphome/components/anova/* @buxtronix
esphome/components/api/* @OttoWinter
esphome/components/as5600/* @ammmze
esphome/components/as5600/sensor/* @ammmze
esphome/components/as7341/* @mrgnr
esphome/components/async_tcp/* @OttoWinter
esphome/components/at581x/* @X-Ryl669
esphome/components/atc_mithermometer/* @ahpohl
esphome/components/atm90e26/* @danieltwagner
esphome/components/b_parasite/* @rbaron
esphome/components/ballu/* @bazuchan
esphome/components/bang_bang/* @OttoWinter
esphome/components/bedjet/* @jhansche
esphome/components/bedjet/climate/* @jhansche
esphome/components/bedjet/fan/* @jhansche
esphome/components/bedjet/sensor/* @javawizard @jhansche
esphome/components/beken_spi_led_strip/* @Mat931
esphome/components/bh1750/* @OttoWinter
esphome/components/binary_sensor/* @esphome/core
esphome/components/bk72xx/* @kuba2k2
esphome/components/bl0939/* @ziceva
esphome/components/bl0940/* @tobias-
esphome/components/ble_client/* @buxtronix
esphome/components/bl0942/* @dbuezas
esphome/components/ble_client/* @buxtronix @clydebarrow
esphome/components/bluetooth_proxy/* @jesserockz
esphome/components/bme280_base/* @esphome/core
esphome/components/bme280_spi/* @apbodrov
esphome/components/bme680_bsec/* @trvrnrth
esphome/components/bmp3xx/* @martgras
esphome/components/bmi160/* @flaviut
esphome/components/bmp3xx/* @latonita
esphome/components/bmp3xx_base/* @latonita @martgras
esphome/components/bmp3xx_i2c/* @latonita
esphome/components/bmp3xx_spi/* @latonita
esphome/components/bmp581/* @kahrendt
esphome/components/bp1658cj/* @Cossid
esphome/components/bp5758d/* @Cossid
esphome/components/button/* @esphome/core
esphome/components/canbus/* @danielschramm @mvturnho
esphome/components/cap1188/* @MrEditor97
esphome/components/cap1188/* @mreditor97
esphome/components/captive_portal/* @OttoWinter
esphome/components/ccs811/* @habbie
esphome/components/cd74hc4067/* @asoehlke
esphome/components/climate/* @esphome/core
esphome/components/climate_ir/* @glmnet
esphome/components/color_temperature/* @jesserockz
esphome/components/combination/* @Cat-Ion @kahrendt
esphome/components/coolix/* @glmnet
esphome/components/copy/* @OttoWinter
esphome/components/cover/* @esphome/core
esphome/components/cs5460a/* @balrog-kun
esphome/components/cse7761/* @berfenger
esphome/components/cst226/* @clydebarrow
esphome/components/cst816/* @clydebarrow
esphome/components/ct_clamp/* @jesserockz
esphome/components/current_based/* @djwmarcx
esphome/components/dac7678/* @NickB1
esphome/components/daikin_arc/* @MagicBear
esphome/components/daikin_brc/* @hagak
esphome/components/dallas_temp/* @ssieb
esphome/components/daly_bms/* @s1lvi0
esphome/components/dashboard_import/* @esphome/core
esphome/components/datetime/* @jesserockz @rfdarter
esphome/components/debug/* @OttoWinter
esphome/components/delonghi/* @grob6000
esphome/components/dfplayer/* @glmnet
esphome/components/dfrobot_sen0395/* @niklasweber
esphome/components/dht/* @OttoWinter
esphome/components/display_menu_base/* @numo68
esphome/components/dps310/* @kbx81
esphome/components/ds1307/* @badbadc0ffee
esphome/components/dsmr/* @glmnet @zuidwijk
esphome/components/ektf2232/* @jesserockz
esphome/components/duty_time/* @dudanov
esphome/components/ee895/* @Stock-M
esphome/components/ektf2232/touchscreen/* @jesserockz
esphome/components/emc2101/* @ellull
esphome/components/emmeti/* @E440QF
esphome/components/ens160/* @latonita
esphome/components/ens160_base/* @latonita @vincentscode
esphome/components/ens160_i2c/* @latonita
esphome/components/ens160_spi/* @latonita
esphome/components/ens210/* @itn3rd77
esphome/components/esp32/* @esphome/core
esphome/components/esp32_ble/* @jesserockz
esphome/components/esp32_ble_server/* @jesserockz
esphome/components/esp32_ble/* @Rapsssito @jesserockz
esphome/components/esp32_ble_client/* @jesserockz
esphome/components/esp32_ble_server/* @Rapsssito @clydebarrow @jesserockz
esphome/components/esp32_camera_web_server/* @ayufan
esphome/components/esp32_can/* @Sympatron
esphome/components/esp32_improv/* @jesserockz
esphome/components/esp32_rmt/* @jesserockz
esphome/components/esp32_rmt_led_strip/* @jesserockz
esphome/components/esp8266/* @esphome/core
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/event/* @nohat
esphome/components/exposure_notifications/* @OttoWinter
esphome/components/ezo/* @ssieb
esphome/components/ezo_pmp/* @carlos-sarmiento
esphome/components/factory_reset/* @anatoly-savchenkov
esphome/components/fastled_base/* @OttoWinter
esphome/components/fingerprint_grow/* @OnFreund @loongyh
esphome/components/feedback/* @ianchi
esphome/components/fingerprint_grow/* @OnFreund @alexborro @loongyh
esphome/components/font/* @clydebarrow @esphome/core
esphome/components/fs3000/* @kahrendt
esphome/components/ft5x06/* @clydebarrow
esphome/components/ft63x6/* @gpambrozio
esphome/components/gcja5/* @gcormier
esphome/components/gdk101/* @Szewcson
esphome/components/globals/* @esphome/core
esphome/components/gp8403/* @jesserockz
esphome/components/gpio/* @esphome/core
esphome/components/gpio/one_wire/* @ssieb
esphome/components/gps/* @coogle
esphome/components/graph/* @synco
esphome/components/graphical_display_menu/* @MrMDavidson
esphome/components/gree/* @orestismers
esphome/components/grove_tb6612fng/* @max246
esphome/components/growatt_solar/* @leeuwte
esphome/components/gt911/* @clydebarrow @jesserockz
esphome/components/haier/* @paveldn
esphome/components/haier/binary_sensor/* @paveldn
esphome/components/haier/button/* @paveldn
esphome/components/haier/sensor/* @paveldn
esphome/components/haier/text_sensor/* @paveldn
esphome/components/havells_solar/* @sourabhjaiswal
esphome/components/hbridge/fan/* @WeekendWarrior
esphome/components/hbridge/light/* @DotNetDann
esphome/components/he60r/* @clydebarrow
esphome/components/heatpumpir/* @rob-deutsch
esphome/components/hitachi_ac424/* @sourabhjaiswal
esphome/components/hm3301/* @freekode
esphome/components/homeassistant/* @OttoWinter
esphome/components/honeywell_hih_i2c/* @Benichou34
esphome/components/honeywellabp/* @RubyBailey
esphome/components/honeywellabp2_i2c/* @jpfaff
esphome/components/host/* @clydebarrow @esphome/core
esphome/components/host/time/* @clydebarrow
esphome/components/hrxl_maxsonar_wr/* @netmikey
esphome/components/hte501/* @Stock-M
esphome/components/http_request/ota/* @oarcher
esphome/components/http_request/update/* @jesserockz
esphome/components/htu31d/* @betterengineering
esphome/components/hydreon_rgxx/* @functionpointer
esphome/components/hyt271/* @Philippe12
esphome/components/i2c/* @esphome/core
esphome/components/i2s_audio/* @jesserockz
esphome/components/i2s_audio/media_player/* @jesserockz
esphome/components/i2s_audio/microphone/* @jesserockz
esphome/components/i2s_audio/speaker/* @jesserockz
esphome/components/iaqcore/* @yozik04
esphome/components/ili9xxx/* @clydebarrow @nielsnl68
esphome/components/improv_base/* @esphome/core
esphome/components/improv_serial/* @esphome/core
esphome/components/ina260/* @MrEditor97
esphome/components/ina226/* @Sergio303 @latonita
esphome/components/ina260/* @mreditor97
esphome/components/ina2xx_base/* @latonita
esphome/components/ina2xx_i2c/* @latonita
esphome/components/ina2xx_spi/* @latonita
esphome/components/inkbird_ibsth1_mini/* @fkirill
esphome/components/inkplate6/* @jesserockz
esphome/components/integration/* @OttoWinter
esphome/components/internal_temperature/* @Mat931
esphome/components/interval/* @esphome/core
esphome/components/jsn_sr04t/* @Mafus1
esphome/components/json/* @OttoWinter
esphome/components/kalman_combinator/* @Cat-Ion
esphome/components/kamstrup_kmp/* @cfeenstra1024
esphome/components/key_collector/* @ssieb
esphome/components/key_provider/* @ssieb
esphome/components/kuntze/* @ssieb
esphome/components/lcd_menu/* @numo68
esphome/components/ld2410/* @regevbr @sebcaps
esphome/components/ld2420/* @descipher
esphome/components/ledc/* @OttoWinter
esphome/components/libretiny/* @kuba2k2
esphome/components/libretiny_pwm/* @kuba2k2
esphome/components/light/* @esphome/core
esphome/components/lightwaverf/* @max246
esphome/components/lilygo_t5_47/touchscreen/* @jesserockz
esphome/components/lock/* @esphome/core
esphome/components/logger/* @esphome/core
esphome/components/ltr390/* @sjtrny
esphome/components/ltr_als_ps/* @latonita
esphome/components/matrix_keypad/* @ssieb
esphome/components/max31865/* @DAVe3283
esphome/components/max44009/* @berfenger
esphome/components/max6956/* @looping40
esphome/components/max7219digit/* @rspaargaren
esphome/components/max9611/* @mckaymatthew
esphome/components/mcp23008/* @jesserockz
@ -114,13 +233,22 @@ esphome/components/mcp2515/* @danielschramm @mvturnho
esphome/components/mcp3204/* @rsumner
esphome/components/mcp4728/* @berfenger
esphome/components/mcp47a1/* @jesserockz
esphome/components/mcp9600/* @mreditor97
esphome/components/mcp9808/* @k7hpn
esphome/components/md5/* @esphome/core
esphome/components/mdns/* @esphome/core
esphome/components/media_player/* @jesserockz
esphome/components/micro_wake_word/* @jesserockz @kahrendt
esphome/components/micronova/* @jorre05
esphome/components/microphone/* @jesserockz
esphome/components/mics_4514/* @jesserockz
esphome/components/midea/* @dudanov
esphome/components/midea_ir/* @dudanov
esphome/components/mitsubishi/* @RubyBailey
esphome/components/mlx90393/* @functionpointer
esphome/components/mlx90614/* @jesserockz
esphome/components/mmc5603/* @benhoff
esphome/components/mmc5983/* @agoode
esphome/components/modbus_controller/* @martgras
esphome/components/modbus_controller/binary_sensor/* @martgras
esphome/components/modbus_controller/number/* @martgras
@ -129,60 +257,103 @@ esphome/components/modbus_controller/select/* @martgras @stegm
esphome/components/modbus_controller/sensor/* @martgras
esphome/components/modbus_controller/switch/* @martgras
esphome/components/modbus_controller/text_sensor/* @martgras
esphome/components/mopeka_ble/* @spbrogan
esphome/components/mopeka_ble/* @Fabian-Schmidt @spbrogan
esphome/components/mopeka_pro_check/* @spbrogan
esphome/components/mopeka_std_check/* @Fabian-Schmidt
esphome/components/mpl3115a2/* @kbickar
esphome/components/mpu6886/* @fabaff
esphome/components/ms8607/* @e28eta
esphome/components/network/* @esphome/core
esphome/components/nextion/* @senexcrenshaw
esphome/components/nextion/* @edwardtfn @senexcrenshaw
esphome/components/nextion/binary_sensor/* @senexcrenshaw
esphome/components/nextion/sensor/* @senexcrenshaw
esphome/components/nextion/switch/* @senexcrenshaw
esphome/components/nextion/text_sensor/* @senexcrenshaw
esphome/components/nfc/* @jesserockz
esphome/components/nfc/* @jesserockz @kbx81
esphome/components/noblex/* @AGalfra
esphome/components/number/* @esphome/core
esphome/components/one_wire/* @ssieb
esphome/components/ota/* @esphome/core
esphome/components/output/* @esphome/core
esphome/components/pca6416a/* @Mat931
esphome/components/pca9554/* @clydebarrow @hwstar
esphome/components/pcf85063/* @brogon
esphome/components/pcf8563/* @KoenBreeman
esphome/components/pid/* @OttoWinter
esphome/components/pipsolar/* @andreashergert1984
esphome/components/pm1006/* @habbie
esphome/components/pmsa003i/* @sjtrny
esphome/components/pmwcs3/* @SeByDocKy
esphome/components/pn532/* @OttoWinter @jesserockz
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
esphome/components/pn532_spi/* @OttoWinter @jesserockz
esphome/components/pn7150/* @jesserockz @kbx81
esphome/components/pn7150_i2c/* @jesserockz @kbx81
esphome/components/pn7160/* @jesserockz @kbx81
esphome/components/pn7160_i2c/* @jesserockz @kbx81
esphome/components/pn7160_spi/* @jesserockz @kbx81
esphome/components/power_supply/* @esphome/core
esphome/components/preferences/* @esphome/core
esphome/components/psram/* @esphome/core
esphome/components/pulse_meter/* @cstaahl @stevebaxter
esphome/components/pulse_meter/* @TrentHouliston @cstaahl @stevebaxter
esphome/components/pvvx_mithermometer/* @pasiz
esphome/components/pylontech/* @functionpointer
esphome/components/qmp6988/* @andrewpc
esphome/components/qr_code/* @wjtje
esphome/components/qspi_amoled/* @clydebarrow
esphome/components/qwiic_pir/* @kahrendt
esphome/components/radon_eye_ble/* @jeffeb3
esphome/components/radon_eye_rd200/* @jeffeb3
esphome/components/rc522/* @glmnet
esphome/components/rc522_i2c/* @glmnet
esphome/components/rc522_spi/* @glmnet
esphome/components/resistance_sampler/* @jesserockz
esphome/components/restart/* @esphome/core
esphome/components/rf_bridge/* @jesserockz
esphome/components/rgbct/* @jesserockz
esphome/components/rp2040/* @jesserockz
esphome/components/rp2040_pio_led_strip/* @Papa-DMan
esphome/components/rp2040_pwm/* @jesserockz
esphome/components/rpi_dpi_rgb/* @clydebarrow
esphome/components/rtl87xx/* @kuba2k2
esphome/components/rtttl/* @glmnet
esphome/components/safe_mode/* @jsuanet @paulmonigatti
esphome/components/scd4x/* @sjtrny
esphome/components/safe_mode/* @jsuanet @kbx81 @paulmonigatti
esphome/components/scd4x/* @martgras @sjtrny
esphome/components/script/* @esphome/core
esphome/components/sdl/* @clydebarrow
esphome/components/sdm_meter/* @jesserockz @polyfaces
esphome/components/sdp3x/* @Azimath
esphome/components/seeed_mr24hpc1/* @limengdu
esphome/components/selec_meter/* @sourabhjaiswal
esphome/components/select/* @esphome/core
esphome/components/sen0321/* @notjj
esphome/components/sen21231/* @shreyaskarnik
esphome/components/sen5x/* @martgras
esphome/components/sensirion_common/* @martgras
esphome/components/sensor/* @esphome/core
esphome/components/sfa30/* @ghsensdev
esphome/components/sgp40/* @SenexCrenshaw
esphome/components/sgp4x/* @SenexCrenshaw @martgras
esphome/components/shelly_dimmer/* @edge90 @rnauber
esphome/components/sht3xd/* @mrtoy-me
esphome/components/sht4x/* @sjtrny
esphome/components/shutdown/* @esphome/core @jsuanet
esphome/components/sigma_delta_output/* @Cat-Ion
esphome/components/sim800l/* @glmnet
esphome/components/sm2135/* @BoukeHaarsma23
esphome/components/sm10bit_base/* @Cossid
esphome/components/sm2135/* @BoukeHaarsma23 @dd32 @matika77
esphome/components/sm2235/* @Cossid
esphome/components/sm2335/* @Cossid
esphome/components/sml/* @alengwenus
esphome/components/smt100/* @piechade
esphome/components/sn74hc165/* @jesserockz
esphome/components/socket/* @esphome/core
esphome/components/sonoff_d1/* @anatoly-savchenkov
esphome/components/spi/* @esphome/core
esphome/components/speaker/* @jesserockz
esphome/components/spi/* @clydebarrow @esphome/core
esphome/components/spi_device/* @clydebarrow
esphome/components/spi_led_strip/* @clydebarrow
esphome/components/sprinkler/* @kbx81
esphome/components/sps30/* @martgras
esphome/components/ssd1322_base/* @kbx81
esphome/components/ssd1322_spi/* @kbx81
@ -195,40 +366,91 @@ esphome/components/ssd1331_base/* @kbx81
esphome/components/ssd1331_spi/* @kbx81
esphome/components/ssd1351_base/* @kbx81
esphome/components/ssd1351_spi/* @kbx81
esphome/components/st7567_base/* @latonita
esphome/components/st7567_i2c/* @latonita
esphome/components/st7567_spi/* @latonita
esphome/components/st7701s/* @clydebarrow
esphome/components/st7735/* @SenexCrenshaw
esphome/components/st7789v/* @kbx81
esphome/components/st7920/* @marsjan155
esphome/components/substitutions/* @esphome/core
esphome/components/sun/* @OttoWinter
esphome/components/sun_gtil2/* @Mat931
esphome/components/switch/* @esphome/core
esphome/components/t6615/* @tylermenezes
esphome/components/tca9548a/* @andreashergert1984
esphome/components/tcl112/* @glmnet
esphome/components/tee501/* @Stock-M
esphome/components/teleinfo/* @0hax
esphome/components/template/alarm_control_panel/* @grahambrown11 @hwstar
esphome/components/template/datetime/* @rfdarter
esphome/components/template/event/* @nohat
esphome/components/template/fan/* @ssieb
esphome/components/text/* @mauritskorse
esphome/components/thermostat/* @kbx81
esphome/components/time/* @OttoWinter
esphome/components/tlc5947/* @rnauber
esphome/components/tlc5971/* @IJIJI
esphome/components/tm1621/* @Philippe12
esphome/components/tm1637/* @glmnet
esphome/components/tm1638/* @skykingjwc
esphome/components/tm1651/* @freekode
esphome/components/tmp102/* @timsavage
esphome/components/tmp1075/* @sybrenstuvel
esphome/components/tmp117/* @Azimath
esphome/components/tof10120/* @wstrzalka
esphome/components/toshiba/* @kbx81
esphome/components/touchscreen/* @jesserockz
esphome/components/touchscreen/* @jesserockz @nielsnl68
esphome/components/tsl2591/* @wjcarpenter
esphome/components/tt21100/* @kroimon
esphome/components/tuya/binary_sensor/* @jesserockz
esphome/components/tuya/climate/* @jesserockz
esphome/components/tuya/number/* @frankiboy1
esphome/components/tuya/select/* @bearpawmaxim
esphome/components/tuya/sensor/* @jesserockz
esphome/components/tuya/switch/* @jesserockz
esphome/components/tuya/text_sensor/* @dentra
esphome/components/uart/* @esphome/core
esphome/components/uart/button/* @ssieb
esphome/components/ufire_ec/* @pvizeli
esphome/components/ufire_ise/* @pvizeli
esphome/components/ultrasonic/* @OttoWinter
esphome/components/update/* @jesserockz
esphome/components/uponor_smatrix/* @kroimon
esphome/components/valve/* @esphome/core
esphome/components/vbus/* @ssieb
esphome/components/veml3235/* @kbx81
esphome/components/veml7700/* @latonita
esphome/components/version/* @esphome/core
esphome/components/wake_on_lan/* @willwill2will54
esphome/components/voice_assistant/* @jesserockz
esphome/components/wake_on_lan/* @clydebarrow @willwill2will54
esphome/components/waveshare_epaper/* @clydebarrow
esphome/components/web_server_base/* @OttoWinter
esphome/components/web_server_idf/* @dentra
esphome/components/weikai/* @DrCoolZic
esphome/components/weikai_i2c/* @DrCoolZic
esphome/components/weikai_spi/* @DrCoolZic
esphome/components/whirlpool/* @glmnet
esphome/components/whynter/* @aeonsablaze
esphome/components/wiegand/* @ssieb
esphome/components/wireguard/* @droscy @lhoracek @thomas0bernard
esphome/components/wk2132_i2c/* @DrCoolZic
esphome/components/wk2132_spi/* @DrCoolZic
esphome/components/wk2168_i2c/* @DrCoolZic
esphome/components/wk2168_spi/* @DrCoolZic
esphome/components/wk2204_i2c/* @DrCoolZic
esphome/components/wk2204_spi/* @DrCoolZic
esphome/components/wk2212_i2c/* @DrCoolZic
esphome/components/wk2212_spi/* @DrCoolZic
esphome/components/wl_134/* @hobbypunk90
esphome/components/x9c/* @EtienneMD
esphome/components/xgzp68xx/* @gcormier
esphome/components/xiaomi_hhccjcy10/* @fariouche
esphome/components/xiaomi_lywsd03mmc/* @ahpohl
esphome/components/xiaomi_mhoc303/* @drug123
esphome/components/xiaomi_mhoc401/* @vevsvevs
esphome/components/xiaomi_rtcgq02lm/* @jesserockz
esphome/components/xpt2046/* @numo68
esphome/components/xl9535/* @mreditor97
esphome/components/xpt2046/touchscreen/* @nielsnl68 @numo68
esphome/components/zhlt01/* @cfeenstra1024
esphome/components/zio_ultrasonic/* @kahrendt

View file

@ -10,5 +10,3 @@ Things to note when contributing:
for more information.
- Please also update the tests in the `tests/` folder. You can do so by just adding a line in one of the YAML files
which checks if your new feature compiles correctly.
- Sometimes I will let pull requests linger because I'm not 100% sure about them. Please feel free to ping
me after some time.

View file

@ -1,8 +1,6 @@
include LICENSE
include README.md
include requirements.txt
include esphome/dashboard/templates/*.html
recursive-include esphome/dashboard/static *.ico *.js *.css *.woff* LICENSE
recursive-include esphome *.cpp *.h *.tcc
recursive-include esphome *.cpp *.h *.tcc *.c
recursive-include esphome *.py.script
recursive-include esphome LICENSE.txt

View file

@ -5,32 +5,57 @@
# One of "docker", "hassio"
ARG BASEIMGTYPE=docker
# https://github.com/hassio-addons/addon-debian-base/releases
FROM ghcr.io/hassio-addons/debian-base/amd64:5.3.0 AS base-hassio-amd64
FROM ghcr.io/hassio-addons/debian-base/aarch64:5.3.0 AS base-hassio-arm64
FROM ghcr.io/hassio-addons/debian-base/armv7:5.3.0 AS base-hassio-armv7
# https://hub.docker.com/_/debian?tab=tags&page=1&name=bullseye
FROM debian:bullseye-20220328-slim AS base-docker-amd64
FROM debian:bullseye-20220328-slim AS base-docker-arm64
FROM debian:bullseye-20220328-slim AS base-docker-armv7
# Use TARGETARCH/TARGETVARIANT defined by docker
# https://docs.docker.com/engine/reference/builder/#automatic-platform-args-in-the-global-scope
FROM base-${BASEIMGTYPE}-${TARGETARCH}${TARGETVARIANT} AS base
# https://github.com/hassio-addons/addon-debian-base/releases
FROM ghcr.io/hassio-addons/debian-base:7.2.0 AS base-hassio
# https://hub.docker.com/_/debian?tab=tags&page=1&name=bookworm
FROM debian:12.2-slim AS base-docker
FROM base-${BASEIMGTYPE} AS base
ARG TARGETARCH
ARG TARGETVARIANT
# Note that --break-system-packages is used below because
# https://peps.python.org/pep-0668/ added a safety check that prevents
# installing packages with the same name as a system package. This is
# not a problem for us: we run in an isolated container, so overwriting
# system packages is not a concern.
RUN \
apt-get update \
# Use pinned versions so that we get updates with build caching
&& apt-get install -y --no-install-recommends \
python3=3.9.2-3 \
python3-pip=20.3.4-4+deb11u1 \
python3-setuptools=52.0.0-4 \
python3-pil=8.1.2+dfsg-0.3+deb11u1 \
python3-cryptography=3.3.2-1 \
iputils-ping=3:20210202-1 \
git=1:2.30.2-1 \
curl=7.74.0-1.3+deb11u1 \
&& rm -rf \
python3-pip=23.0.1+dfsg-1 \
python3-setuptools=66.1.1-1 \
python3-venv=3.11.2-1+b1 \
python3-wheel=0.38.4-2 \
iputils-ping=3:20221126-1 \
git=1:2.39.2-1.1 \
curl=7.88.1-10+deb12u5 \
openssh-client=1:9.2p1-2+deb12u2 \
python3-cffi=1.15.1-5 \
libcairo2=1.16.0-7 \
libmagic1=1:5.44-3 \
patch=2.7.6-7; \
if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
apt-get install -y --no-install-recommends \
build-essential=12.9 \
python3-dev=3.11.2-1+b1 \
zlib1g-dev=1:1.2.13.dfsg-1 \
libjpeg-dev=1:2.1.5-2 \
libfreetype-dev=2.12.1+dfsg-5 \
libssl-dev=3.0.11-1~deb12u2 \
libffi-dev=3.4.4-1 \
libopenjp2-7=2.5.0-2 \
libtiff6=4.5.0-6+deb12u1 \
cargo=0.66.0+ds1-1 \
pkg-config=1.8.1-1 \
gcc-arm-linux-gnueabihf=4:12.2.0-3; \
fi; \
rm -rf \
/tmp/* \
/var/{cache,log}/* \
/var/lib/apt/lists/*
@ -41,24 +66,43 @@ ENV \
# Store globally installed pio libs in /piolibs
PLATFORMIO_GLOBALLIB_DIR=/piolibs
# Support legacy binaries on Debian multiarch system. There is no "correct" way
# to do this, other than using properly built toolchains...
# See: https://unix.stackexchange.com/questions/553743/correct-way-to-add-lib-ld-linux-so-3-in-debian
RUN \
if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
ln -s /lib/arm-linux-gnueabihf/ld-linux-armhf.so.3 /lib/ld-linux.so.3; \
fi
RUN \
# Ubuntu python3-pip is missing wheel
pip3 install --no-cache-dir \
wheel==0.37.1 \
platformio==5.2.5 \
if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
fi; \
pip3 install \
--break-system-packages --no-cache-dir \
# Keep platformio version in sync with requirements.txt
platformio==6.1.15 \
# Change some platformio settings
&& platformio settings set enable_telemetry No \
&& platformio settings set check_libraries_interval 1000000 \
&& platformio settings set check_platformio_interval 1000000 \
&& platformio settings set check_platforms_interval 1000000 \
&& mkdir -p /piolibs
# First install requirements to leverage caching when requirements don't change
COPY requirements.txt requirements_optional.txt docker/platformio_install_deps.py platformio.ini /
RUN \
pip3 install --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
&& /platformio_install_deps.py /platformio.ini
# tmpfs is for https://github.com/rust-lang/cargo/issues/8719
COPY requirements.txt requirements_optional.txt script/platformio_install_deps.py platformio.ini /
RUN --mount=type=tmpfs,target=/root/.cargo if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
fi; \
CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse CARGO_HOME=/root/.cargo \
pip3 install \
--break-system-packages --no-cache-dir -r /requirements.txt -r /requirements_optional.txt \
&& /platformio_install_deps.py /platformio.ini --libraries
# Avoid unsafe git error when container user and file config volume permissions don't match
RUN git config --system --add safe.directory '*'
# ======================= docker-type image =======================
@ -66,7 +110,11 @@ FROM base AS docker
# Copy esphome and install
COPY . /esphome
RUN pip3 install --no-cache-dir --no-use-pep517 -e /esphome
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
fi; \
pip3 install \
--break-system-packages --no-cache-dir -e /esphome
# Settings for dashboard
ENV USERNAME="" PASSWORD=""
@ -74,6 +122,10 @@ ENV USERNAME="" PASSWORD=""
# Expose the dashboard to Docker
EXPOSE 6052
# Run healthcheck (heartbeat)
HEALTHCHECK --interval=30s --timeout=30s \
CMD curl --fail http://localhost:6052/version -A "HealthCheck" || exit 1
COPY docker/docker_entrypoint.sh /entrypoint.sh
# The directory the user should mount their configuration files to
@ -95,7 +147,7 @@ RUN \
apt-get update \
# Use pinned versions so that we get updates with build caching
&& apt-get install -y --no-install-recommends \
nginx-light=1.18.0-6.1 \
nginx-light=1.22.1-9 \
&& rm -rf \
/tmp/* \
/var/{cache,log}/* \
@ -108,7 +160,11 @@ COPY docker/ha-addon-rootfs/ /
# Copy esphome and install
COPY . /esphome
RUN pip3 install --no-cache-dir --no-use-pep517 -e /esphome
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
fi; \
pip3 install \
--break-system-packages --no-cache-dir -e /esphome
# Labels
LABEL \
@ -131,20 +187,24 @@ RUN \
apt-get update \
# Use pinned versions so that we get updates with build caching
&& apt-get install -y --no-install-recommends \
clang-format-11=1:11.0.1-2 \
clang-tidy-11=1:11.0.1-2 \
clang-format-13=1:13.0.1-11+b2 \
clang-tidy-14=1:14.0.6-12 \
patch=2.7.6-7 \
software-properties-common=0.96.20.2-2.1 \
nano=5.4-2 \
software-properties-common=0.99.30-4 \
nano=7.2-1 \
build-essential=12.9 \
python3-dev=3.9.2-3 \
python3-dev=3.11.2-1+b1 \
&& rm -rf \
/tmp/* \
/var/{cache,log}/* \
/var/lib/apt/lists/*
COPY requirements_test.txt /
RUN pip3 install --no-cache-dir -r /requirements_test.txt
RUN if [ "$TARGETARCH$TARGETVARIANT" = "armv7" ]; then \
export PIP_EXTRA_INDEX_URL="https://www.piwheels.org/simple"; \
fi; \
pip3 install \
--break-system-packages --no-cache-dir -r /requirements_test.txt
VOLUME ["/esphome"]
WORKDIR /esphome

View file

@ -8,32 +8,49 @@ import re
import sys
CHANNEL_DEV = 'dev'
CHANNEL_BETA = 'beta'
CHANNEL_RELEASE = 'release'
CHANNEL_DEV = "dev"
CHANNEL_BETA = "beta"
CHANNEL_RELEASE = "release"
CHANNELS = [CHANNEL_DEV, CHANNEL_BETA, CHANNEL_RELEASE]
ARCH_AMD64 = 'amd64'
ARCH_ARMV7 = 'armv7'
ARCH_AARCH64 = 'aarch64'
ARCH_AMD64 = "amd64"
ARCH_ARMV7 = "armv7"
ARCH_AARCH64 = "aarch64"
ARCHS = [ARCH_AMD64, ARCH_ARMV7, ARCH_AARCH64]
TYPE_DOCKER = 'docker'
TYPE_HA_ADDON = 'ha-addon'
TYPE_LINT = 'lint'
TYPE_DOCKER = "docker"
TYPE_HA_ADDON = "ha-addon"
TYPE_LINT = "lint"
TYPES = [TYPE_DOCKER, TYPE_HA_ADDON, TYPE_LINT]
parser = argparse.ArgumentParser()
parser.add_argument("--tag", type=str, required=True, help="The main docker tag to push to. If a version number also adds latest and/or beta tag")
parser.add_argument("--arch", choices=ARCHS, required=False, help="The architecture to build for")
parser.add_argument("--build-type", choices=TYPES, required=True, help="The type of build to run")
parser.add_argument("--dry-run", action="store_true", help="Don't run any commands, just print them")
subparsers = parser.add_subparsers(help="Action to perform", dest="command", required=True)
parser.add_argument(
"--tag",
type=str,
required=True,
help="The main docker tag to push to. If a version number also adds latest and/or beta tag",
)
parser.add_argument(
"--arch", choices=ARCHS, required=False, help="The architecture to build for"
)
parser.add_argument(
"--build-type", choices=TYPES, required=True, help="The type of build to run"
)
parser.add_argument(
"--dry-run", action="store_true", help="Don't run any commands, just print them"
)
subparsers = parser.add_subparsers(
help="Action to perform", dest="command", required=True
)
build_parser = subparsers.add_parser("build", help="Build the image")
build_parser.add_argument("--push", help="Also push the images", action="store_true")
build_parser.add_argument("--load", help="Load the docker image locally", action="store_true")
manifest_parser = subparsers.add_parser("manifest", help="Create a manifest from already pushed images")
build_parser.add_argument(
"--load", help="Load the docker image locally", action="store_true"
)
manifest_parser = subparsers.add_parser(
"manifest", help="Create a manifest from already pushed images"
)
@dataclass(frozen=True)
@ -49,7 +66,7 @@ class DockerParams:
prefix = {
TYPE_DOCKER: "esphome/esphome",
TYPE_HA_ADDON: "esphome/esphome-hassio",
TYPE_LINT: "esphome/esphome-lint"
TYPE_LINT: "esphome/esphome-lint",
}[build_type]
build_to = f"{prefix}-{arch}"
baseimgtype = {
@ -88,10 +105,12 @@ def main():
sys.exit(1)
# detect channel from tag
match = re.match(r'^\d+\.\d+(?:\.\d+)?(b\d+)?$', args.tag)
match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", args.tag)
major_minor_version = None
if match is None:
channel = CHANNEL_DEV
elif match.group(1) is None:
elif match.group(2) is None:
major_minor_version = match.group(1)
channel = CHANNEL_RELEASE
else:
channel = CHANNEL_BETA
@ -106,6 +125,11 @@ def main():
tags_to_push.append("beta")
tags_to_push.append("latest")
# Compatibility with HA tags
if major_minor_version:
tags_to_push.append("stable")
tags_to_push.append(major_minor_version)
if args.command == "build":
# 1. pull cache image
params = DockerParams.for_type_arch(args.build_type, args.arch)
@ -121,13 +145,21 @@ def main():
# 3. build
cmd = [
"docker", "buildx", "build",
"--build-arg", f"BASEIMGTYPE={params.baseimgtype}",
"--build-arg", f"BUILD_VERSION={args.tag}",
"--cache-from", f"type=registry,ref={cache_img}",
"--file", "docker/Dockerfile",
"--platform", params.platform,
"--target", params.target,
"docker",
"buildx",
"build",
"--build-arg",
f"BASEIMGTYPE={params.baseimgtype}",
"--build-arg",
f"BUILD_VERSION={args.tag}",
"--cache-from",
f"type=registry,ref={cache_img}",
"--file",
"docker/Dockerfile",
"--platform",
params.platform,
"--target",
params.target,
]
for img in imgs:
cmd += ["--tag", img]
@ -153,9 +185,7 @@ def main():
run_command(*cmd)
# 2. Push manifests
for target in targets:
run_command(
"docker", "manifest", "push", target
)
run_command("docker", "manifest", "push", target)
if __name__ == "__main__":

View file

@ -21,4 +21,10 @@ export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"
# If /build is mounted, use that as the build path
# otherwise use path in /config (so that builds aren't lost on container restart)
if [[ -d /build ]]; then
export ESPHOME_BUILD_PATH=/build
fi
exec esphome "$@"

92
docker/generate_tags.py Executable file
View file

@ -0,0 +1,92 @@
#!/usr/bin/env python3
import re
import argparse
CHANNEL_DEV = "dev"
CHANNEL_BETA = "beta"
CHANNEL_RELEASE = "release"
GHCR = "ghcr"
DOCKERHUB = "dockerhub"
parser = argparse.ArgumentParser()
parser.add_argument(
"--tag",
type=str,
required=True,
help="The main docker tag to push to. If a version number also adds latest and/or beta tag",
)
parser.add_argument(
"--suffix",
type=str,
required=True,
help="The suffix of the tag.",
)
parser.add_argument(
"--registry",
type=str,
choices=[GHCR, DOCKERHUB],
required=False,
action="append",
help="The registry to build tags for.",
)
def main():
args = parser.parse_args()
# detect channel from tag
match = re.match(r"^(\d+\.\d+)(?:\.\d+)(?:(b\d+)|(-dev\d+))?$", args.tag)
major_minor_version = None
if match is None: # eg 2023.12.0-dev20231109-testbranch
channel = None # Ran with custom tag for a branch etc
elif match.group(3) is not None: # eg 2023.12.0-dev20231109
channel = CHANNEL_DEV
elif match.group(2) is not None: # eg 2023.12.0b1
channel = CHANNEL_BETA
else: # eg 2023.12.0
major_minor_version = match.group(1)
channel = CHANNEL_RELEASE
tags_to_push = [args.tag]
if channel == CHANNEL_DEV:
tags_to_push.append("dev")
elif channel == CHANNEL_BETA:
tags_to_push.append("beta")
elif channel == CHANNEL_RELEASE:
# Additionally push to beta
tags_to_push.append("beta")
tags_to_push.append("latest")
if major_minor_version:
tags_to_push.append("stable")
tags_to_push.append(major_minor_version)
suffix = f"-{args.suffix}" if args.suffix else ""
image_name = f"esphome/esphome{suffix}"
print(f"channel={channel}")
if args.registry is None:
args.registry = [GHCR, DOCKERHUB]
elif len(args.registry) == 1:
if GHCR in args.registry:
print(f"image=ghcr.io/{image_name}")
if DOCKERHUB in args.registry:
print(f"image=docker.io/{image_name}")
print(f"image_name={image_name}")
full_tags = []
for tag in tags_to_push:
if GHCR in args.registry:
full_tags += [f"ghcr.io/{image_name}:{tag}"]
if DOCKERHUB in args.registry:
full_tags += [f"docker.io/{image_name}:{tag}"]
print(f"tags={','.join(full_tags)}")
if __name__ == "__main__":
main()
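A quick worked example of the tagging logic above; the command line is illustrative, but the printed values follow directly from the code (a plain release tag also gains the beta, latest, stable, and major.minor aliases):
./docker/generate_tags.py --tag 2023.12.0 --suffix "" --registry ghcr
# channel=release
# image=ghcr.io/esphome/esphome
# image_name=esphome/esphome
# tags=ghcr.io/esphome/esphome:2023.12.0,ghcr.io/esphome/esphome:beta,ghcr.io/esphome/esphome:latest,ghcr.io/esphome/esphome:stable,ghcr.io/esphome/esphome:2023.12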

View file

@ -1,41 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# This file checks if all user configuration requirements are met
# ==============================================================================
# Check SSL requirements, if enabled
if bashio::config.true 'ssl'; then
if ! bashio::config.has_value 'certfile'; then
bashio::log.fatal 'SSL is enabled, but no certfile was specified.'
bashio::exit.nok
fi
if ! bashio::config.has_value 'keyfile'; then
bashio::log.fatal 'SSL is enabled, but no keyfile was specified'
bashio::exit.nok
fi
certfile="/ssl/$(bashio::config 'certfile')"
keyfile="/ssl/$(bashio::config 'keyfile')"
if ! bashio::fs.file_exists "${certfile}"; then
if ! bashio::fs.file_exists "${keyfile}"; then
# Both files are missing, let's print a friendlier error message
bashio::log.fatal 'You enabled encrypted connections using the "ssl": true option.'
bashio::log.fatal "However, the SSL files '${certfile}' and '${keyfile}'"
bashio::log.fatal "were not found. If you're using Hass.io on your local network and don't want"
bashio::log.fatal 'to encrypt connections to the ESPHome dashboard, you can manually disable'
bashio::log.fatal 'SSL by setting "ssl" to false."'
bashio::exit.nok
fi
bashio::log.fatal "The configured certfile '${certfile}' was not found."
bashio::exit.nok
fi
if ! bashio::fs.file_exists "/ssl/$(bashio::config 'keyfile')"; then
bashio::log.fatal "The configured keyfile '${keyfile}' was not found."
bashio::exit.nok
fi
fi

View file

@ -1,34 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Configures NGINX for use with ESPHome
# ==============================================================================
declare certfile
declare keyfile
declare direct_port
declare ingress_interface
declare ingress_port
mkdir -p /var/log/nginx
direct_port=$(bashio::addon.port 6052)
if bashio::var.has_value "${direct_port}"; then
if bashio::config.true 'ssl'; then
certfile=$(bashio::config 'certfile')
keyfile=$(bashio::config 'keyfile')
mv /etc/nginx/servers/direct-ssl.disabled /etc/nginx/servers/direct.conf
sed -i "s/%%certfile%%/${certfile}/g" /etc/nginx/servers/direct.conf
sed -i "s/%%keyfile%%/${keyfile}/g" /etc/nginx/servers/direct.conf
else
mv /etc/nginx/servers/direct.disabled /etc/nginx/servers/direct.conf
fi
sed -i "s/%%port%%/${direct_port}/g" /etc/nginx/servers/direct.conf
fi
ingress_port=$(bashio::addon.ingress_port)
ingress_interface=$(bashio::addon.ip_address)
sed -i "s/%%port%%/${ingress_port}/g" /etc/nginx/servers/ingress.conf
sed -i "s/%%interface%%/${ingress_interface}/g" /etc/nginx/servers/ingress.conf

View file

@ -1,9 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# This file creates all directories used by esphome
# ==============================================================================
pio_cache_base=/data/cache/platformio
mkdir -p "${pio_cache_base}"

View file

@ -0,0 +1,47 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# This file installs the user ESPHome fork if specified.
# The fork must be up to date with the latest ESPHome dev branch
# and have no conflicts.
# This config option only exists in the ESPHome Dev add-on.
# ==============================================================================
declare esphome_fork
if bashio::config.has_value 'esphome_fork'; then
esphome_fork=$(bashio::config 'esphome_fork')
# format: [username][/repository]:ref
if [[ "$esphome_fork" =~ ^(([^/]+)(/([^:]+))?:)?([^:/]+)$ ]]; then
username="${BASH_REMATCH[2]:-esphome}"
repository="${BASH_REMATCH[4]:-esphome}"
ref="${BASH_REMATCH[5]}"
else
bashio::exit.nok "Invalid esphome_fork format: $esphome_fork"
fi
full_url="https://github.com/${username}/${repository}/archive/${ref}.tar.gz"
bashio::log.info "Checking forked ESPHome"
dev_version=$(python3 -c "from esphome.const import __version__; print(__version__)")
bashio::log.info "Downloading ESPHome from fork '${esphome_fork}' (${full_url})..."
curl -L -o /tmp/esphome.tar.gz "${full_url}" -qq ||
bashio::exit.nok "Failed downloading ESPHome fork."
bashio::log.info "Installing ESPHome from fork '${esphome_fork}' (${full_url})..."
rm -rf /esphome || bashio::exit.nok "Failed to remove ESPHome."
mkdir /esphome
tar -zxf /tmp/esphome.tar.gz -C /esphome --strip-components=1 ||
bashio::exit.nok "Failed installing ESPHome from fork."
pip install -U -e /esphome || bashio::exit.nok "Failed installing ESPHome from fork."
rm -f /tmp/esphome.tar.gz
fork_version=$(python3 -c "from esphome.const import __version__; print(__version__)")
if [[ "$fork_version" != "$dev_version" ]]; then
bashio::log.error "############################"
bashio::log.error "Uninstalled fork as version does not match"
bashio::log.error "Update (or ask the author to update) the branch"
bashio::log.error "This is important as the dev addon and the dev ESPHome"
bashio::log.error "branch can have changes that are not compatible with old forks"
bashio::log.error "and get reported as bugs which we cannot solve easily."
bashio::log.error "############################"
bashio::exit.nok
fi
bashio::log.info "Installed ESPHome from fork '${esphome_fork}' (${full_url})..."
fi
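For reference, a few hypothetical esphome_fork values and how the pattern above resolves them (username and repository both fall back to esphome when omitted):
# esphome_fork: dev                        -> https://github.com/esphome/esphome/archive/dev.tar.gz
# esphome_fork: someuser:mybranch          -> https://github.com/someuser/esphome/archive/mybranch.tar.gz
# esphome_fork: someuser/esphome:mybranch  -> https://github.com/someuser/esphome/archive/mybranch.tar.gz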

View file

@ -1,6 +1,8 @@
root /dev/null;
server_name $hostname;
client_max_body_size 512m;
add_header X-Content-Type-Options nosniff;
add_header X-XSS-Protection "1; mode=block";
add_header X-Robots-Tag none;

View file

@ -1,7 +1,6 @@
ssl_protocols TLSv1.2;
ssl_prefer_server_ciphers on;
ssl_ciphers ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:DHE-RSA-AES256-SHA;
ssl_ecdh_curve secp384r1;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
ssl_session_timeout 10m;
ssl_session_cache shared:SSL:10m;
ssl_session_tickets off;

View file

@ -0,0 +1,3 @@
upstream esphome {
server unix:/var/run/esphome.sock;
}

View file

@ -2,7 +2,6 @@ daemon off;
user root;
pid /var/run/nginx.pid;
worker_processes 1;
# Hass.io addon log
error_log /proc/1/fd/1 error;
events {
worker_connections 1024;
@ -10,24 +9,22 @@ events {
http {
include /etc/nginx/includes/mime.types;
access_log stdout;
access_log off;
default_type application/octet-stream;
gzip on;
keepalive_timeout 65;
sendfile on;
server_tokens off;
tcp_nodelay on;
tcp_nopush on;
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
# Use Hass.io supervisor as resolver
resolver 172.30.32.2;
upstream esphome {
server unix:/var/run/esphome.sock;
}
include /etc/nginx/includes/upstream.conf;
include /etc/nginx/servers/*.conf;
}

View file

@ -0,0 +1 @@
Without requirements or design, programming is the art of adding bugs to an empty text file. (Louis Srygley)

View file

@ -1,22 +0,0 @@
server {
listen %%port%% default_server ssl http2;
include /etc/nginx/includes/server_params.conf;
include /etc/nginx/includes/proxy_params.conf;
include /etc/nginx/includes/ssl_params.conf;
ssl on;
ssl_certificate /ssl/%%certfile%%;
ssl_certificate_key /ssl/%%keyfile%%;
# Clear Hass.io Ingress header
proxy_set_header X-HA-Ingress "";
# Redirect http requests to https on the same port.
# https://rageagainstshell.com/2016/11/redirect-http-to-https-on-the-same-port-in-nginx/
error_page 497 https://$http_host$request_uri;
location / {
proxy_pass http://esphome;
}
}

View file

@ -1,12 +0,0 @@
server {
listen %%port%% default_server;
include /etc/nginx/includes/server_params.conf;
include /etc/nginx/includes/proxy_params.conf;
# Clear Hass.io Ingress header
proxy_set_header X-HA-Ingress "";
location / {
proxy_pass http://esphome;
}
}

View file

@ -1,16 +0,0 @@
server {
listen %%interface%%:%%port%% default_server;
include /etc/nginx/includes/server_params.conf;
include /etc/nginx/includes/proxy_params.conf;
# Set Home Assistant Ingress header
proxy_set_header X-HA-Ingress "YES";
location / {
# Only allow from Hass.io supervisor
allow 172.30.32.2;
deny all;
proxy_pass http://esphome;
}
}

View file

@ -0,0 +1,28 @@
server {
{{ if not .ssl }}
listen 6052 default_server;
{{ else }}
listen 6052 default_server ssl http2;
{{ end }}
include /etc/nginx/includes/server_params.conf;
include /etc/nginx/includes/proxy_params.conf;
{{ if .ssl }}
include /etc/nginx/includes/ssl_params.conf;
ssl_certificate /ssl/{{ .certfile }};
ssl_certificate_key /ssl/{{ .keyfile }};
# Redirect http requests to https on the same port.
# https://rageagainstshell.com/2016/11/redirect-http-to-https-on-the-same-port-in-nginx/
error_page 497 https://$http_host$request_uri;
{{ end }}
# Clear Home Assistant Ingress header
proxy_set_header X-HA-Ingress "";
location / {
proxy_pass http://esphome;
}
}

View file

@ -0,0 +1,18 @@
server {
listen 127.0.0.1:{{ .port }} default_server;
listen {{ .interface }}:{{ .port }} default_server;
include /etc/nginx/includes/server_params.conf;
include /etc/nginx/includes/proxy_params.conf;
# Set Home Assistant Ingress header
proxy_set_header X-HA-Ingress "YES";
location / {
allow 172.30.32.2;
allow 127.0.0.1;
deny all;
proxy_pass http://esphome;
}
}

View file

@ -0,0 +1,32 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Home Assistant Add-on: ESPHome
# Sends discovery information to Home Assistant.
# ==============================================================================
declare config
declare port
# We only disable it when disabled explicitly
if bashio::config.false 'home_assistant_dashboard_integration';
then
bashio::log.info "Home Assistant discovery is disabled for this add-on."
bashio::exit.ok
fi
port=$(bashio::addon.ingress_port)
# Wait for NGINX to become available
bashio::net.wait_for "${port}" "127.0.0.1" 300
config=$(\
bashio::var.json \
host "127.0.0.1" \
port "^${port}" \
)
if bashio::discovery "esphome" "${config}" > /dev/null; then
bashio::log.info "Successfully send discovery information to Home Assistant."
else
bashio::log.error "Discovery message to Home Assistant failed!"
fi

View file

@ -0,0 +1 @@
oneshot

View file

@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/discovery/run

View file

@ -0,0 +1,26 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Home Assistant Community Add-on: ESPHome
# Take down the S6 supervision tree when ESPHome dashboard fails
# ==============================================================================
declare exit_code
readonly exit_code_container=$(</run/s6-linux-init-container-results/exitcode)
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
bashio::log.info \
"Service ESPHome dashboard exited with code ${exit_code_service}" \
"(by signal ${exit_code_signal})"
if [[ "${exit_code_service}" -eq 256 ]]; then
if [[ "${exit_code_container}" -eq 0 ]]; then
echo $((128 + $exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
fi
[[ "${exit_code_signal}" -eq 15 ]] && exec /run/s6/basedir/bin/halt
elif [[ "${exit_code_service}" -ne 0 ]]; then
if [[ "${exit_code_container}" -eq 0 ]]; then
echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
fi
exec /run/s6/basedir/bin/halt
fi

View file

@ -0,0 +1,57 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the ESPHome dashboard
# ==============================================================================
readonly pio_cache_base=/data/cache/platformio
export ESPHOME_IS_HA_ADDON=true
export PLATFORMIO_GLOBALLIB_DIR=/piolibs
# we can't set core_dir, because the settings file is stored in `core_dir/appstate.json`
# setting `core_dir` would therefore prevent pio from accessing that settings file
export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"
if bashio::config.true 'leave_front_door_open'; then
export DISABLE_HA_AUTHENTICATION=true
fi
if bashio::config.true 'streamer_mode'; then
export ESPHOME_STREAMER_MODE=true
fi
if bashio::config.true 'status_use_ping'; then
export ESPHOME_DASHBOARD_USE_PING=true
fi
if bashio::config.has_value 'relative_url'; then
export ESPHOME_DASHBOARD_RELATIVE_URL=$(bashio::config 'relative_url')
fi
if bashio::config.has_value 'default_compile_process_limit'; then
export ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT=$(bashio::config 'default_compile_process_limit')
else
if grep -q 'Raspberry Pi 3' /proc/cpuinfo; then
export ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT=1
fi
fi
mkdir -p "${pio_cache_base}"
mkdir -p /config/esphome
if bashio::fs.directory_exists '/config/esphome/.esphome'; then
bashio::log.info "Migrating old .esphome directory..."
if bashio::fs.file_exists '/config/esphome/.esphome/esphome.json'; then
mv /config/esphome/.esphome/esphome.json /data/esphome.json
fi
mkdir -p "/data/storage"
mv /config/esphome/.esphome/*.json /data/storage/ || true
rm -rf /config/esphome/.esphome
fi
bashio::log.info "Starting ESPHome dashboard..."
exec esphome dashboard /config/esphome --socket /var/run/esphome.sock --ha-addon

View file

@ -0,0 +1 @@
longrun

View file

@ -0,0 +1,27 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Configures NGINX for use with ESPHome
# ==============================================================================
mkdir -p /var/log/nginx
# Generate Ingress configuration
bashio::var.json \
interface "$(bashio::addon.ip_address)" \
port "^$(bashio::addon.ingress_port)" \
| tempio \
-template /etc/nginx/templates/ingress.gtpl \
-out /etc/nginx/servers/ingress.conf
# Generate direct access configuration, if enabled.
if bashio::var.has_value "$(bashio::addon.port 6052)"; then
bashio::config.require.ssl
bashio::var.json \
certfile "$(bashio::config 'certfile')" \
keyfile "$(bashio::config 'keyfile')" \
ssl "^$(bashio::config 'ssl')" \
| tempio \
-template /etc/nginx/templates/direct.gtpl \
-out /etc/nginx/servers/direct.conf
fi

View file

@ -0,0 +1 @@
oneshot

View file

@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-nginx/run

View file

@ -0,0 +1,25 @@
#!/command/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when NGINX fails
# ==============================================================================
declare exit_code
readonly exit_code_container=$(</run/s6-linux-init-container-results/exitcode)
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
bashio::log.info \
"Service NGINX exited with code ${exit_code_service}" \
"(by signal ${exit_code_signal})"
if [[ "${exit_code_service}" -eq 256 ]]; then
if [[ "${exit_code_container}" -eq 0 ]]; then
echo $((128 + $exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
fi
[[ "${exit_code_signal}" -eq 15 ]] && exec /run/s6/basedir/bin/halt
elif [[ "${exit_code_service}" -ne 0 ]]; then
if [[ "${exit_code_container}" -eq 0 ]]; then
echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
fi
exec /run/s6/basedir/bin/halt
fi

View file

@ -0,0 +1,15 @@
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the NGINX proxy
# ==============================================================================
bashio::log.info "Waiting for ESPHome dashboard to come up..."
while [[ ! -S /var/run/esphome.sock ]]; do
sleep 0.5
done
bashio::log.info "Starting NGINX..."
exec nginx

View file

@ -0,0 +1 @@
longrun

View file

@ -1,9 +0,0 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when ESPHome fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }
s6-svscanctl -t /var/run/s6/services

View file

@ -1,35 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the ESPHome dashboard
# ==============================================================================
export ESPHOME_IS_HA_ADDON=true
if bashio::config.true 'leave_front_door_open'; then
export DISABLE_HA_AUTHENTICATION=true
fi
if bashio::config.true 'streamer_mode'; then
export ESPHOME_STREAMER_MODE=true
fi
if bashio::config.true 'status_use_ping'; then
export ESPHOME_DASHBOARD_USE_PING=true
fi
if bashio::config.has_value 'relative_url'; then
export ESPHOME_DASHBOARD_RELATIVE_URL=$(bashio::config 'relative_url')
fi
pio_cache_base=/data/cache/platformio
# we can't set core_dir, because the settings file is stored in `core_dir/appstate.json`
# setting `core_dir` would therefore prevent pio from accessing
export PLATFORMIO_PLATFORMS_DIR="${pio_cache_base}/platforms"
export PLATFORMIO_PACKAGES_DIR="${pio_cache_base}/packages"
export PLATFORMIO_CACHE_DIR="${pio_cache_base}/cache"
export PLATFORMIO_GLOBALLIB_DIR=/piolibs
bashio::log.info "Starting ESPHome dashboard..."
exec esphome dashboard /config/esphome --socket /var/run/esphome.sock --ha-addon

View file

@ -1,9 +0,0 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Take down the S6 supervision tree when NGINX fails
# ==============================================================================
if -n { s6-test $# -ne 0 }
if -n { s6-test ${1} -eq 256 }
s6-svscanctl -t /var/run/s6/services

View file

@ -1,14 +0,0 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Community Hass.io Add-ons: ESPHome
# Runs the NGINX proxy
# ==============================================================================
bashio::log.info "Waiting for dashboard to come up..."
while [[ ! -S /var/run/esphome.sock ]]; do
sleep 0.5
done
bashio::log.info "Starting NGINX..."
exec nginx

View file

@ -1,30 +0,0 @@
#!/usr/bin/env python3
# This script is used in the docker containers to preinstall
# all platformio libraries in the global storage
import configparser
import subprocess
import sys
config = configparser.ConfigParser(inline_comment_prefixes=(';', ))
config.read(sys.argv[1])
libs = []
# Extract from every lib_deps key in all sections
for section in config.sections():
conf = config[section]
if "lib_deps" not in conf:
continue
for lib_dep in conf["lib_deps"].splitlines():
if not lib_dep:
# Empty line or comment
continue
if lib_dep.startswith("${"):
# Extending from another section
continue
if "@" not in lib_dep:
# No version pinned, this is an internal lib
continue
libs.append(lib_dep)
subprocess.check_call(['platformio', 'lib', '-g', 'install', *libs])

View file

@ -1,31 +1,44 @@
# PYTHON_ARGCOMPLETE_OK
import argparse
import functools
import logging
import os
import re
import sys
import time
from datetime import datetime
import argcomplete
from esphome import const, writer, yaml_util
import esphome.codegen as cg
from esphome.config import iter_components, read_config, strip_default_ids
from esphome.config import iter_component_configs, read_config, strip_default_ids
from esphome.const import (
ALLOWED_NAME_CHARS,
CONF_BAUD_RATE,
CONF_BROKER,
CONF_DEASSERT_RTS_DTR,
CONF_DISABLED,
CONF_ESPHOME,
CONF_LOGGER,
CONF_MDNS,
CONF_MQTT,
CONF_NAME,
CONF_OTA,
CONF_PASSWORD,
CONF_PORT,
CONF_ESPHOME,
CONF_PLATFORM,
CONF_PLATFORMIO_OPTIONS,
CONF_PORT,
CONF_SUBSTITUTIONS,
PLATFORM_BK72XX,
PLATFORM_ESP32,
PLATFORM_ESP8266,
PLATFORM_RP2040,
PLATFORM_RTL87XX,
SECRETS_FILES,
)
from esphome.core import CORE, EsphomeError, coroutine
from esphome.helpers import indent
from esphome.helpers import indent, is_ip_address
from esphome.util import (
run_external_command,
run_external_process,
@ -38,7 +51,7 @@ from esphome.log import color, setup_log, Fore
_LOGGER = logging.getLogger(__name__)
def choose_prompt(options):
def choose_prompt(options, purpose: str = None):
if not options:
raise EsphomeError(
"Found no valid options for upload/logging, please make sure relevant "
@ -49,9 +62,11 @@ def choose_prompt(options):
if len(options) == 1:
return options[0][1]
safe_print("Found multiple options, please choose one:")
safe_print(
f'Found multiple options{f" for {purpose}" if purpose else ""}, please choose one:'
)
for i, (desc, _) in enumerate(options):
safe_print(f" [{i+1}] {desc}")
safe_print(f" [{i + 1}] {desc}")
while True:
opt = input("(number): ")
@ -68,15 +83,19 @@ def choose_prompt(options):
return options[opt - 1][1]
def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api):
def choose_upload_log_host(
default, check_default, show_ota, show_mqtt, show_api, purpose: str = None
):
options = []
for port in get_serial_ports():
options.append((f"{port.path} ({port.description})", port.path))
if default == "SERIAL":
return choose_prompt(options, purpose=purpose)
if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):
options.append((f"Over The Air ({CORE.address})", CORE.address))
if default == "OTA":
return CORE.address
if show_mqtt and "mqtt" in CORE.config:
if show_mqtt and CONF_MQTT in CORE.config:
options.append((f"MQTT ({CORE.config['mqtt'][CONF_BROKER]})", "MQTT"))
if default == "OTA":
return "MQTT"
@ -84,7 +103,7 @@ def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api
return default
if check_default is not None and check_default in [opt[1] for opt in options]:
return check_default
return choose_prompt(options)
return choose_prompt(options, purpose=purpose)
def get_port_type(port):
@ -101,11 +120,11 @@ def run_miniterm(config, port):
if CONF_LOGGER not in config:
_LOGGER.info("Logger is not enabled. Not starting UART logs.")
return
return 1
baud_rate = config["logger"][CONF_BAUD_RATE]
if baud_rate == 0:
_LOGGER.info("UART logging is disabled (baud_rate=0). Not starting UART logs.")
return
return 1
_LOGGER.info("Starting log output from %s with baud rate %s", port, baud_rate)
backtrace_state = False
@ -119,25 +138,36 @@ def run_miniterm(config, port):
ser.dtr = False
ser.rts = False
tries = 0
while tries < 5:
try:
with ser:
while True:
try:
raw = ser.readline()
except serial.SerialException:
_LOGGER.error("Serial port closed!")
return
return 0
line = (
raw.replace(b"\r", b"")
.replace(b"\n", b"")
.decode("utf8", "backslashreplace")
)
time = datetime.now().time().strftime("[%H:%M:%S]")
message = time + line
time_str = datetime.now().time().strftime("[%H:%M:%S]")
message = time_str + line
safe_print(message)
backtrace_state = platformio_api.process_stacktrace(
config, line, backtrace_state=backtrace_state
)
except serial.SerialException:
tries += 1
time.sleep(1)
if tries >= 5:
_LOGGER.error("Could not connect to serial port %s", port)
return 1
return 0
def wrap_to_code(name, comp):
@ -167,7 +197,7 @@ def write_cpp(config):
def generate_cpp_contents(config):
_LOGGER.info("Generating C++ source...")
for name, component, conf in iter_components(CORE.config):
for name, component, conf in iter_component_configs(CORE.config):
if component.to_code is not None:
coro = wrap_to_code(name, component)
CORE.add_job(coro, conf)
@ -194,14 +224,16 @@ def compile_program(args, config):
return 0 if idedata is not None else 1
def upload_using_esptool(config, port):
def upload_using_esptool(config, port, file):
from esphome import platformio_api
first_baudrate = config[CONF_ESPHOME][CONF_PLATFORMIO_OPTIONS].get(
"upload_speed", 460800
)
def run_esptool(baud_rate):
if file is not None:
flash_images = [platformio_api.FlashImage(path=file, offset="0x0")]
else:
idedata = platformio_api.get_idedata(config)
firmware_offset = "0x10000" if CORE.is_esp32 else "0x0"
@ -218,6 +250,7 @@ def upload_using_esptool(config, port):
mcu = get_esp32_variant().lower()
def run_esptool(baud_rate):
cmd = [
"esptool.py",
"--before",
@ -241,8 +274,7 @@ def upload_using_esptool(config, port):
if os.environ.get("ESPHOME_USE_SUBPROCESS") is None:
import esptool
# pylint: disable=protected-access
return run_external_command(esptool._main, *cmd)
return run_external_command(esptool.main, *cmd) # pylint: disable=no-member
return run_external_process(*cmd)
@ -257,22 +289,79 @@ def upload_using_esptool(config, port):
return run_esptool(115200)
def upload_using_platformio(config, port):
from esphome import platformio_api
upload_args = ["-t", "upload", "-t", "nobuild"]
if port is not None:
upload_args += ["--upload-port", port]
return platformio_api.run_platformio_cli_run(config, CORE.verbose, *upload_args)
def check_permissions(port):
if os.name == "posix" and get_port_type(port) == "SERIAL":
# Check if we can open selected serial port
if not os.access(port, os.F_OK):
raise EsphomeError(
"The selected serial port does not exist. To resolve this issue, "
"check that the device is connected to this computer with a USB cable and that "
"the USB cable can be used for data and is not a power-only cable."
)
if not (os.access(port, os.R_OK | os.W_OK)):
raise EsphomeError(
"You do not have read or write permission on the selected serial port. "
"To resolve this issue, you can add your user to the dialout group "
f"by running the following command: sudo usermod -a -G dialout {os.getlogin()}. "
"You will need to log out & back in or reboot to activate the new group access."
)
def upload_program(config, args, host):
# if upload is to a serial port use platformio, otherwise assume ota
if get_port_type(host) == "SERIAL":
return upload_using_esptool(config, host)
check_permissions(host)
if CORE.target_platform in (PLATFORM_ESP32, PLATFORM_ESP8266):
file = getattr(args, "file", None)
return upload_using_esptool(config, host, file)
if CORE.target_platform in (PLATFORM_RP2040):
return upload_using_platformio(config, args.device)
if CORE.target_platform in (PLATFORM_BK72XX, PLATFORM_RTL87XX):
return upload_using_platformio(config, host)
return 1 # Unknown target platform
ota_conf = {}
for ota_item in config.get(CONF_OTA, []):
if ota_item[CONF_PLATFORM] == CONF_ESPHOME:
ota_conf = ota_item
break
if not ota_conf:
raise EsphomeError(
f"Cannot upload Over the Air as the {CONF_OTA} configuration is not present or does not include {CONF_PLATFORM}: {CONF_ESPHOME}"
)
from esphome import espota2
if CONF_OTA not in config:
raise EsphomeError(
"Cannot upload Over the Air as the config does not include the ota: "
"component"
)
ota_conf = config[CONF_OTA]
remote_port = ota_conf[CONF_PORT]
password = ota_conf.get(CONF_PASSWORD, "")
if (
not is_ip_address(CORE.address) # pylint: disable=too-many-boolean-expressions
and (get_port_type(host) == "MQTT" or config[CONF_MDNS][CONF_DISABLED])
and CONF_MQTT in config
and (not args.device or args.device in ("MQTT", "OTA"))
):
from esphome import mqtt
host = mqtt.get_esphome_device_ip(
config, args.username, args.password, args.client_id
)
if getattr(args, "file", None) is not None:
return espota2.run_ota(host, remote_port, password, args.file)
return espota2.run_ota(host, remote_port, password, CORE.firmware_bin)
@ -280,9 +369,16 @@ def show_logs(config, args, port):
if "logger" not in config:
raise EsphomeError("Logger is not configured!")
if get_port_type(port) == "SERIAL":
run_miniterm(config, port)
return 0
check_permissions(port)
return run_miniterm(config, port)
if get_port_type(port) == "NETWORK" and "api" in config:
if config[CONF_MDNS][CONF_DISABLED] and CONF_MQTT in config:
from esphome import mqtt
port = mqtt.get_esphome_device_ip(
config, args.username, args.password, args.client_id
)
from esphome.components.api.client import run_logs
return run_logs(config, port)
@ -311,10 +407,17 @@ def command_wizard(args):
def command_config(args, config):
_LOGGER.info("Configuration is valid!")
if not CORE.verbose:
config = strip_default_ids(config)
safe_print(yaml_util.dump(config))
output = yaml_util.dump(config, args.show_secrets)
# add the console decoration so the front-end can hide the secrets
if not args.show_secrets:
output = re.sub(
r"(password|key|psk|ssid)\: (.+)", r"\1: \\033[5m\2\\033[6m", output
)
if not CORE.quiet:
safe_print(output)
_LOGGER.info("Configuration is valid!")
return 0
@ -347,6 +450,7 @@ def command_upload(args, config):
show_ota=True,
show_mqtt=False,
show_api=False,
purpose="uploading",
)
exit_code = upload_program(config, args, port)
if exit_code != 0:
@ -355,6 +459,15 @@ def command_upload(args, config):
return 0
def command_discover(args, config):
if "mqtt" in config:
from esphome import mqtt
return mqtt.show_discover(config, args.username, args.password, args.client_id)
raise EsphomeError("No discover method configured (mqtt)")
def command_logs(args, config):
port = choose_upload_log_host(
default=args.device,
@ -362,6 +475,7 @@ def command_logs(args, config):
show_ota=False,
show_mqtt=True,
show_api=True,
purpose="logging",
)
return show_logs(config, args, port)
@ -374,12 +488,22 @@ def command_run(args, config):
if exit_code != 0:
return exit_code
_LOGGER.info("Successfully compiled program.")
if CORE.is_host:
from esphome.platformio_api import get_idedata
idedata = get_idedata(config)
if idedata is None:
return 1
program_path = idedata.raw["prog_path"]
return run_external_process(program_path)
port = choose_upload_log_host(
default=args.device,
check_default=None,
show_ota=True,
show_mqtt=False,
show_api=True,
purpose="uploading",
)
exit_code = upload_program(config, args, port)
if exit_code != 0:
@ -393,6 +517,7 @@ def command_run(args, config):
show_ota=False,
show_mqtt=True,
show_api=True,
purpose="logging",
)
return show_logs(config, args, port)
@ -425,7 +550,7 @@ def command_clean(args, config):
def command_dashboard(args):
from esphome.dashboard import dashboard
return dashboard.start_web_server(args)
return dashboard.start_dashboard(args)
def command_update_all(args):
@ -496,14 +621,14 @@ def command_rename(args, config):
)
)
return 1
# Load existing yaml file
with open(CORE.config_path, mode="r+", encoding="utf-8") as raw_file:
raw_contents = raw_file.read()
yaml = yaml_util.load_yaml(CORE.config_path)
if CONF_ESPHOME not in yaml or CONF_NAME not in yaml[CONF_ESPHOME]:
print(
color(
Fore.BOLD_RED, "Complex YAML files cannot be automatically renamed."
)
color(Fore.BOLD_RED, "Complex YAML files cannot be automatically renamed.")
)
return 1
old_name = yaml[CONF_ESPHOME][CONF_NAME]
@ -536,23 +661,24 @@ def command_rename(args, config):
flags=re.MULTILINE,
)
raw_file.seek(0)
raw_file.write(new_raw)
raw_file.flush()
print(f"Updating {color(Fore.CYAN, CORE.config_path)}")
new_path = os.path.join(CORE.config_dir, args.name + ".yaml")
print(
f"Updating {color(Fore.CYAN, CORE.config_path)} to {color(Fore.CYAN, new_path)}"
)
print()
rc = run_external_process("esphome", "config", CORE.config_path)
with open(new_path, mode="w", encoding="utf-8") as new_file:
new_file.write(new_raw)
rc = run_external_process("esphome", "config", new_path)
if rc != 0:
raw_file.seek(0)
raw_file.write(raw_contents)
print(color(Fore.BOLD_RED, "Rename failed. Reverting changes."))
os.remove(new_path)
return 1
cli_args = [
"run",
CORE.config_path,
new_path,
"--no-logs",
"--device",
CORE.address,
@ -566,10 +692,11 @@ def command_rename(args, config):
except KeyboardInterrupt:
rc = 1
if rc != 0:
raw_file.seek(0)
raw_file.write(raw_contents)
os.remove(new_path)
return 1
os.remove(CORE.config_path)
print(color(Fore.BOLD_GREEN, "SUCCESS"))
print()
return 0
@ -594,6 +721,7 @@ POST_CONFIG_ACTIONS = {
"clean": command_clean,
"idedata": command_idedata,
"rename": command_rename,
"discover": command_discover,
}
@ -638,6 +766,9 @@ def parse_args(argv):
parser_config.add_argument(
"configuration", help="Your YAML configuration file(s).", nargs="+"
)
parser_config.add_argument(
"--show-secrets", help="Show secrets in output.", action="store_true"
)
parser_compile = subparsers.add_parser(
"compile", help="Read the configuration and compile a program."
@ -652,7 +783,9 @@ def parse_args(argv):
)
parser_upload = subparsers.add_parser(
"upload", help="Validate the configuration and upload the latest binary."
"upload",
help="Validate the configuration and upload the latest binary.",
parents=[mqtt_options],
)
parser_upload.add_argument(
"configuration", help="Your YAML configuration file(s).", nargs="+"
@ -661,10 +794,15 @@ def parse_args(argv):
"--device",
help="Manually specify the serial port/address to use, for example /dev/ttyUSB0.",
)
parser_upload.add_argument(
"--file",
help="Manually specify the binary file to upload.",
)
parser_logs = subparsers.add_parser(
"logs",
help="Validate the configuration and show all logs.",
aliases=["log"],
parents=[mqtt_options],
)
parser_logs.add_argument(
@ -675,6 +813,15 @@ def parse_args(argv):
help="Manually specify the serial port/address to use, for example /dev/ttyUSB0.",
)
parser_discover = subparsers.add_parser(
"discover",
help="Validate the configuration and show all discovered devices.",
parents=[mqtt_options],
)
parser_discover.add_argument(
"configuration", help="Your YAML configuration file.", nargs=1
)
parser_run = subparsers.add_parser(
"run",
help="Validate the configuration, create a binary, upload it, and start logs.",
@ -776,7 +923,10 @@ def parse_args(argv):
"configuration", help="Your YAML configuration file(s).", nargs=1
)
parser_rename = subparsers.add_parser("rename")
parser_rename = subparsers.add_parser(
"rename",
help="Rename a device in YAML, compile the binary and upload it.",
)
parser_rename.add_argument(
"configuration", help="Your YAML configuration file.", nargs=1
)
@ -858,6 +1008,7 @@ def parse_args(argv):
# Finally, run the new-style parser again with the possibly swapped arguments,
# and let it error out if the command is unparsable.
parser.set_defaults(deprecated_argv_suggestion=deprecated_argv_suggestion)
argcomplete.autocomplete(parser)
return parser.parse_args(arguments)
@ -893,6 +1044,8 @@ def run_esphome(argv):
_LOGGER.error(e, exc_info=args.verbose)
return 1
_LOGGER.info("ESPHome %s", const.__version__)
for conf_path in args.configuration:
if any(os.path.basename(conf_path) == x for x in SECRETS_FILES):
_LOGGER.warning("Skipping secrets file %s", conf_path)

View file

@ -11,23 +11,33 @@ from esphome.const import (
CONF_TRIGGER_ID,
CONF_TYPE_ID,
CONF_TIME,
CONF_UPDATE_INTERVAL,
)
from esphome.jsonschema import jschema_extractor
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
from esphome.util import Registry
def maybe_simple_id(*validators):
"""Allow a raw ID to be specified in place of a config block.
If the value that's being validated is a dictionary, it's passed as-is to the specified validators. Otherwise, it's
wrapped in a dict that looks like ``{"id": <value>}``, and that dict is then handed off to the specified validators.
"""
return maybe_conf(CONF_ID, *validators)
def maybe_conf(conf, *validators):
"""Allow a raw value to be specified in place of a config block.
If the value that's being validated is a dictionary, it's passed as-is to the specified validators. Otherwise, it's
wrapped in a dict that looks like ``{<conf>: <value>}``, and that dict is then handed off to the specified
validators.
(This is a general case of ``maybe_simple_id`` that allows the wrapping key to be something other than ``id``.)
"""
validator = cv.All(*validators)
@jschema_extractor("maybe")
@schema_extractor("maybe")
def validate(value):
# pylint: disable=comparison-with-callable
if value == jschema_extractor:
return validator
if value == SCHEMA_EXTRACT:
return (validator, conf)
if isinstance(value, dict):
return validator(value)
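To make the wrapping behaviour described in the two docstrings above concrete, here is a minimal stand-alone sketch; ``maybe_wrap``, the identity validator, and the value ``my_fan`` are illustrative stand-ins, not part of this change:

def maybe_wrap(conf_key, validate):
    # Simplified stand-in for maybe_conf(): wrap a bare value under conf_key.
    def wrapper(value):
        if isinstance(value, dict):
            return validate(value)          # full config blocks pass through as-is
        return validate({conf_key: value})  # bare values get wrapped first
    return wrapper

validate = maybe_wrap("id", lambda cfg: cfg)           # identity validator for the sketch
assert validate("my_fan") == {"id": "my_fan"}          # raw ID is wrapped
assert validate({"id": "my_fan"}) == {"id": "my_fan"}  # dicts are untouched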
@@ -70,6 +80,8 @@ WhileAction = cg.esphome_ns.class_("WhileAction", Action)
RepeatAction = cg.esphome_ns.class_("RepeatAction", Action)
WaitUntilAction = cg.esphome_ns.class_("WaitUntilAction", Action, cg.Component)
UpdateComponentAction = cg.esphome_ns.class_("UpdateComponentAction", Action)
SuspendComponentAction = cg.esphome_ns.class_("SuspendComponentAction", Action)
ResumeComponentAction = cg.esphome_ns.class_("ResumeComponentAction", Action)
Automation = cg.esphome_ns.class_("Automation")
LambdaCondition = cg.esphome_ns.class_("LambdaCondition", Condition)
@@ -111,11 +123,9 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
# This should only happen with invalid configs, but let's have a nice error message.
return [schema(value)]
@jschema_extractor("automation")
@schema_extractor("automation")
def validator(value):
# hack to get the schema
# pylint: disable=comparison-with-callable
if value == jschema_extractor:
if value == SCHEMA_EXTRACT:
return schema
value = validator_(value)
@@ -141,6 +151,7 @@ AUTOMATION_SCHEMA = cv.Schema(
AndCondition = cg.esphome_ns.class_("AndCondition", Condition)
OrCondition = cg.esphome_ns.class_("OrCondition", Condition)
NotCondition = cg.esphome_ns.class_("NotCondition", Condition)
XorCondition = cg.esphome_ns.class_("XorCondition", Condition)
@register_condition("and", AndCondition, validate_condition_list)
@@ -161,6 +172,12 @@ async def not_condition_to_code(config, condition_id, template_arg, args):
return cg.new_Pvariable(condition_id, template_arg, condition)
@register_condition("xor", XorCondition, validate_condition_list)
async def xor_condition_to_code(config, condition_id, template_arg, args):
conditions = await build_condition_list(config, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, conditions)
@register_condition("lambda", LambdaCondition, cv.returning_lambda)
async def lambda_condition_to_code(config, condition_id, template_arg, args):
lambda_ = await cg.process_lambda(config, args, return_type=bool)
@@ -257,7 +274,11 @@ async def repeat_action_to_code(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg)
count_template = await cg.templatable(config[CONF_COUNT], args, cg.uint32)
cg.add(var.set_count(count_template))
actions = await build_action_list(config[CONF_THEN], template_arg, args)
actions = await build_action_list(
config[CONF_THEN],
cg.TemplateArguments(cg.uint32, *template_arg.args),
[(cg.uint32, "iteration"), *args],
)
cg.add(var.add_then(actions))
return var
@@ -302,6 +323,41 @@ async def component_update_action_to_code(config, action_id, template_arg, args)
return cg.new_Pvariable(action_id, template_arg, comp)
@register_action(
"component.suspend",
SuspendComponentAction,
maybe_simple_id(
{
cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
}
),
)
async def component_suspend_action_to_code(config, action_id, template_arg, args):
comp = await cg.get_variable(config[CONF_ID])
return cg.new_Pvariable(action_id, template_arg, comp)
@register_action(
"component.resume",
ResumeComponentAction,
maybe_simple_id(
{
cv.Required(CONF_ID): cv.use_id(cg.PollingComponent),
cv.Optional(CONF_UPDATE_INTERVAL): cv.templatable(
cv.positive_time_period_milliseconds
),
}
),
)
async def component_resume_action_to_code(config, action_id, template_arg, args):
comp = await cg.get_variable(config[CONF_ID])
var = cg.new_Pvariable(action_id, template_arg, comp)
if CONF_UPDATE_INTERVAL in config:
template_ = await cg.templatable(config[CONF_UPDATE_INTERVAL], args, int)
cg.add(var.set_update_interval(template_))
return var
async def build_action(full_config, template_arg, args):
registry_entry, config = cg.extract_registry_entry_config(
ACTION_REGISTRY, full_config

View file

@@ -22,6 +22,7 @@ from esphome.cpp_generator import ( # noqa
static_const_array,
statement,
variable,
with_local_variable,
new_variable,
Pvariable,
new_Pvariable,
@@ -46,6 +47,7 @@ from esphome.cpp_helpers import ( # noqa
build_registry_list,
extract_registry_entry_config,
register_parented,
past_safe_mode,
)
from esphome.cpp_types import ( # noqa
global_ns,
@@ -56,14 +58,18 @@ from esphome.cpp_types import ( # noqa
bool_,
int_,
std_ns,
std_shared_ptr,
std_string,
std_string_ref,
std_vector,
uint8,
uint16,
uint32,
uint64,
int16,
int32,
int64,
size_t,
const_char_ptr,
NAN,
esphome_ns,
@@ -83,4 +89,5 @@ from esphome.cpp_types import ( # noqa
gpio_Flags,
EntityCategory,
Parented,
ESPTime,
)

View file

@@ -0,0 +1 @@
CODEOWNERS = ["@MrSuicideParrot"]

View file

@@ -0,0 +1,44 @@
// Datasheet https://wiki.dfrobot.com/A01NYUB%20Waterproof%20Ultrasonic%20Sensor%20SKU:%20SEN0313
#include "a01nyub.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
namespace esphome {
namespace a01nyub {
static const char *const TAG = "a01nyub.sensor";
void A01nyubComponent::loop() {
uint8_t data;
while (this->available() > 0) {
this->read_byte(&data);
if (this->buffer_.empty() && (data != 0xff))
continue;
buffer_.push_back(data);
if (this->buffer_.size() == 4)
this->check_buffer_();
}
}
void A01nyubComponent::check_buffer_() {
uint8_t checksum = this->buffer_[0] + this->buffer_[1] + this->buffer_[2];
if (this->buffer_[3] == checksum) {
float distance = (this->buffer_[1] << 8) + this->buffer_[2];
if (distance > 280) {
float meters = distance / 1000.0;
ESP_LOGV(TAG, "Distance from sensor: %f mm, %f m", distance, meters);
this->publish_state(meters);
} else {
ESP_LOGW(TAG, "Invalid data read from sensor: %s", format_hex_pretty(this->buffer_).c_str());
}
} else {
ESP_LOGW(TAG, "checksum failed: %02x != %02x", checksum, this->buffer_[3]);
}
this->buffer_.clear();
}
void A01nyubComponent::dump_config() { LOG_SENSOR("", "A01nyub Sensor", this); }
} // namespace a01nyub
} // namespace esphome
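For reference, the framing that check_buffer_() implements can be sketched in a few lines of stand-alone Python; decode_frame is not part of the component, and the sample frame bytes are invented for illustration:

def decode_frame(frame):
    # 4-byte frame: 0xFF header, distance high byte, distance low byte, checksum
    header, high, low, checksum = frame
    if header != 0xFF:
        return None                               # frames must start with 0xFF
    if (header + high + low) & 0xFF != checksum:
        return None                               # checksum is the low byte of the sum
    distance_mm = (high << 8) | low
    return distance_mm / 1000.0 if distance_mm > 280 else None  # below minimum range

print(decode_frame(bytes([0xFF, 0x07, 0xA1, 0xA7])))  # -> 1.953 (meters)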

View file

@@ -0,0 +1,27 @@
#pragma once
#include <vector>
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/uart/uart.h"
namespace esphome {
namespace a01nyub {
class A01nyubComponent : public sensor::Sensor, public Component, public uart::UARTDevice {
public:
// Nothing really public.
// ========== INTERNAL METHODS ==========
void loop() override;
void dump_config() override;
protected:
void check_buffer_();
std::vector<uint8_t> buffer_;
};
} // namespace a01nyub
} // namespace esphome

View file

@@ -0,0 +1,41 @@
import esphome.codegen as cg
from esphome.components import sensor, uart
from esphome.const import (
STATE_CLASS_MEASUREMENT,
UNIT_METER,
ICON_ARROW_EXPAND_VERTICAL,
DEVICE_CLASS_DISTANCE,
)
CODEOWNERS = ["@MrSuicideParrot"]
DEPENDENCIES = ["uart"]
a01nyub_ns = cg.esphome_ns.namespace("a01nyub")
A01nyubComponent = a01nyub_ns.class_(
"A01nyubComponent", sensor.Sensor, cg.Component, uart.UARTDevice
)
CONFIG_SCHEMA = sensor.sensor_schema(
A01nyubComponent,
unit_of_measurement=UNIT_METER,
icon=ICON_ARROW_EXPAND_VERTICAL,
accuracy_decimals=3,
state_class=STATE_CLASS_MEASUREMENT,
device_class=DEVICE_CLASS_DISTANCE,
).extend(uart.UART_DEVICE_SCHEMA)
FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
"a01nyub",
baud_rate=9600,
require_tx=False,
require_rx=True,
data_bits=8,
parity=None,
stop_bits=1,
)
async def to_code(config):
var = await sensor.new_sensor(config)
await cg.register_component(var, config)
await uart.register_uart_device(var, config)

View file

@@ -0,0 +1 @@
CODEOWNERS = ["@TH-Braemer"]

View file

@@ -0,0 +1,43 @@
// Datasheet https://wiki.dfrobot.com/_A02YYUW_Waterproof_Ultrasonic_Sensor_SKU_SEN0311
#include "a02yyuw.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
namespace esphome {
namespace a02yyuw {
static const char *const TAG = "a02yyuw.sensor";
void A02yyuwComponent::loop() {
uint8_t data;
while (this->available() > 0) {
this->read_byte(&data);
if (this->buffer_.empty() && (data != 0xff))
continue;
buffer_.push_back(data);
if (this->buffer_.size() == 4)
this->check_buffer_();
}
}
void A02yyuwComponent::check_buffer_() {
uint8_t checksum = this->buffer_[0] + this->buffer_[1] + this->buffer_[2];
if (this->buffer_[3] == checksum) {
float distance = (this->buffer_[1] << 8) + this->buffer_[2];
if (distance > 30) {
ESP_LOGV(TAG, "Distance from sensor: %f mm", distance);
this->publish_state(distance);
} else {
ESP_LOGW(TAG, "Invalid data read from sensor: %s", format_hex_pretty(this->buffer_).c_str());
}
} else {
ESP_LOGW(TAG, "checksum failed: %02x != %02x", checksum, this->buffer_[3]);
}
this->buffer_.clear();
}
void A02yyuwComponent::dump_config() { LOG_SENSOR("", "A02yyuw Sensor", this); }
} // namespace a02yyuw
} // namespace esphome

View file

@@ -0,0 +1,27 @@
#pragma once
#include <vector>
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/uart/uart.h"
namespace esphome {
namespace a02yyuw {
class A02yyuwComponent : public sensor::Sensor, public Component, public uart::UARTDevice {
public:
// Nothing really public.
// ========== INTERNAL METHODS ==========
void loop() override;
void dump_config() override;
protected:
void check_buffer_();
std::vector<uint8_t> buffer_;
};
} // namespace a02yyuw
} // namespace esphome

View file

@@ -0,0 +1,41 @@
import esphome.codegen as cg
from esphome.components import sensor, uart
from esphome.const import (
STATE_CLASS_MEASUREMENT,
ICON_ARROW_EXPAND_VERTICAL,
DEVICE_CLASS_DISTANCE,
UNIT_MILLIMETER,
)
CODEOWNERS = ["@TH-Braemer"]
DEPENDENCIES = ["uart"]
a02yyuw_ns = cg.esphome_ns.namespace("a02yyuw")
A02yyuwComponent = a02yyuw_ns.class_(
"A02yyuwComponent", sensor.Sensor, cg.Component, uart.UARTDevice
)
CONFIG_SCHEMA = sensor.sensor_schema(
A02yyuwComponent,
unit_of_measurement=UNIT_MILLIMETER,
icon=ICON_ARROW_EXPAND_VERTICAL,
accuracy_decimals=0,
state_class=STATE_CLASS_MEASUREMENT,
device_class=DEVICE_CLASS_DISTANCE,
).extend(uart.UART_DEVICE_SCHEMA)
FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
"a02yyuw",
baud_rate=9600,
require_tx=False,
require_rx=True,
data_bits=8,
parity=None,
stop_bits=1,
)
async def to_code(config):
var = await sensor.new_sensor(config)
await cg.register_component(var, config)
await uart.register_uart_device(var, config)

View file

@@ -46,6 +46,7 @@ void A4988::loop() {
return;
this->dir_pin_->digital_write(dir == 1);
delayMicroseconds(50);
this->step_pin_->digital_write(true);
delayMicroseconds(5);
this->step_pin_->digital_write(false);

View file

@@ -28,6 +28,6 @@ async def to_code(config):
dir_pin = await cg.gpio_pin_expression(config[CONF_DIR_PIN])
cg.add(var.set_dir_pin(dir_pin))
if CONF_SLEEP_PIN in config:
sleep_pin = await cg.gpio_pin_expression(config[CONF_SLEEP_PIN])
if sleep_pin_config := config.get(CONF_SLEEP_PIN):
sleep_pin = await cg.gpio_pin_expression(sleep_pin_config)
cg.add(var.set_sleep_pin(sleep_pin))

View file

@@ -0,0 +1 @@
CODEOWNERS = ["@DAVe3283"]

View file

@@ -0,0 +1,182 @@
#include "esphome/core/log.h"
#include "absolute_humidity.h"
namespace esphome {
namespace absolute_humidity {
static const char *const TAG = "absolute_humidity.sensor";
void AbsoluteHumidityComponent::setup() {
ESP_LOGCONFIG(TAG, "Setting up absolute humidity '%s'...", this->get_name().c_str());
ESP_LOGD(TAG, " Added callback for temperature '%s'", this->temperature_sensor_->get_name().c_str());
this->temperature_sensor_->add_on_state_callback([this](float state) { this->temperature_callback_(state); });
if (this->temperature_sensor_->has_state()) {
this->temperature_callback_(this->temperature_sensor_->get_state());
}
ESP_LOGD(TAG, " Added callback for relative humidity '%s'", this->humidity_sensor_->get_name().c_str());
this->humidity_sensor_->add_on_state_callback([this](float state) { this->humidity_callback_(state); });
if (this->humidity_sensor_->has_state()) {
this->humidity_callback_(this->humidity_sensor_->get_state());
}
}
void AbsoluteHumidityComponent::dump_config() {
LOG_SENSOR("", "Absolute Humidity", this);
switch (this->equation_) {
case BUCK:
ESP_LOGCONFIG(TAG, "Saturation Vapor Pressure Equation: Buck");
break;
case TETENS:
ESP_LOGCONFIG(TAG, "Saturation Vapor Pressure Equation: Tetens");
break;
case WOBUS:
ESP_LOGCONFIG(TAG, "Saturation Vapor Pressure Equation: Wobus");
break;
default:
ESP_LOGE(TAG, "Invalid saturation vapor pressure equation selection!");
break;
}
ESP_LOGCONFIG(TAG, "Sources");
ESP_LOGCONFIG(TAG, " Temperature: '%s'", this->temperature_sensor_->get_name().c_str());
ESP_LOGCONFIG(TAG, " Relative Humidity: '%s'", this->humidity_sensor_->get_name().c_str());
}
float AbsoluteHumidityComponent::get_setup_priority() const { return setup_priority::DATA; }
void AbsoluteHumidityComponent::loop() {
if (!this->next_update_) {
return;
}
this->next_update_ = false;
// Ensure we have source data
const bool no_temperature = std::isnan(this->temperature_);
const bool no_humidity = std::isnan(this->humidity_);
if (no_temperature || no_humidity) {
if (no_temperature) {
ESP_LOGW(TAG, "No valid state from temperature sensor!");
}
if (no_humidity) {
ESP_LOGW(TAG, "No valid state from temperature sensor!");
}
ESP_LOGW(TAG, "Unable to calculate absolute humidity.");
this->publish_state(NAN);
this->status_set_warning();
return;
}
// Convert to desired units
const float temperature_c = this->temperature_;
const float temperature_k = temperature_c + 273.15;
const float hr = this->humidity_ / 100;
// Calculate saturation vapor pressure
float es;
switch (this->equation_) {
case BUCK:
es = es_buck(temperature_c);
break;
case TETENS:
es = es_tetens(temperature_c);
break;
case WOBUS:
es = es_wobus(temperature_c);
break;
default:
ESP_LOGE(TAG, "Invalid saturation vapor pressure equation selection!");
this->publish_state(NAN);
this->status_set_error();
return;
}
ESP_LOGD(TAG, "Saturation vapor pressure %f kPa", es);
// Calculate absolute humidity
const float absolute_humidity = vapor_density(es, hr, temperature_k);
// Publish absolute humidity
ESP_LOGD(TAG, "Publishing absolute humidity %f g/m³", absolute_humidity);
this->status_clear_warning();
this->publish_state(absolute_humidity);
}
// Buck equation (https://en.wikipedia.org/wiki/Arden_Buck_equation)
// More accurate than Tetens in normal meteorologic conditions
float AbsoluteHumidityComponent::es_buck(float temperature_c) {
float a, b, c, d;
if (temperature_c >= 0) {
a = 0.61121;
b = 18.678;
c = 234.5;
d = 257.14;
} else {
a = 0.61115;
b = 18.678;
c = 233.7;
d = 279.82;
}
return a * expf((b - (temperature_c / c)) * (temperature_c / (d + temperature_c)));
}
// Tetens equation (https://en.wikipedia.org/wiki/Tetens_equation)
float AbsoluteHumidityComponent::es_tetens(float temperature_c) {
float a, b;
if (temperature_c >= 0) {
a = 17.27;
b = 237.3;
} else {
a = 21.875;
b = 265.5;
}
return 0.61078 * expf((a * temperature_c) / (temperature_c + b));
}
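Written out, the two saturation vapor pressure helpers above evaluate (for T >= 0 °C; the code switches to a second coefficient set below freezing), both returning kPa:

e_s^{\text{Buck}}(T) = 0.61121 \exp\!\left[\left(18.678 - \frac{T}{234.5}\right)\frac{T}{257.14 + T}\right]

e_s^{\text{Tetens}}(T) = 0.61078 \exp\!\left(\frac{17.27\,T}{T + 237.3}\right)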
// Wobus equation
// https://wahiduddin.net/calc/density_altitude.htm
// https://wahiduddin.net/calc/density_algorithms.htm
// Calculate the saturation vapor pressure (kPa)
float AbsoluteHumidityComponent::es_wobus(float t) {
// THIS FUNCTION RETURNS THE SATURATION VAPOR PRESSURE ESW (MILLIBARS)
// OVER LIQUID WATER GIVEN THE TEMPERATURE T (CELSIUS). THE POLYNOMIAL
// APPROXIMATION BELOW IS DUE TO HERMAN WOBUS, A MATHEMATICIAN WHO
// WORKED AT THE NAVY WEATHER RESEARCH FACILITY, NORFOLK, VIRGINIA,
// BUT WHO IS NOW RETIRED. THE COEFFICIENTS OF THE POLYNOMIAL WERE
// CHOSEN TO FIT THE VALUES IN TABLE 94 ON PP. 351-353 OF THE SMITH-
// SONIAN METEOROLOGICAL TABLES BY ROLAND LIST (6TH EDITION). THE
// APPROXIMATION IS VALID FOR -50 < T < 100C.
//
// Baker, Schlatter 17-MAY-1982 Original version.
const float c0 = +0.99999683e00;
const float c1 = -0.90826951e-02;
const float c2 = +0.78736169e-04;
const float c3 = -0.61117958e-06;
const float c4 = +0.43884187e-08;
const float c5 = -0.29883885e-10;
const float c6 = +0.21874425e-12;
const float c7 = -0.17892321e-14;
const float c8 = +0.11112018e-16;
const float c9 = -0.30994571e-19;
const float p = c0 + t * (c1 + t * (c2 + t * (c3 + t * (c4 + t * (c5 + t * (c6 + t * (c7 + t * (c8 + t * (c9)))))))));
return 0.61078 / pow(p, 8);
}
// From https://www.environmentalbiophysics.org/chalk-talk-how-to-calculate-absolute-humidity/
// H/T to https://esphome.io/cookbook/bme280_environment.html
// H/T to https://carnotcycle.wordpress.com/2012/08/04/how-to-convert-relative-humidity-to-absolute-humidity/
float AbsoluteHumidityComponent::vapor_density(float es, float hr, float ta) {
// es = saturated vapor pressure (kPa)
// hr = relative humidity [0-1]
// ta = absolute temperature (K)
const float ea = hr * es * 1000; // vapor pressure of the air (Pa)
const float mw = 18.01528; // molar mass of water (g⋅mol⁻¹)
const float r = 8.31446261815324;  // molar gas constant (J⋅K⁻¹⋅mol⁻¹)
return (ea * mw) / (r * ta);
}
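In formula form, vapor_density() computes

\rho_v = \frac{e_a\, M_w}{R\, T_a}, \qquad e_a = h_r \cdot e_s \cdot 1000 \ \text{Pa}

with M_w = 18.01528 g/mol and R = 8.31446 J⋅K⁻¹⋅mol⁻¹. As a rough worked example (numbers chosen only for illustration): at 25 °C (298.15 K) and 50 % relative humidity, Tetens gives e_s ≈ 3.17 kPa, so e_a ≈ 1584 Pa and \rho_v ≈ 1584 · 18.015 / (8.314 · 298.15) ≈ 11.5 g/m³.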
} // namespace absolute_humidity
} // namespace esphome

View file

@@ -0,0 +1,76 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
namespace esphome {
namespace absolute_humidity {
/// Enum listing all implemented saturation vapor pressure equations.
enum SaturationVaporPressureEquation {
BUCK,
TETENS,
WOBUS,
};
/// This class implements calculation of absolute humidity from temperature and relative humidity.
class AbsoluteHumidityComponent : public sensor::Sensor, public Component {
public:
AbsoluteHumidityComponent() = default;
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { this->temperature_sensor_ = temperature_sensor; }
void set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }
void set_equation(SaturationVaporPressureEquation equation) { this->equation_ = equation; }
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void loop() override;
protected:
void temperature_callback_(float state) {
this->next_update_ = true;
this->temperature_ = state;
}
void humidity_callback_(float state) {
this->next_update_ = true;
this->humidity_ = state;
}
/** Buck equation for saturation vapor pressure in kPa.
*
* @param temperature_c Air temperature in °C.
*/
static float es_buck(float temperature_c);
/** Tetens equation for saturation vapor pressure in kPa.
*
* @param temperature_c Air temperature in °C.
*/
static float es_tetens(float temperature_c);
/** Wobus equation for saturation vapor pressure in kPa.
*
* @param temperature_c Air temperature in °C.
*/
static float es_wobus(float temperature_c);
/** Calculate vapor density (absolute humidity) in g/m³.
*
* @param es Saturation vapor pressure in kPa.
* @param hr Relative humidity 0 to 1.
* @param ta Absolute temperature in K.
* @return Absolute humidity in g/m³.
*/
static float vapor_density(float es, float hr, float ta);
sensor::Sensor *temperature_sensor_{nullptr};
sensor::Sensor *humidity_sensor_{nullptr};
bool next_update_{false};
float temperature_{NAN};
float humidity_{NAN};
SaturationVaporPressureEquation equation_;
};
} // namespace absolute_humidity
} // namespace esphome

View file

@@ -0,0 +1,56 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import sensor
from esphome.const import (
CONF_HUMIDITY,
CONF_TEMPERATURE,
STATE_CLASS_MEASUREMENT,
CONF_EQUATION,
ICON_WATER,
UNIT_GRAMS_PER_CUBIC_METER,
)
absolute_humidity_ns = cg.esphome_ns.namespace("absolute_humidity")
AbsoluteHumidityComponent = absolute_humidity_ns.class_(
"AbsoluteHumidityComponent", sensor.Sensor, cg.Component
)
SaturationVaporPressureEquation = absolute_humidity_ns.enum(
"SaturationVaporPressureEquation"
)
EQUATION = {
"BUCK": SaturationVaporPressureEquation.BUCK,
"TETENS": SaturationVaporPressureEquation.TETENS,
"WOBUS": SaturationVaporPressureEquation.WOBUS,
}
CONFIG_SCHEMA = (
sensor.sensor_schema(
unit_of_measurement=UNIT_GRAMS_PER_CUBIC_METER,
icon=ICON_WATER,
accuracy_decimals=2,
state_class=STATE_CLASS_MEASUREMENT,
)
.extend(
{
cv.GenerateID(): cv.declare_id(AbsoluteHumidityComponent),
cv.Required(CONF_TEMPERATURE): cv.use_id(sensor.Sensor),
cv.Required(CONF_HUMIDITY): cv.use_id(sensor.Sensor),
cv.Optional(CONF_EQUATION, default="WOBUS"): cv.enum(EQUATION, upper=True),
}
)
.extend(cv.COMPONENT_SCHEMA)
)
async def to_code(config):
var = await sensor.new_sensor(config)
await cg.register_component(var, config)
temperature_sensor = await cg.get_variable(config[CONF_TEMPERATURE])
cg.add(var.set_temperature_sensor(temperature_sensor))
humidity_sensor = await cg.get_variable(config[CONF_HUMIDITY])
cg.add(var.set_humidity_sensor(humidity_sensor))
cg.add(var.set_equation(config[CONF_EQUATION]))

View file

@@ -122,6 +122,7 @@ void IRAM_ATTR HOT AcDimmerDataStore::gpio_intr() {
// also take into account min_power
auto min_us = this->cycle_time_us * this->min_power / 1000;
this->enable_time_us = std::max((uint32_t) 1, ((65535 - this->value) * (this->cycle_time_us - min_us)) / 65535);
if (this->method == DIM_METHOD_LEADING_PULSE) {
// Minimum pulse time should be enough for the triac to trigger when it is close to the ZC zone
// this is for brightness near 99%
@@ -202,6 +203,7 @@ void AcDimmer::setup() {
#endif
}
void AcDimmer::write_state(float state) {
state = std::acos(1 - (2 * state)) / 3.14159; // RMS power compensation
auto new_value = static_cast<uint16_t>(roundf(state * 65535));
if (new_value != 0 && this->store_.value == 0)
this->store_.init_cycle = this->init_with_half_cycle_;
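Restating the added compensation line above (a description of what the code does, not an extra accuracy claim): the requested power fraction p is mapped to a normalized firing point within each half cycle via

\alpha_{\text{norm}} = \frac{\arccos(1 - 2p)}{\pi}, \qquad p \in [0, 1]

so p = 0 maps to 0, p = 0.5 to 0.5, and p = 1 to 1, with the curve steepest near the two extremes.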

View file

@@ -1 +1,208 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome import pins
from esphome.const import CONF_ANALOG, CONF_INPUT, CONF_NUMBER
from esphome.core import CORE
from esphome.components.esp32 import get_esp32_variant
from esphome.const import PLATFORM_ESP8266
from esphome.components.esp32.const import (
VARIANT_ESP32,
VARIANT_ESP32C2,
VARIANT_ESP32C3,
VARIANT_ESP32C6,
VARIANT_ESP32H2,
VARIANT_ESP32S2,
VARIANT_ESP32S3,
)
CODEOWNERS = ["@esphome/core"]
adc_ns = cg.esphome_ns.namespace("adc")
"""
From the ESP-IDF patch versions listed below (and 5.2+), ADC_ATTEN_DB_11 is deprecated and replaced by ADC_ATTEN_DB_12.
4.4.7
5.0.5
5.1.3
5.2+
"""
ATTENUATION_MODES = {
"0db": cg.global_ns.ADC_ATTEN_DB_0,
"2.5db": cg.global_ns.ADC_ATTEN_DB_2_5,
"6db": cg.global_ns.ADC_ATTEN_DB_6,
"11db": adc_ns.ADC_ATTEN_DB_12_COMPAT,
"12db": adc_ns.ADC_ATTEN_DB_12_COMPAT,
"auto": "auto",
}
adc1_channel_t = cg.global_ns.enum("adc1_channel_t")
adc2_channel_t = cg.global_ns.enum("adc2_channel_t")
# From https://github.com/espressif/esp-idf/blob/master/components/driver/include/driver/adc_common.h
# pin to adc1 channel mapping
ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
VARIANT_ESP32: {
36: adc1_channel_t.ADC1_CHANNEL_0,
37: adc1_channel_t.ADC1_CHANNEL_1,
38: adc1_channel_t.ADC1_CHANNEL_2,
39: adc1_channel_t.ADC1_CHANNEL_3,
32: adc1_channel_t.ADC1_CHANNEL_4,
33: adc1_channel_t.ADC1_CHANNEL_5,
34: adc1_channel_t.ADC1_CHANNEL_6,
35: adc1_channel_t.ADC1_CHANNEL_7,
},
VARIANT_ESP32S2: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
3: adc1_channel_t.ADC1_CHANNEL_2,
4: adc1_channel_t.ADC1_CHANNEL_3,
5: adc1_channel_t.ADC1_CHANNEL_4,
6: adc1_channel_t.ADC1_CHANNEL_5,
7: adc1_channel_t.ADC1_CHANNEL_6,
8: adc1_channel_t.ADC1_CHANNEL_7,
9: adc1_channel_t.ADC1_CHANNEL_8,
10: adc1_channel_t.ADC1_CHANNEL_9,
},
VARIANT_ESP32S3: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
3: adc1_channel_t.ADC1_CHANNEL_2,
4: adc1_channel_t.ADC1_CHANNEL_3,
5: adc1_channel_t.ADC1_CHANNEL_4,
6: adc1_channel_t.ADC1_CHANNEL_5,
7: adc1_channel_t.ADC1_CHANNEL_6,
8: adc1_channel_t.ADC1_CHANNEL_7,
9: adc1_channel_t.ADC1_CHANNEL_8,
10: adc1_channel_t.ADC1_CHANNEL_9,
},
VARIANT_ESP32C3: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
VARIANT_ESP32C2: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
VARIANT_ESP32C6: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
5: adc1_channel_t.ADC1_CHANNEL_5,
6: adc1_channel_t.ADC1_CHANNEL_6,
},
VARIANT_ESP32H2: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
}
ESP32_VARIANT_ADC2_PIN_TO_CHANNEL = {
# TODO: add other variants
VARIANT_ESP32: {
4: adc2_channel_t.ADC2_CHANNEL_0,
0: adc2_channel_t.ADC2_CHANNEL_1,
2: adc2_channel_t.ADC2_CHANNEL_2,
15: adc2_channel_t.ADC2_CHANNEL_3,
13: adc2_channel_t.ADC2_CHANNEL_4,
12: adc2_channel_t.ADC2_CHANNEL_5,
14: adc2_channel_t.ADC2_CHANNEL_6,
27: adc2_channel_t.ADC2_CHANNEL_7,
25: adc2_channel_t.ADC2_CHANNEL_8,
26: adc2_channel_t.ADC2_CHANNEL_9,
},
VARIANT_ESP32S2: {
11: adc2_channel_t.ADC2_CHANNEL_0,
12: adc2_channel_t.ADC2_CHANNEL_1,
13: adc2_channel_t.ADC2_CHANNEL_2,
14: adc2_channel_t.ADC2_CHANNEL_3,
15: adc2_channel_t.ADC2_CHANNEL_4,
16: adc2_channel_t.ADC2_CHANNEL_5,
17: adc2_channel_t.ADC2_CHANNEL_6,
18: adc2_channel_t.ADC2_CHANNEL_7,
19: adc2_channel_t.ADC2_CHANNEL_8,
20: adc2_channel_t.ADC2_CHANNEL_9,
},
VARIANT_ESP32S3: {
11: adc2_channel_t.ADC2_CHANNEL_0,
12: adc2_channel_t.ADC2_CHANNEL_1,
13: adc2_channel_t.ADC2_CHANNEL_2,
14: adc2_channel_t.ADC2_CHANNEL_3,
15: adc2_channel_t.ADC2_CHANNEL_4,
16: adc2_channel_t.ADC2_CHANNEL_5,
17: adc2_channel_t.ADC2_CHANNEL_6,
18: adc2_channel_t.ADC2_CHANNEL_7,
19: adc2_channel_t.ADC2_CHANNEL_8,
20: adc2_channel_t.ADC2_CHANNEL_9,
},
VARIANT_ESP32C3: {
5: adc2_channel_t.ADC2_CHANNEL_0,
},
VARIANT_ESP32C2: {},
VARIANT_ESP32C6: {},
VARIANT_ESP32H2: {},
}
def validate_adc_pin(value):
if str(value).upper() == "VCC":
if CORE.is_rp2040:
return pins.internal_gpio_input_pin_schema(29)
return cv.only_on([PLATFORM_ESP8266])("VCC")
if str(value).upper() == "TEMPERATURE":
return cv.only_on_rp2040("TEMPERATURE")
if CORE.is_esp32:
conf = pins.internal_gpio_input_pin_schema(value)
value = conf[CONF_NUMBER]
variant = get_esp32_variant()
if (
variant not in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL
and variant not in ESP32_VARIANT_ADC2_PIN_TO_CHANNEL
):
raise cv.Invalid(f"This ESP32 variant ({variant}) is not supported")
if (
value not in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL[variant]
and value not in ESP32_VARIANT_ADC2_PIN_TO_CHANNEL[variant]
):
raise cv.Invalid(f"{variant} doesn't support ADC on this pin")
return conf
if CORE.is_esp8266:
conf = pins.gpio_pin_schema(
{CONF_ANALOG: True, CONF_INPUT: True}, internal=True
)(value)
if conf[CONF_NUMBER] != 17: # A0
raise cv.Invalid("ESP8266: Only pin A0 (GPIO17) supports ADC")
return conf
if CORE.is_rp2040:
conf = pins.internal_gpio_input_pin_schema(value)
number = conf[CONF_NUMBER]
if number not in (26, 27, 28, 29):
raise cv.Invalid("RP2040: Only pins 26, 27, 28 and 29 support ADC")
return conf
if CORE.is_libretiny:
return pins.gpio_pin_schema(
{CONF_ANALOG: True, CONF_INPUT: True}, internal=True
)(value)
raise NotImplementedError

View file

@@ -1,6 +1,6 @@
#include "adc_sensor.h"
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#ifdef USE_ESP8266
#ifdef USE_ADC_SENSOR_VCC
@@ -11,33 +11,62 @@ ADC_MODE(ADC_VCC)
#endif
#endif
#ifdef USE_RP2040
#ifdef CYW43_USES_VSYS_PIN
#include "pico/cyw43_arch.h"
#endif
#include <hardware/adc.h>
#endif
namespace esphome {
namespace adc {
static const char *const TAG = "adc";
// 13 bits for S3 / 12 bit for all other esp32 variants
// create a const to avoid the repeated cast to enum
// 13-bit for S2, 12-bit for all other ESP32 variants
#ifdef USE_ESP32
static const adc_bits_width_t ADC_WIDTH_MAX_SOC_BITS = static_cast<adc_bits_width_t>(ADC_WIDTH_MAX - 1);
#ifndef SOC_ADC_RTC_MAX_BITWIDTH
#if USE_ESP32_VARIANT_ESP32S2
static const int32_t SOC_ADC_RTC_MAX_BITWIDTH = 13;
#else
static const int32_t SOC_ADC_RTC_MAX_BITWIDTH = 12;
#endif
#endif
void ADCSensor::setup() {
static const int ADC_MAX = (1 << SOC_ADC_RTC_MAX_BITWIDTH) - 1; // 4095 (12 bit) or 8191 (13 bit)
static const int ADC_HALF = (1 << SOC_ADC_RTC_MAX_BITWIDTH) >> 1; // 2048 (12 bit) or 4096 (13 bit)
#endif
#ifdef USE_RP2040
extern "C"
#endif
void
ADCSensor::setup() {
ESP_LOGCONFIG(TAG, "Setting up ADC '%s'...", this->get_name().c_str());
#ifndef USE_ADC_SENSOR_VCC
pin_->setup();
#if !defined(USE_ADC_SENSOR_VCC) && !defined(USE_RP2040)
this->pin_->setup();
#endif
#ifdef USE_ESP32
if (this->channel1_ != ADC1_CHANNEL_MAX) {
adc1_config_width(ADC_WIDTH_MAX_SOC_BITS);
if (!autorange_) {
adc1_config_channel_atten(channel_, attenuation_);
if (!this->autorange_) {
adc1_config_channel_atten(this->channel1_, this->attenuation_);
}
} else if (this->channel2_ != ADC2_CHANNEL_MAX) {
if (!this->autorange_) {
adc2_config_channel_atten(this->channel2_, this->attenuation_);
}
}
// load characteristics for each attenuation
for (int i = 0; i < (int) ADC_ATTEN_MAX; i++) {
auto cal_value = esp_adc_cal_characterize(ADC_UNIT_1, (adc_atten_t) i, ADC_WIDTH_MAX_SOC_BITS,
for (int32_t i = 0; i <= ADC_ATTEN_DB_12_COMPAT; i++) {
auto adc_unit = this->channel1_ != ADC1_CHANNEL_MAX ? ADC_UNIT_1 : ADC_UNIT_2;
auto cal_value = esp_adc_cal_characterize(adc_unit, (adc_atten_t) i, ADC_WIDTH_MAX_SOC_BITS,
1100, // default vref
&cal_characteristics_[i]);
&this->cal_characteristics_[i]);
switch (cal_value) {
case ESP_ADC_CAL_VAL_EFUSE_VREF:
ESP_LOGV(TAG, "Using eFuse Vref for calibration");
@@ -51,46 +80,65 @@ void ADCSensor::setup() {
}
}
// adc_gpio_init doesn't exist on ESP32-C3 or ESP32-H2
#if !defined(USE_ESP32_VARIANT_ESP32C3) && !defined(USE_ESP32_VARIANT_ESP32H2)
adc_gpio_init(ADC_UNIT_1, (adc_channel_t) channel_);
#endif
#endif // USE_ESP32
#ifdef USE_RP2040
static bool initialized = false;
if (!initialized) {
adc_init();
initialized = true;
}
#endif
ESP_LOGCONFIG(TAG, "ADC '%s' setup finished!", this->get_name().c_str());
}
void ADCSensor::dump_config() {
LOG_SENSOR("", "ADC Sensor", this);
#ifdef USE_ESP8266
#if defined(USE_ESP8266) || defined(USE_LIBRETINY)
#ifdef USE_ADC_SENSOR_VCC
ESP_LOGCONFIG(TAG, " Pin: VCC");
#else
LOG_PIN(" Pin: ", pin_);
LOG_PIN(" Pin: ", this->pin_);
#endif
#endif // USE_ESP8266
#endif // USE_ESP8266 || USE_LIBRETINY
#ifdef USE_ESP32
LOG_PIN(" Pin: ", pin_);
if (autorange_) {
LOG_PIN(" Pin: ", this->pin_);
if (this->autorange_) {
ESP_LOGCONFIG(TAG, " Attenuation: auto");
} else {
switch (this->attenuation_) {
case ADC_ATTEN_DB_0:
ESP_LOGCONFIG(TAG, " Attenuation: 0db (max 1.1V)");
ESP_LOGCONFIG(TAG, " Attenuation: 0db");
break;
case ADC_ATTEN_DB_2_5:
ESP_LOGCONFIG(TAG, " Attenuation: 2.5db (max 1.5V)");
ESP_LOGCONFIG(TAG, " Attenuation: 2.5db");
break;
case ADC_ATTEN_DB_6:
ESP_LOGCONFIG(TAG, " Attenuation: 6db (max 2.2V)");
ESP_LOGCONFIG(TAG, " Attenuation: 6db");
break;
case ADC_ATTEN_DB_11:
ESP_LOGCONFIG(TAG, " Attenuation: 11db (max 3.9V)");
case ADC_ATTEN_DB_12_COMPAT:
ESP_LOGCONFIG(TAG, " Attenuation: 12db");
break;
default: // This is to satisfy the unused ADC_ATTEN_MAX
break;
}
}
#endif // USE_ESP32
#ifdef USE_RP2040
if (this->is_temperature_) {
ESP_LOGCONFIG(TAG, " Pin: Temperature");
} else {
#ifdef USE_ADC_SENSOR_VCC
ESP_LOGCONFIG(TAG, " Pin: VCC");
#else
LOG_PIN(" Pin: ", this->pin_);
#endif // USE_ADC_SENSOR_VCC
}
#endif // USE_RP2040
ESP_LOGCONFIG(TAG, " Samples: %i", this->sample_count_);
LOG_UPDATE_INTERVAL(this);
}
@@ -101,14 +149,24 @@ void ADCSensor::update() {
this->publish_state(value_v);
}
void ADCSensor::set_sample_count(uint8_t sample_count) {
if (sample_count != 0) {
this->sample_count_ = sample_count;
}
}
#ifdef USE_ESP8266
float ADCSensor::sample() {
uint32_t raw = 0;
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
#ifdef USE_ADC_SENSOR_VCC
int raw = ESP.getVcc(); // NOLINT(readability-static-accessed-through-instance)
raw += ESP.getVcc(); // NOLINT(readability-static-accessed-through-instance)
#else
int raw = analogRead(this->pin_->get_pin()); // NOLINT
raw += analogRead(this->pin_->get_pin()); // NOLINT
#endif
if (output_raw_) {
}
raw = (raw + (this->sample_count_ >> 1)) / this->sample_count_; // NOLINT(clang-analyzer-core.DivideZero)
if (this->output_raw_) {
return raw;
}
return raw / 1024.0f;
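The (raw + (this->sample_count_ >> 1)) / this->sample_count_ pattern used here (and repeated in the other sample() variants below) is integer averaging with round-to-nearest; for n samples x_1 … x_n it computes

\bar{x} = \left\lfloor \frac{\sum_{k=1}^{n} x_k + \lfloor n/2 \rfloor}{n} \right\rfloor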
@@ -117,57 +175,155 @@ float ADCSensor::sample() {
#ifdef USE_ESP32
float ADCSensor::sample() {
if (!autorange_) {
int raw = adc1_get_raw(channel_);
if (!this->autorange_) {
uint32_t sum = 0;
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
int raw = -1;
if (this->channel1_ != ADC1_CHANNEL_MAX) {
raw = adc1_get_raw(this->channel1_);
} else if (this->channel2_ != ADC2_CHANNEL_MAX) {
adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw);
}
if (raw == -1) {
return NAN;
}
if (output_raw_) {
return raw;
sum += raw;
}
uint32_t mv = esp_adc_cal_raw_to_voltage(raw, &cal_characteristics_[(int) attenuation_]);
sum = (sum + (this->sample_count_ >> 1)) / this->sample_count_; // NOLINT(clang-analyzer-core.DivideZero)
if (this->output_raw_) {
return sum;
}
uint32_t mv = esp_adc_cal_raw_to_voltage(sum, &this->cal_characteristics_[(int32_t) this->attenuation_]);
return mv / 1000.0f;
}
int raw11, raw6 = 4095, raw2 = 4095, raw0 = 4095;
adc1_config_channel_atten(channel_, ADC_ATTEN_DB_11);
raw11 = adc1_get_raw(channel_);
if (raw11 < 4095) {
adc1_config_channel_atten(channel_, ADC_ATTEN_DB_6);
raw6 = adc1_get_raw(channel_);
if (raw6 < 4095) {
adc1_config_channel_atten(channel_, ADC_ATTEN_DB_2_5);
raw2 = adc1_get_raw(channel_);
if (raw2 < 4095) {
adc1_config_channel_atten(channel_, ADC_ATTEN_DB_0);
raw0 = adc1_get_raw(channel_);
int raw12 = ADC_MAX, raw6 = ADC_MAX, raw2 = ADC_MAX, raw0 = ADC_MAX;
if (this->channel1_ != ADC1_CHANNEL_MAX) {
adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_12_COMPAT);
raw12 = adc1_get_raw(this->channel1_);
if (raw12 < ADC_MAX) {
adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_6);
raw6 = adc1_get_raw(this->channel1_);
if (raw6 < ADC_MAX) {
adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_2_5);
raw2 = adc1_get_raw(this->channel1_);
if (raw2 < ADC_MAX) {
adc1_config_channel_atten(this->channel1_, ADC_ATTEN_DB_0);
raw0 = adc1_get_raw(this->channel1_);
}
}
}
} else if (this->channel2_ != ADC2_CHANNEL_MAX) {
adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_12_COMPAT);
adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw12);
if (raw12 < ADC_MAX) {
adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_6);
adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw6);
if (raw6 < ADC_MAX) {
adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_2_5);
adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw2);
if (raw2 < ADC_MAX) {
adc2_config_channel_atten(this->channel2_, ADC_ATTEN_DB_0);
adc2_get_raw(this->channel2_, ADC_WIDTH_MAX_SOC_BITS, &raw0);
}
}
}
}
if (raw0 == -1 || raw2 == -1 || raw6 == -1 || raw11 == -1) {
if (raw0 == -1 || raw2 == -1 || raw6 == -1 || raw12 == -1) {
return NAN;
}
uint32_t mv11 = esp_adc_cal_raw_to_voltage(raw11, &cal_characteristics_[(int) ADC_ATTEN_DB_11]);
uint32_t mv6 = esp_adc_cal_raw_to_voltage(raw6, &cal_characteristics_[(int) ADC_ATTEN_DB_6]);
uint32_t mv2 = esp_adc_cal_raw_to_voltage(raw2, &cal_characteristics_[(int) ADC_ATTEN_DB_2_5]);
uint32_t mv0 = esp_adc_cal_raw_to_voltage(raw0, &cal_characteristics_[(int) ADC_ATTEN_DB_0]);
uint32_t mv12 = esp_adc_cal_raw_to_voltage(raw12, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_12_COMPAT]);
uint32_t mv6 = esp_adc_cal_raw_to_voltage(raw6, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_6]);
uint32_t mv2 = esp_adc_cal_raw_to_voltage(raw2, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_2_5]);
uint32_t mv0 = esp_adc_cal_raw_to_voltage(raw0, &this->cal_characteristics_[(int32_t) ADC_ATTEN_DB_0]);
// Contribution of each value, in range 0-2048
uint32_t c11 = std::min(raw11, 2048);
uint32_t c6 = 2048 - std::abs(raw6 - 2048);
uint32_t c2 = 2048 - std::abs(raw2 - 2048);
uint32_t c0 = std::min(4095 - raw0, 2048);
// max theoretical csum value is 2048*4 = 8192
uint32_t csum = c11 + c6 + c2 + c0;
// Contribution of each value, in range 0-2048 (12 bit ADC) or 0-4096 (13 bit ADC)
uint32_t c12 = std::min(raw12, ADC_HALF);
uint32_t c6 = ADC_HALF - std::abs(raw6 - ADC_HALF);
uint32_t c2 = ADC_HALF - std::abs(raw2 - ADC_HALF);
uint32_t c0 = std::min(ADC_MAX - raw0, ADC_HALF);
// max theoretical csum value is 4096*4 = 16384
uint32_t csum = c12 + c6 + c2 + c0;
// each mv is max 3900; so max value is 3900*2048*4, fits in unsigned
uint32_t mv_scaled = (mv11 * c11) + (mv6 * c6) + (mv2 * c2) + (mv0 * c0);
// each mv is max 3900; so max value is 3900*4096*4, fits in unsigned32
uint32_t mv_scaled = (mv12 * c12) + (mv6 * c6) + (mv2 * c2) + (mv0 * c0);
return mv_scaled / (float) (csum * 1000U);
}
#endif // USE_ESP32
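The autoranging branch above blends the four attenuation readings rather than picking one. In the notation of the code comments, with c_i the per-attenuation contribution weights (each clamped to at most ADC_HALF) and mv_i the calibrated millivolt readings, the returned voltage is

V = \frac{\sum_i c_i \, mv_i}{1000 \sum_i c_i}

so each attenuation's reading is weighted by how close it sits to the usable part of that attenuation's scale, and the weighted mean is converted from mV to V by the factor 1000.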
#ifdef USE_RP2040
float ADCSensor::sample() {
if (this->is_temperature_) {
adc_set_temp_sensor_enabled(true);
delay(1);
adc_select_input(4);
uint32_t raw = 0;
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
raw += adc_read();
}
raw = (raw + (this->sample_count_ >> 1)) / this->sample_count_; // NOLINT(clang-analyzer-core.DivideZero)
adc_set_temp_sensor_enabled(false);
if (this->output_raw_) {
return raw;
}
return raw * 3.3f / 4096.0f;
} else {
uint8_t pin = this->pin_->get_pin();
#ifdef CYW43_USES_VSYS_PIN
if (pin == PICO_VSYS_PIN) {
// Measuring VSYS on Raspberry Pico W needs to be wrapped with
// `cyw43_thread_enter()`/`cyw43_thread_exit()` as discussed in
// https://github.com/raspberrypi/pico-sdk/issues/1222, since Wifi chip and
// VSYS ADC both share GPIO29
cyw43_thread_enter();
}
#endif // CYW43_USES_VSYS_PIN
adc_gpio_init(pin);
adc_select_input(pin - 26);
uint32_t raw = 0;
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
raw += adc_read();
}
raw = (raw + (this->sample_count_ >> 1)) / this->sample_count_; // NOLINT(clang-analyzer-core.DivideZero)
#ifdef CYW43_USES_VSYS_PIN
if (pin == PICO_VSYS_PIN) {
cyw43_thread_exit();
}
#endif // CYW43_USES_VSYS_PIN
if (this->output_raw_) {
return raw;
}
float coeff = pin == PICO_VSYS_PIN ? 3.0 : 1.0;
return raw * 3.3f / 4096.0f * coeff;
}
}
#endif
#ifdef USE_LIBRETINY
float ADCSensor::sample() {
uint32_t raw = 0;
if (this->output_raw_) {
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
raw += analogRead(this->pin_->get_pin()); // NOLINT
}
raw = (raw + (this->sample_count_ >> 1)) / this->sample_count_; // NOLINT(clang-analyzer-core.DivideZero)
return raw;
}
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
raw += analogReadVoltage(this->pin_->get_pin()); // NOLINT
}
raw = (raw + (this->sample_count_ >> 1)) / this->sample_count_; // NOLINT(clang-analyzer-core.DivideZero)
return raw / 1000.0f;
}
#endif // USE_LIBRETINY
#ifdef USE_ESP8266
std::string ADCSensor::unique_id() { return get_mac_address() + "-adc"; }
#endif

View file

@@ -1,52 +1,89 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/hal.h"
#include "esphome/core/defines.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/voltage_sampler/voltage_sampler.h"
#include "esphome/core/component.h"
#include "esphome/core/defines.h"
#include "esphome/core/hal.h"
#ifdef USE_ESP32
#include "driver/adc.h"
#include <esp_adc_cal.h>
#include "driver/adc.h"
#endif
namespace esphome {
namespace adc {
#ifdef USE_ESP32
// clang-format off
#if (ESP_IDF_VERSION_MAJOR == 4 && ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(4, 4, 7)) || \
(ESP_IDF_VERSION_MAJOR == 5 && \
((ESP_IDF_VERSION_MINOR == 0 && ESP_IDF_VERSION_PATCH >= 5) || \
(ESP_IDF_VERSION_MINOR == 1 && ESP_IDF_VERSION_PATCH >= 3) || \
(ESP_IDF_VERSION_MINOR >= 2)) \
)
// clang-format on
static const adc_atten_t ADC_ATTEN_DB_12_COMPAT = ADC_ATTEN_DB_12;
#else
static const adc_atten_t ADC_ATTEN_DB_12_COMPAT = ADC_ATTEN_DB_11;
#endif
#endif // USE_ESP32
class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage_sampler::VoltageSampler {
public:
#ifdef USE_ESP32
/// Set the attenuation for this pin. Only available on the ESP32.
void set_attenuation(adc_atten_t attenuation) { attenuation_ = attenuation; }
void set_channel(adc1_channel_t channel) { channel_ = channel; }
void set_autorange(bool autorange) { autorange_ = autorange; }
void set_attenuation(adc_atten_t attenuation) { this->attenuation_ = attenuation; }
void set_channel1(adc1_channel_t channel) {
this->channel1_ = channel;
this->channel2_ = ADC2_CHANNEL_MAX;
}
void set_channel2(adc2_channel_t channel) {
this->channel2_ = channel;
this->channel1_ = ADC1_CHANNEL_MAX;
}
void set_autorange(bool autorange) { this->autorange_ = autorange; }
#endif
/// Update adc values.
/// Update ADC values
void update() override;
/// Setup ADc
/// Setup ADC
void setup() override;
void dump_config() override;
/// `HARDWARE_LATE` setup priority.
/// `HARDWARE_LATE` setup priority
float get_setup_priority() const override;
void set_pin(InternalGPIOPin *pin) { this->pin_ = pin; }
void set_output_raw(bool output_raw) { output_raw_ = output_raw; }
void set_output_raw(bool output_raw) { this->output_raw_ = output_raw; }
void set_sample_count(uint8_t sample_count);
float sample() override;
#ifdef USE_ESP8266
std::string unique_id() override;
#endif
#ifdef USE_RP2040
void set_is_temperature() { this->is_temperature_ = true; }
#endif
protected:
InternalGPIOPin *pin_;
bool output_raw_{false};
uint8_t sample_count_{1};
#ifdef USE_RP2040
bool is_temperature_{false};
#endif
#ifdef USE_ESP32
adc_atten_t attenuation_{ADC_ATTEN_DB_0};
adc1_channel_t channel_{};
adc1_channel_t channel1_{ADC1_CHANNEL_MAX};
adc2_channel_t channel2_{ADC2_CHANNEL_MAX};
bool autorange_{false};
esp_adc_cal_characteristics_t cal_characteristics_[(int) ADC_ATTEN_MAX] = {};
#if ESP_IDF_VERSION_MAJOR >= 5
esp_adc_cal_characteristics_t cal_characteristics_[SOC_ADC_ATTEN_NUM] = {};
#else
esp_adc_cal_characteristics_t cal_characteristics_[ADC_ATTEN_MAX] = {};
#endif
#endif
};

View file

@@ -1,132 +1,73 @@
import logging
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome import pins
import esphome.final_validate as fv
from esphome.core import CORE
from esphome.components import sensor, voltage_sampler
from esphome.components.esp32 import get_esp32_variant
from esphome.const import (
CONF_ATTENUATION,
CONF_RAW,
CONF_ID,
CONF_INPUT,
CONF_NUMBER,
CONF_PIN,
CONF_RAW,
CONF_WIFI,
DEVICE_CLASS_VOLTAGE,
STATE_CLASS_MEASUREMENT,
UNIT_VOLT,
)
from esphome.core import CORE
from esphome.components.esp32 import get_esp32_variant
from esphome.components.esp32.const import (
VARIANT_ESP32,
VARIANT_ESP32C3,
VARIANT_ESP32H2,
VARIANT_ESP32S2,
VARIANT_ESP32S3,
from . import (
ATTENUATION_MODES,
ESP32_VARIANT_ADC1_PIN_TO_CHANNEL,
ESP32_VARIANT_ADC2_PIN_TO_CHANNEL,
adc_ns,
validate_adc_pin,
)
_LOGGER = logging.getLogger(__name__)
AUTO_LOAD = ["voltage_sampler"]
ATTENUATION_MODES = {
"0db": cg.global_ns.ADC_ATTEN_DB_0,
"2.5db": cg.global_ns.ADC_ATTEN_DB_2_5,
"6db": cg.global_ns.ADC_ATTEN_DB_6,
"11db": cg.global_ns.ADC_ATTEN_DB_11,
"auto": "auto",
}
adc1_channel_t = cg.global_ns.enum("adc1_channel_t")
# From https://github.com/espressif/esp-idf/blob/master/components/driver/include/driver/adc_common.h
# pin to adc1 channel mapping
ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
VARIANT_ESP32: {
36: adc1_channel_t.ADC1_CHANNEL_0,
37: adc1_channel_t.ADC1_CHANNEL_1,
38: adc1_channel_t.ADC1_CHANNEL_2,
39: adc1_channel_t.ADC1_CHANNEL_3,
32: adc1_channel_t.ADC1_CHANNEL_4,
33: adc1_channel_t.ADC1_CHANNEL_5,
34: adc1_channel_t.ADC1_CHANNEL_6,
35: adc1_channel_t.ADC1_CHANNEL_7,
},
VARIANT_ESP32S2: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
3: adc1_channel_t.ADC1_CHANNEL_2,
4: adc1_channel_t.ADC1_CHANNEL_3,
5: adc1_channel_t.ADC1_CHANNEL_4,
6: adc1_channel_t.ADC1_CHANNEL_5,
7: adc1_channel_t.ADC1_CHANNEL_6,
8: adc1_channel_t.ADC1_CHANNEL_7,
9: adc1_channel_t.ADC1_CHANNEL_8,
10: adc1_channel_t.ADC1_CHANNEL_9,
},
VARIANT_ESP32S3: {
1: adc1_channel_t.ADC1_CHANNEL_0,
2: adc1_channel_t.ADC1_CHANNEL_1,
3: adc1_channel_t.ADC1_CHANNEL_2,
4: adc1_channel_t.ADC1_CHANNEL_3,
5: adc1_channel_t.ADC1_CHANNEL_4,
6: adc1_channel_t.ADC1_CHANNEL_5,
7: adc1_channel_t.ADC1_CHANNEL_6,
8: adc1_channel_t.ADC1_CHANNEL_7,
9: adc1_channel_t.ADC1_CHANNEL_8,
10: adc1_channel_t.ADC1_CHANNEL_9,
},
VARIANT_ESP32C3: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
VARIANT_ESP32H2: {
0: adc1_channel_t.ADC1_CHANNEL_0,
1: adc1_channel_t.ADC1_CHANNEL_1,
2: adc1_channel_t.ADC1_CHANNEL_2,
3: adc1_channel_t.ADC1_CHANNEL_3,
4: adc1_channel_t.ADC1_CHANNEL_4,
},
}
CONF_SAMPLES = "samples"
def validate_adc_pin(value):
if str(value).upper() == "VCC":
return cv.only_on_esp8266("VCC")
if CORE.is_esp32:
value = pins.internal_gpio_input_pin_number(value)
variant = get_esp32_variant()
if variant not in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL:
raise cv.Invalid(f"This ESP32 variant ({variant}) is not supported")
if value not in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL[variant]:
raise cv.Invalid(f"{variant} doesn't support ADC on this pin")
return pins.internal_gpio_input_pin_schema(value)
if CORE.is_esp8266:
from esphome.components.esp8266.gpio import CONF_ANALOG
value = pins.internal_gpio_pin_number({CONF_ANALOG: True, CONF_INPUT: True})(
value
)
if value != 17: # A0
raise cv.Invalid("ESP8266: Only pin A0 (GPIO17) supports ADC.")
return pins.gpio_pin_schema(
{CONF_ANALOG: True, CONF_INPUT: True}, internal=True
)(value)
raise NotImplementedError
_attenuation = cv.enum(ATTENUATION_MODES, lower=True)
def validate_config(config):
if config[CONF_RAW] and config.get(CONF_ATTENUATION, None) == "auto":
raise cv.Invalid("Automatic attenuation cannot be used when raw output is set.")
raise cv.Invalid("Automatic attenuation cannot be used when raw output is set")
if config.get(CONF_ATTENUATION, None) == "auto" and config.get(CONF_SAMPLES, 1) > 1:
raise cv.Invalid(
"Automatic attenuation cannot be used when multisampling is set"
)
if config.get(CONF_ATTENUATION) == "11db":
_LOGGER.warning(
"`attenuation: 11db` is deprecated, use `attenuation: 12db` instead"
)
# Alter value here so `config` command prints the recommended change
config[CONF_ATTENUATION] = _attenuation("12db")
return config
def final_validate_config(config):
if CORE.is_esp32:
variant = get_esp32_variant()
if (
CONF_WIFI in fv.full_config.get()
and config[CONF_PIN][CONF_NUMBER]
in ESP32_VARIANT_ADC2_PIN_TO_CHANNEL[variant]
):
raise cv.Invalid(
f"{variant} doesn't support ADC on this pin when Wi-Fi is configured"
)
return config
adc_ns = cg.esphome_ns.namespace("adc")
ADCSensor = adc_ns.class_(
"ADCSensor", sensor.Sensor, cg.PollingComponent, voltage_sampler.VoltageSampler
)
@@ -144,14 +85,17 @@ CONFIG_SCHEMA = cv.All(
cv.Required(CONF_PIN): validate_adc_pin,
cv.Optional(CONF_RAW, default=False): cv.boolean,
cv.SplitDefault(CONF_ATTENUATION, esp32="0db"): cv.All(
cv.only_on_esp32, cv.enum(ATTENUATION_MODES, lower=True)
cv.only_on_esp32, _attenuation
),
cv.Optional(CONF_SAMPLES, default=1): cv.int_range(min=1, max=255),
}
)
.extend(cv.polling_component_schema("60s")),
validate_config,
)
FINAL_VALIDATE_SCHEMA = final_validate_config
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
@@ -160,21 +104,33 @@ async def to_code(config):
if config[CONF_PIN] == "VCC":
cg.add_define("USE_ADC_SENSOR_VCC")
elif config[CONF_PIN] == "TEMPERATURE":
cg.add(var.set_is_temperature())
else:
pin = await cg.gpio_pin_expression(config[CONF_PIN])
cg.add(var.set_pin(pin))
if CONF_RAW in config:
cg.add(var.set_output_raw(config[CONF_RAW]))
cg.add(var.set_sample_count(config[CONF_SAMPLES]))
if CONF_ATTENUATION in config:
if config[CONF_ATTENUATION] == "auto":
if attenuation := config.get(CONF_ATTENUATION):
if attenuation == "auto":
cg.add(var.set_autorange(cg.global_ns.true))
else:
cg.add(var.set_attenuation(config[CONF_ATTENUATION]))
cg.add(var.set_attenuation(attenuation))
if CORE.is_esp32:
variant = get_esp32_variant()
pin_num = config[CONF_PIN][CONF_NUMBER]
if (
variant in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL
and pin_num in ESP32_VARIANT_ADC1_PIN_TO_CHANNEL[variant]
):
chan = ESP32_VARIANT_ADC1_PIN_TO_CHANNEL[variant][pin_num]
cg.add(var.set_channel(chan))
cg.add(var.set_channel1(chan))
elif (
variant in ESP32_VARIANT_ADC2_PIN_TO_CHANNEL
and pin_num in ESP32_VARIANT_ADC2_PIN_TO_CHANNEL[variant]
):
chan = ESP32_VARIANT_ADC2_PIN_TO_CHANNEL[variant][pin_num]
cg.add(var.set_channel2(chan))

View file

@@ -0,0 +1,23 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import spi
from esphome.const import CONF_ID
DEPENDENCIES = ["spi"]
MULTI_CONF = True
CODEOWNERS = ["@DeerMaximum"]
adc128s102_ns = cg.esphome_ns.namespace("adc128s102")
ADC128S102 = adc128s102_ns.class_("ADC128S102", cg.Component, spi.SPIDevice)
CONFIG_SCHEMA = cv.Schema(
{
cv.GenerateID(): cv.declare_id(ADC128S102),
}
).extend(spi.spi_device_schema(cs_pin_required=True))
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
await spi.register_spi_device(var, config)

View file

@@ -0,0 +1,35 @@
#include "adc128s102.h"
#include "esphome/core/log.h"
namespace esphome {
namespace adc128s102 {
static const char *const TAG = "adc128s102";
float ADC128S102::get_setup_priority() const { return setup_priority::HARDWARE; }
void ADC128S102::setup() {
ESP_LOGCONFIG(TAG, "Setting up adc128s102");
this->spi_setup();
}
void ADC128S102::dump_config() {
ESP_LOGCONFIG(TAG, "ADC128S102:");
LOG_PIN(" CS Pin:", this->cs_);
}
uint16_t ADC128S102::read_data(uint8_t channel) {
uint8_t control = channel << 3;
this->enable();
uint8_t adc_primary_byte = this->transfer_byte(control);
uint8_t adc_secondary_byte = this->transfer_byte(0x00);
this->disable();
uint16_t digital_value = adc_primary_byte << 8 | adc_secondary_byte;
return digital_value;
}
} // namespace adc128s102
} // namespace esphome

View file

@@ -0,0 +1,23 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/hal.h"
#include "esphome/components/spi/spi.h"
namespace esphome {
namespace adc128s102 {
class ADC128S102 : public Component,
public spi::SPIDevice<spi::BIT_ORDER_MSB_FIRST, spi::CLOCK_POLARITY_LOW, spi::CLOCK_PHASE_LEADING,
spi::DATA_RATE_10MHZ> {
public:
ADC128S102() = default;
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
uint16_t read_data(uint8_t channel);
};
} // namespace adc128s102
} // namespace esphome

View file

@@ -0,0 +1,38 @@
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import sensor, voltage_sampler
from esphome.const import CONF_ID, CONF_CHANNEL
from .. import adc128s102_ns, ADC128S102
AUTO_LOAD = ["voltage_sampler"]
DEPENDENCIES = ["adc128s102"]
ADC128S102Sensor = adc128s102_ns.class_(
"ADC128S102Sensor",
sensor.Sensor,
cg.PollingComponent,
voltage_sampler.VoltageSampler,
)
CONF_ADC128S102_ID = "adc128s102_id"
CONFIG_SCHEMA = (
sensor.sensor_schema(ADC128S102Sensor)
.extend(
{
cv.GenerateID(CONF_ADC128S102_ID): cv.use_id(ADC128S102),
cv.Required(CONF_CHANNEL): cv.int_range(min=0, max=7),
}
)
.extend(cv.polling_component_schema("60s"))
)
async def to_code(config):
var = cg.new_Pvariable(
config[CONF_ID],
config[CONF_CHANNEL],
)
await cg.register_parented(var, config[CONF_ADC128S102_ID])
await cg.register_component(var, config)
await sensor.register_sensor(var, config)

View file

@@ -0,0 +1,24 @@
#include "adc128s102_sensor.h"
#include "esphome/core/log.h"
namespace esphome {
namespace adc128s102 {
static const char *const TAG = "adc128s102.sensor";
ADC128S102Sensor::ADC128S102Sensor(uint8_t channel) : channel_(channel) {}
float ADC128S102Sensor::get_setup_priority() const { return setup_priority::DATA; }
void ADC128S102Sensor::dump_config() {
LOG_SENSOR("", "ADC128S102 Sensor", this);
ESP_LOGCONFIG(TAG, " Pin: %u", this->channel_);
LOG_UPDATE_INTERVAL(this);
}
float ADC128S102Sensor::sample() { return this->parent_->read_data(this->channel_); }
void ADC128S102Sensor::update() { this->publish_state(this->sample()); }
} // namespace adc128s102
} // namespace esphome

Some files were not shown because too many files have changed in this diff.