Merge pull request #2034 from esphome/bump-1.20.0b3

1.20.0b3
Jesse Hills 2021-07-19 08:44:04 +12:00 committed by GitHub
commit 6a09d7c49b
18 changed files with 637 additions and 855 deletions


@@ -18,38 +18,23 @@ jobs:
     name: Build docker containers
     runs-on: ubuntu-latest
     strategy:
+      fail-fast: false
       matrix:
         arch: [amd64, armv7, aarch64]
-        build_type: ["hassio", "docker"]
+        build_type: ["ha-addon", "docker", "lint"]
     steps:
       - uses: actions/checkout@v2
-      - name: Set up env variables
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Set TAG
         run: |
-          base_version="3.4.0"
-          if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
-            build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
-            build_to="esphome/esphome-hassio-${{ matrix.arch }}"
-            dockerfile="docker/Dockerfile.hassio"
-          else
-            build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
-            build_to="esphome/esphome-${{ matrix.arch }}"
-            dockerfile="docker/Dockerfile"
-          fi
-          echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
-          echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
-          echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
-      - name: Pull for cache
-        run: |
-          docker pull "${BUILD_TO}:dev" || true
-      - name: Register QEMU binfmt
-        run: docker run --rm --privileged multiarch/qemu-user-static:5.2.0-2 --reset -p yes
-      - run: |
-          docker build \
-            --build-arg "BUILD_FROM=${BUILD_FROM}" \
-            --build-arg "BUILD_VERSION=ci" \
-            --cache-from "${BUILD_TO}:dev" \
-            --file "${DOCKERFILE}" \
-            .
+          echo "TAG=check" >> $GITHUB_ENV
+      - name: Run build
+        run: |
+          docker/build.py \
+            --tag "${TAG}" \
+            --arch "${{ matrix.arch }}" \
+            --build-type "${{ matrix.build_type }}" \
+            build
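Note: the workflow above no longer assembles image names, Dockerfiles and cache tags in shell; that logic moved into docker/build.py (added later in this commit). A minimal Python sketch of the mapping it applies, mirroring DockerParams.for_type_arch with BASE_VERSION = "3.6.0"; the helper name params() is made up for illustration:

    # Sketch of the image/Dockerfile selection now done by docker/build.py (DockerParams.for_type_arch).
    BASE_VERSION = "3.6.0"

    PREFIXES = {
        "docker": "esphome/esphome",
        "ha-addon": "esphome/esphome-hassio",
        "lint": "esphome/esphome-lint",
    }
    DOCKERFILES = {
        "docker": "docker/Dockerfile",
        "ha-addon": "docker/Dockerfile.hassio",
        "lint": "docker/Dockerfile.lint",
    }

    def params(build_type: str, arch: str) -> dict:
        prefix = PREFIXES[build_type]
        return {
            "build_from": f"ghcr.io/{prefix}-base-{arch}:{BASE_VERSION}",
            "build_to": f"{prefix}-{arch}",
            "dockerfile": DOCKERFILES[build_type],
        }

    # e.g. the "ha-addon"/"amd64" matrix entry builds from
    # ghcr.io/esphome/esphome-hassio-base-amd64:3.6.0 using docker/Dockerfile.hassio.
    print(params("ha-addon", "amd64"))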


@@ -4,40 +4,36 @@ name: CI
 on:
   push:
-    # On dev branch release-dev already performs CI checks
-    # On other branches the `pull_request` trigger will be used
-    branches: [beta, release]
+    branches: [dev, beta, release]
   pull_request:

 jobs:
-  lint-clang-format:
+  ci-with-container:
+    name: ${{ matrix.name }}
     runs-on: ubuntu-latest
-    # cpp lint job runs with esphome-lint docker image so that clang-format-*
-    # doesn't have to be installed
-    container: esphome/esphome-lint:1.1
-    steps:
-      - uses: actions/checkout@v2
-      # Set up the pio project so that the cpp checks know how files are compiled
-      # (build flags, libraries etc)
-      - name: Set up platformio environment
-        run: pio init --ide atom
-      - name: Run clang-format
-        run: script/clang-format -i
-      - name: Suggest changes
-        run: script/ci-suggest-changes
-  lint-clang-tidy:
-    runs-on: ubuntu-latest
-    # cpp lint job runs with esphome-lint docker image so that clang-format-*
-    # doesn't have to be installed
-    container: esphome/esphome-lint:1.1
-    # Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
     strategy:
       fail-fast: false
       matrix:
-        split: [1, 2, 3, 4]
+        include:
+          - id: clang-format
+            name: Run script/clang-format
+          - id: clang-tidy
+            name: Run script/clang-tidy 1/4
+            split: 1
+          - id: clang-tidy
+            name: Run script/clang-tidy 2/4
+            split: 2
+          - id: clang-tidy
+            name: Run script/clang-tidy 3/4
+            split: 3
+          - id: clang-tidy
+            name: Run script/clang-tidy 4/4
+            split: 4
+    # cpp lint job runs with esphome-lint docker image so that clang-format-*
+    # doesn't have to be installed
+    container: ghcr.io/esphome/esphome-lint:1.1
     steps:
       - uses: actions/checkout@v2
       # Set up the pio project so that the cpp checks know how files are compiled
@@ -45,26 +41,57 @@ jobs:
       - name: Set up platformio environment
         run: pio init --ide atom
       - name: Register problem matchers
         run: |
           echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
           echo "::add-matcher::.github/workflows/matchers/gcc.json"
+      - name: Run clang-format
+        run: script/clang-format -i
+        if: ${{ matrix.id == 'clang-format' }}
       - name: Run clang-tidy
         run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
+        if: ${{ matrix.id == 'clang-tidy' }}
       - name: Suggest changes
         run: script/ci-suggest-changes

-  lint-python:
+  ci:
     # Don't use the esphome-lint docker image because it may contain outdated requirements.
     # This way, all dependencies are cached via the cache action.
+    name: ${{ matrix.name }}
     runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - id: ci-custom
+            name: Run script/ci-custom
+          - id: lint-python
+            name: Run script/lint-python
+          - id: test
+            file: tests/test1.yaml
+            name: Test tests/test1.yaml
+          - id: test
+            file: tests/test2.yaml
+            name: Test tests/test2.yaml
+          - id: test
+            file: tests/test3.yaml
+            name: Test tests/test3.yaml
+          - id: test
+            file: tests/test4.yaml
+            name: Test tests/test4.yaml
+          - id: pytest
+            name: Run pytest
     steps:
       - uses: actions/checkout@v2
       - name: Set up Python
         uses: actions/setup-python@v2
         with:
           python-version: '3.7'
       - name: Cache pip modules
         uses: actions/cache@v1
         with:
@@ -72,6 +99,17 @@ jobs:
           key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
           restore-keys: |
             esphome-pip-3.7-
+      # Use per test platformio cache because tests have different platform versions
+      - name: Cache ~/.platformio
+        uses: actions/cache@v1
+        with:
+          path: ~/.platformio
+          key: test-home-platformio-${{ matrix.file }}-${{ hashFiles('esphome/core/config.py') }}
+          restore-keys: |
+            test-home-platformio-${{ matrix.file }}-
+        if: ${{ matrix.id == 'test' }}
       - name: Set up python environment
         run: script/setup
@@ -80,82 +118,22 @@ jobs:
           echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
           echo "::add-matcher::.github/workflows/matchers/lint-python.json"
           echo "::add-matcher::.github/workflows/matchers/python.json"
+          echo "::add-matcher::.github/workflows/matchers/pytest.json"
+          echo "::add-matcher::.github/workflows/matchers/gcc.json"
       - name: Lint Custom
-        run: script/ci-custom.py
+        run: |
+          script/ci-custom.py
+          script/build_codeowners.py --check
+        if: ${{ matrix.id == 'ci-custom' }}
       - name: Lint Python
         run: script/lint-python
-      - name: Lint CODEOWNERS
-        run: script/build_codeowners.py --check
-  test:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        test:
-          - test1
-          - test2
-          - test3
-          - test4
-          - test5
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.7'
-      - name: Cache pip modules
-        uses: actions/cache@v1
-        with:
-          path: ~/.cache/pip
-          key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
-          restore-keys: |
-            esphome-pip-3.7-
-      # Use per test platformio cache because tests have different platform versions
-      - name: Cache ~/.platformio
-        uses: actions/cache@v1
-        with:
-          path: ~/.platformio
-          key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core/config.py') }}
-          restore-keys: |
-            test-home-platformio-${{ matrix.test }}-
-      - name: Set up environment
-        run: script/setup
-      - name: Register problem matchers
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/gcc.json"
-          echo "::add-matcher::.github/workflows/matchers/python.json"
-      - run: esphome compile tests/${{ matrix.test }}.yaml
-  pytest:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.7'
-      - name: Cache pip modules
-        uses: actions/cache@v1
-        with:
-          path: ~/.cache/pip
-          key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
-          restore-keys: |
-            esphome-pip-3.7-
-      - name: Set up environment
-        run: script/setup
-      - name: Install Github Actions annotator
-        run: pip install pytest-github-actions-annotate-failures
-      - name: Register problem matchers
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/python.json"
+        if: ${{ matrix.id == 'lint-python' }}
+      - run: esphome compile ${{ matrix.file }}
+        if: ${{ matrix.id == 'test' }}
       - name: Run pytest
         run: |
-          pytest \
-            -qq \
-            --durations=10 \
-            -o console_output_style=count \
-            tests
+          pytest -vv --tb=native tests
+        if: ${{ matrix.id == 'pytest' }}


@@ -13,30 +13,88 @@ on:
       - '.github/workflows/docker-lint-build.yml'

 jobs:
-  publish-docker-lint-iage:
-    name: Build docker containers
+  deploy-docker:
+    name: Build and publish docker containers
+    if: github.repository == 'esphome/esphome'
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        arch: [amd64, armv7, aarch64]
+        build_type: ["lint"]
     steps:
       - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
       - name: Set TAG
         run: |
           echo "TAG=1.1" >> $GITHUB_ENV
-      - name: Pull for cache
+      - name: Run build
         run: |
-          docker pull "esphome/esphome-lint:latest" || true
-      - name: Build
-        run: |
-          docker build \
-            --cache-from "esphome/esphome-lint:latest" \
-            --file "docker/Dockerfile.lint" \
-            --tag "esphome/esphome-lint:latest" \
-            --tag "esphome/esphome-lint:${TAG}" \
-            .
+          docker/build.py \
+            --tag "${TAG}" \
+            --arch "${{ matrix.arch }}" \
+            --build-type "${{ matrix.build_type }}" \
+            build
       - name: Log in to docker hub
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USER }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
-        run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
-      - run: |
-          docker push "esphome/esphome-lint:${TAG}"
-          docker push "esphome/esphome-lint:latest"
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USER }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Log in to the GitHub container registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Run push
+        run: |
+          docker/build.py \
+            --tag "${TAG}" \
+            --arch "${{ matrix.arch }}" \
+            --build-type "${{ matrix.build_type }}" \
+            push
+
+  deploy-docker-manifest:
+    if: github.repository == 'esphome/esphome'
+    runs-on: ubuntu-latest
+    needs: [deploy-docker]
+    strategy:
+      matrix:
+        build_type: ["lint"]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Set TAG
+        run: |
+          echo "TAG=1.1" >> $GITHUB_ENV
+      - name: Enable experimental manifest support
+        run: |
+          mkdir -p ~/.docker
+          echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
+      - name: Log in to docker hub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USER }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Log in to the GitHub container registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Run manifest
+        run: |
+          docker/build.py \
+            --tag "${TAG}" \
+            --build-type "${{ matrix.build_type }}" \
+            manifest
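Note: this workflow pins TAG=1.1, and docker/build.py derives the release channel, and therefore the extra tags to push, from the tag itself. A small illustration of that channel detection using the regex from the script; the sample tags are only examples:

    import re

    # Channel detection as done in docker/build.py: plain X.Y[.Z] -> release,
    # X.Y[.Z]bN -> beta, anything else (e.g. "check" or a git SHA) -> dev.
    def channel_for(tag: str) -> str:
        match = re.match(r"^\d+\.\d+(?:\.\d+)?(b\d+)?$", tag)
        if match is None:
            return "dev"
        return "release" if match.group(1) is None else "beta"

    for tag in ("1.1", "1.20.0b3", "1.20.0", "check"):
        print(tag, "->", channel_for(tag))
    # "1.1" and "1.20.0" map to release (pushes <tag>, beta, latest),
    # "1.20.0b3" maps to beta (pushes <tag>, beta), "check" maps to dev (pushes <tag>, dev).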

.github/workflows/matchers/pytest.json (new file)

@@ -0,0 +1,19 @@
{
"problemMatcher": [
{
"owner": "pytest",
"fileLocation": "absolute",
"pattern": [
{
"regexp": "^\\s+File \"(.*)\", line (\\d+), in (.*)$",
"file": 1,
"line": 2
},
{
"regexp": "^\\s+(.*)$",
"message": 1
}
]
}
]
}
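Note: the two patterns are applied to consecutive lines of a Python traceback in the pytest output; the first captures the file and line, the second the message. A quick sanity check of the regexes against a made-up traceback fragment (the file path and assertion are invented for illustration):

    import re

    file_line = re.compile(r'^\s+File "(.*)", line (\d+), in (.*)$')
    message = re.compile(r"^\s+(.*)$")

    sample = [
        '  File "/home/runner/work/esphome/tests/test_core.py", line 42, in test_something',
        "    assert value == 3",
    ]
    m1 = file_line.match(sample[0])
    m2 = message.match(sample[1])
    print(m1.group(1), m1.group(2))  # file and line used for the annotation
    print(m2.group(1))               # message text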


@@ -1,247 +0,0 @@
name: Publish dev releases to docker hub
on:
push:
branches:
- dev
jobs:
# THE LINT/TEST JOBS ARE COPIED FROM ci.yaml
lint-clang-format:
runs-on: ubuntu-latest
# cpp lint job runs with esphome-lint docker image so that clang-format-*
# doesn't have to be installed
container: esphome/esphome-lint:1.1
steps:
- uses: actions/checkout@v2
# Set up the pio project so that the cpp checks know how files are compiled
# (build flags, libraries etc)
- name: Set up platformio environment
run: pio init --ide atom
- name: Run clang-format
run: script/clang-format -i
- name: Suggest changes
run: script/ci-suggest-changes
lint-clang-tidy:
runs-on: ubuntu-latest
# cpp lint job runs with esphome-lint docker image so that clang-format-*
# doesn't have to be installed
container: esphome/esphome-lint:1.1
# Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
strategy:
fail-fast: false
matrix:
split: [1, 2, 3, 4]
steps:
- uses: actions/checkout@v2
# Set up the pio project so that the cpp checks know how files are compiled
# (build flags, libraries etc)
- name: Set up platformio environment
run: pio init --ide atom
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
echo "::add-matcher::.github/workflows/matchers/gcc.json"
- name: Run clang-tidy
run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
- name: Suggest changes
run: script/ci-suggest-changes
lint-python:
# Don't use the esphome-lint docker image because it may contain outdated requirements.
# This way, all dependencies are cached via the cache action.
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.7'
- name: Cache pip modules
uses: actions/cache@v1
with:
path: ~/.cache/pip
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
restore-keys: |
esphome-pip-3.7-
- name: Set up python environment
run: script/setup
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
echo "::add-matcher::.github/workflows/matchers/lint-python.json"
echo "::add-matcher::.github/workflows/matchers/python.json"
- name: Lint Custom
run: script/ci-custom.py
- name: Lint Python
run: script/lint-python
- name: Lint CODEOWNERS
run: script/build_codeowners.py --check
test:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
test:
- test1
- test2
- test3
- test4
- test5
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.7'
- name: Cache pip modules
uses: actions/cache@v1
with:
path: ~/.cache/pip
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
restore-keys: |
esphome-pip-3.7-
# Use per test platformio cache because tests have different platform versions
- name: Cache ~/.platformio
uses: actions/cache@v1
with:
path: ~/.platformio
key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core/config.py') }}
restore-keys: |
test-home-platformio-${{ matrix.test }}-
- name: Set up environment
run: script/setup
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/python.json"
- run: esphome compile tests/${{ matrix.test }}.yaml
pytest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.7'
- name: Cache pip modules
uses: actions/cache@v1
with:
path: ~/.cache/pip
key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
restore-keys: |
esphome-pip-3.7-
- name: Set up environment
run: script/setup
- name: Install Github Actions annotator
run: pip install pytest-github-actions-annotate-failures
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
- name: Run pytest
run: |
pytest \
-qq \
--durations=10 \
-o console_output_style=count \
tests
deploy-docker:
name: Build and publish docker containers
if: github.repository == 'esphome/esphome'
runs-on: ubuntu-latest
needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
strategy:
matrix:
arch: [amd64, armv7, aarch64]
# Hassio dev image doesn't use esphome/esphome-hassio-$arch and uses base directly
build_type: ["docker"]
steps:
- uses: actions/checkout@v2
- name: Set TAG
run: |
TAG="${GITHUB_SHA:0:7}"
echo "TAG=${TAG}" >> $GITHUB_ENV
- name: Set up env variables
run: |
base_version="3.4.0"
if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
build_to="esphome/esphome-hassio-${{ matrix.arch }}"
dockerfile="docker/Dockerfile.hassio"
else
build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
build_to="esphome/esphome-${{ matrix.arch }}"
dockerfile="docker/Dockerfile"
fi
echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
- name: Pull for cache
run: |
docker pull "${BUILD_TO}:dev" || true
- name: Register QEMU binfmt
run: docker run --rm --privileged multiarch/qemu-user-static:5.2.0-2 --reset -p yes
- run: |
docker build \
--build-arg "BUILD_FROM=${BUILD_FROM}" \
--build-arg "BUILD_VERSION=${TAG}" \
--tag "${BUILD_TO}:${TAG}" \
--tag "${BUILD_TO}:dev" \
--cache-from "${BUILD_TO}:dev" \
--file "${DOCKERFILE}" \
.
- name: Log in to docker hub
env:
DOCKER_USER: ${{ secrets.DOCKER_USER }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
- run: |
docker push "${BUILD_TO}:${TAG}"
docker push "${BUILD_TO}:dev"
deploy-docker-manifest:
if: github.repository == 'esphome/esphome'
runs-on: ubuntu-latest
needs: [deploy-docker]
steps:
- name: Enable experimental manifest support
run: |
mkdir -p ~/.docker
echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
- name: Set TAG
run: |
TAG="${GITHUB_SHA:0:7}"
echo "TAG=${TAG}" >> $GITHUB_ENV
- name: Log in to docker hub
env:
DOCKER_USER: ${{ secrets.DOCKER_USER }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
- name: "Create the manifest"
run: |
docker manifest create esphome/esphome:${TAG} \
esphome/esphome-aarch64:${TAG} \
esphome/esphome-amd64:${TAG} \
esphome/esphome-armv7:${TAG}
docker manifest push esphome/esphome:${TAG}
docker manifest create esphome/esphome:dev \
esphome/esphome-aarch64:${TAG} \
esphome/esphome-amd64:${TAG} \
esphome/esphome-armv7:${TAG}
docker manifest push esphome/esphome:dev


@@ -1,164 +1,35 @@
 name: Publish Release

 on:
+  workflow_dispatch:
   release:
     types: [published]
+  schedule:
+    - cron: "0 2 * * *"

 jobs:
-  # THE LINT/TEST JOBS ARE COPIED FROM ci.yaml
-  lint-clang-format:
+  init:
+    name: Initialize build
     runs-on: ubuntu-latest
-    # cpp lint job runs with esphome-lint docker image so that clang-format-*
-    # doesn't have to be installed
-    container: esphome/esphome-lint:1.1
+    outputs:
+      tag: ${{ steps.tag.outputs.tag }}
     steps:
       - uses: actions/checkout@v2
-      # Set up the pio project so that the cpp checks know how files are compiled
-      # (build flags, libraries etc)
-      - name: Set up platformio environment
-        run: pio init --ide atom
-      - name: Run clang-format
-        run: script/clang-format -i
-      - name: Suggest changes
-        run: script/ci-suggest-changes
-  lint-clang-tidy:
-    runs-on: ubuntu-latest
-    # cpp lint job runs with esphome-lint docker image so that clang-format-*
-    # doesn't have to be installed
-    container: esphome/esphome-lint:1.1
-    # Split clang-tidy check into 4 jobs. Each one will check 1/4th of the .cpp files
-    strategy:
-      fail-fast: false
-      matrix:
-        split: [1, 2, 3, 4]
-    steps:
-      - uses: actions/checkout@v2
-      # Set up the pio project so that the cpp checks know how files are compiled
-      # (build flags, libraries etc)
-      - name: Set up platformio environment
-        run: pio init --ide atom
-      - name: Register problem matchers
+      - name: Get tag
+        id: tag
         run: |
-          echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
-          echo "::add-matcher::.github/workflows/matchers/gcc.json"
-      - name: Run clang-tidy
-        run: script/clang-tidy --all-headers --fix --split-num 4 --split-at ${{ matrix.split }}
-      - name: Suggest changes
-        run: script/ci-suggest-changes
-  lint-python:
-    # Don't use the esphome-lint docker image because it may contain outdated requirements.
-    # This way, all dependencies are cached via the cache action.
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.7'
-      - name: Cache pip modules
-        uses: actions/cache@v1
-        with:
-          path: ~/.cache/pip
-          key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
-          restore-keys: |
-            esphome-pip-3.7-
-      - name: Set up python environment
-        run: script/setup
-      - name: Register problem matchers
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
-          echo "::add-matcher::.github/workflows/matchers/lint-python.json"
-          echo "::add-matcher::.github/workflows/matchers/python.json"
-      - name: Lint Custom
-        run: script/ci-custom.py
-      - name: Lint Python
-        run: script/lint-python
-      - name: Lint CODEOWNERS
-        run: script/build_codeowners.py --check
-  test:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        test:
-          - test1
-          - test2
-          - test3
-          - test4
-          - test5
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.7'
-      - name: Cache pip modules
-        uses: actions/cache@v1
-        with:
-          path: ~/.cache/pip
-          key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
-          restore-keys: |
-            esphome-pip-3.7-
-      # Use per test platformio cache because tests have different platform versions
-      - name: Cache ~/.platformio
-        uses: actions/cache@v1
-        with:
-          path: ~/.platformio
-          key: test-home-platformio-${{ matrix.test }}-${{ hashFiles('esphome/core/config.py') }}
-          restore-keys: |
-            test-home-platformio-${{ matrix.test }}-
-      - name: Set up environment
-        run: script/setup
-      - name: Register problem matchers
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/gcc.json"
-          echo "::add-matcher::.github/workflows/matchers/python.json"
-      - run: esphome compile tests/${{ matrix.test }}.yaml
-  pytest:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.7'
-      - name: Cache pip modules
-        uses: actions/cache@v1
-        with:
-          path: ~/.cache/pip
-          key: esphome-pip-3.7-${{ hashFiles('setup.py') }}
-          restore-keys: |
-            esphome-pip-3.7-
-      - name: Set up environment
-        run: script/setup
-      - name: Install Github Actions annotator
-        run: pip install pytest-github-actions-annotate-failures
-      - name: Register problem matchers
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/python.json"
-      - name: Run pytest
-        run: |
-          pytest \
-            -qq \
-            --durations=10 \
-            -o console_output_style=count \
-            tests
+          if [[ "$GITHUB_EVENT_NAME" = "release" ]]; then
+            TAG="${GITHUB_REF#refs/tags/v}"
+          else
+            TAG=$(cat esphome/const.py | sed -n -E "s/^__version__\s+=\s+\"(.+)\"$/\1/p")
+            today="$(date --utc '+%Y%m%d')"
+            TAG="${TAG}${today}"
+          fi
+          echo "::set-output name=tag::${TAG}"

   deploy-pypi:
     name: Build and publish to PyPi
-    if: github.repository == 'esphome/esphome'
-    needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
+    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
@@ -182,119 +53,85 @@ jobs:
     name: Build and publish docker containers
     if: github.repository == 'esphome/esphome'
     runs-on: ubuntu-latest
-    needs: [lint-clang-format, lint-clang-tidy, lint-python, test, pytest]
+    needs: [init]
     strategy:
       matrix:
         arch: [amd64, armv7, aarch64]
-        build_type: ["hassio", "docker"]
+        build_type: ["ha-addon", "docker"]
     steps:
       - uses: actions/checkout@v2
-      - name: Set TAG
-        run: |
-          TAG="${GITHUB_REF#refs/tags/v}"
-          echo "TAG=${TAG}" >> $GITHUB_ENV
-      - name: Set up env variables
-        run: |
-          base_version="3.4.0"
-          if [[ "${{ matrix.build_type }}" == "hassio" ]]; then
-            build_from="esphome/esphome-hassio-base-${{ matrix.arch }}:${base_version}"
-            build_to="esphome/esphome-hassio-${{ matrix.arch }}"
-            dockerfile="docker/Dockerfile.hassio"
-          else
-            build_from="esphome/esphome-base-${{ matrix.arch }}:${base_version}"
-            build_to="esphome/esphome-${{ matrix.arch }}"
-            dockerfile="docker/Dockerfile"
-          fi
-          if [[ "${{ github.event.release.prerelease }}" == "true" ]]; then
-            cache_tag="beta"
-          else
-            cache_tag="latest"
-          fi
-          # Set env variables so these values don't need to be calculated again
-          echo "BUILD_FROM=${build_from}" >> $GITHUB_ENV
-          echo "BUILD_TO=${build_to}" >> $GITHUB_ENV
-          echo "DOCKERFILE=${dockerfile}" >> $GITHUB_ENV
-          echo "CACHE_TAG=${cache_tag}" >> $GITHUB_ENV
-      - name: Pull for cache
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Run build
         run: |
-          docker pull "${BUILD_TO}:${CACHE_TAG}" || true
-      - name: Register QEMU binfmt
-        run: docker run --rm --privileged multiarch/qemu-user-static:5.2.0-2 --reset -p yes
-      - run: |
-          docker build \
-            --build-arg "BUILD_FROM=${BUILD_FROM}" \
-            --build-arg "BUILD_VERSION=${TAG}" \
-            --tag "${BUILD_TO}:${TAG}" \
-            --cache-from "${BUILD_TO}:${CACHE_TAG}" \
-            --file "${DOCKERFILE}" \
-            .
+          docker/build.py \
+            --tag "${{ needs.init.outputs.tag }}" \
+            --arch "${{ matrix.arch }}" \
+            --build-type "${{ matrix.build_type }}" \
+            build
       - name: Log in to docker hub
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USER }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
-        run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
-      - run: docker push "${BUILD_TO}:${TAG}"
-      # Always publish to beta tag (also full releases)
-      - name: Publish docker beta tag
-        run: |
-          docker tag "${BUILD_TO}:${TAG}" "${BUILD_TO}:beta"
-          docker push "${BUILD_TO}:beta"
-      - if: ${{ !github.event.release.prerelease }}
-        name: Publish docker latest tag
-        run: |
-          docker tag "${BUILD_TO}:${TAG}" "${BUILD_TO}:latest"
-          docker push "${BUILD_TO}:latest"
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USER }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Log in to the GitHub container registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Run push
+        run: |
+          docker/build.py \
+            --tag "${{ needs.init.outputs.tag }}" \
+            --arch "${{ matrix.arch }}" \
+            --build-type "${{ matrix.build_type }}" \
+            push

   deploy-docker-manifest:
     if: github.repository == 'esphome/esphome'
     runs-on: ubuntu-latest
-    needs: [deploy-docker]
+    needs: [init, deploy-docker]
+    strategy:
+      matrix:
+        build_type: ["ha-addon", "docker"]
     steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
       - name: Enable experimental manifest support
         run: |
           mkdir -p ~/.docker
           echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
-      - name: Set TAG
-        run: |
-          TAG="${GITHUB_REF#refs/tags/v}"
-          echo "TAG=${TAG}" >> $GITHUB_ENV
       - name: Log in to docker hub
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USER }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
-        run: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}"
-      - name: "Create the manifest"
-        run: |
-          docker manifest create esphome/esphome:${TAG} \
-            esphome/esphome-aarch64:${TAG} \
-            esphome/esphome-amd64:${TAG} \
-            esphome/esphome-armv7:${TAG}
-          docker manifest push esphome/esphome:${TAG}
-      - name: Publish docker beta tag
-        run: |
-          docker manifest create esphome/esphome:beta \
-            esphome/esphome-aarch64:${TAG} \
-            esphome/esphome-amd64:${TAG} \
-            esphome/esphome-armv7:${TAG}
-          docker manifest push esphome/esphome:beta
-      - name: Publish docker latest tag
-        if: ${{ !github.event.release.prerelease }}
-        run: |
-          docker manifest create esphome/esphome:latest \
-            esphome/esphome-aarch64:${TAG} \
-            esphome/esphome-amd64:${TAG} \
-            esphome/esphome-armv7:${TAG}
-          docker manifest push esphome/esphome:latest
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USER }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Log in to the GitHub container registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Run manifest
+        run: |
+          docker/build.py \
+            --tag "${{ needs.init.outputs.tag }}" \
+            --build-type "${{ matrix.build_type }}" \
+            manifest

   deploy-hassio-repo:
-    if: github.repository == 'esphome/esphome'
+    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
     runs-on: ubuntu-latest
     needs: [deploy-docker]
     steps:
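Note: every later job now consumes a single tag computed by the init job (needs.init.outputs.tag): a published release strips the refs/tags/v prefix, while the nightly cron and workflow_dispatch runs take __version__ from esphome/const.py and append the UTC date. A rough Python equivalent of that shell step, shown only to make the two branches explicit:

    import datetime
    import re

    def compute_tag(event_name: str, github_ref: str = "") -> str:
        # Mirrors the "Get tag" step of the init job.
        if event_name == "release":
            return github_ref.removeprefix("refs/tags/v")
        # Scheduled/dispatched build: version from const.py plus UTC date suffix.
        with open("esphome/const.py", encoding="utf-8") as fh:
            version = re.search(r'^__version__ = "(.+)"$', fh.read(), re.MULTILINE).group(1)
        return version + datetime.datetime.utcnow().strftime("%Y%m%d")

    # compute_tag("release", "refs/tags/v1.20.0b3") -> "1.20.0b3"
    # compute_tag("schedule")                       -> e.g. "1.20.0b320210719"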

.gitignore

@@ -13,6 +13,9 @@ __pycache__/
 # Intellij Idea
 .idea

+# Vim
+*.swp
+
 # Hide some OS X stuff
 .DS_Store
 .AppleDouble


@@ -1,4 +1,4 @@
-ARG BUILD_FROM=esphome/esphome-base-amd64:3.4.0
+ARG BUILD_FROM=esphome/esphome-base:latest
 FROM ${BUILD_FROM}

 # First install requirements to leverage caching when requirements don't change


@@ -1,4 +1,4 @@
-ARG BUILD_FROM
+ARG BUILD_FROM=esphome/esphome-hassio-base:latest
 FROM ${BUILD_FROM}

 # First install requirements to leverage caching when requirements don't change


@@ -1,4 +1,5 @@
-FROM esphome/esphome-lint-base:3.4.0
+ARG BUILD_FROM=esphome/esphome-lint-base:latest
+FROM ${BUILD_FROM}

 COPY requirements.txt requirements_optional.txt requirements_test.txt docker/platformio_install_deps.py platformio.ini /
 RUN \

docker/build.py (new executable file)

@@ -0,0 +1,177 @@
#!/usr/bin/env python3
from dataclasses import dataclass
import subprocess
import argparse
import platform
import shlex
import re
import sys
CHANNEL_DEV = 'dev'
CHANNEL_BETA = 'beta'
CHANNEL_RELEASE = 'release'
CHANNELS = [CHANNEL_DEV, CHANNEL_BETA, CHANNEL_RELEASE]
ARCH_AMD64 = 'amd64'
ARCH_ARMV7 = 'armv7'
ARCH_AARCH64 = 'aarch64'
ARCHS = [ARCH_AMD64, ARCH_ARMV7, ARCH_AARCH64]
TYPE_DOCKER = 'docker'
TYPE_HA_ADDON = 'ha-addon'
TYPE_LINT = 'lint'
TYPES = [TYPE_DOCKER, TYPE_HA_ADDON, TYPE_LINT]
BASE_VERSION = "3.6.0"
parser = argparse.ArgumentParser()
parser.add_argument("--tag", type=str, required=True, help="The main docker tag to push to. If a version number also adds latest and/or beta tag")
parser.add_argument("--arch", choices=ARCHS, required=False, help="The architecture to build for")
parser.add_argument("--build-type", choices=TYPES, required=True, help="The type of build to run")
parser.add_argument("--dry-run", action="store_true", help="Don't run any commands, just print them")
subparsers = parser.add_subparsers(help="Action to perform", dest="command", required=True)
build_parser = subparsers.add_parser("build", help="Build the image")
push_parser = subparsers.add_parser("push", help="Tag the already built image and push it to docker hub")
manifest_parser = subparsers.add_parser("manifest", help="Create a manifest from already pushed images")
# only lists some possibilities, doesn't have to be perfect
# https://stackoverflow.com/a/45125525
UNAME_TO_ARCH = {
"x86_64": ARCH_AMD64,
"aarch64": ARCH_AARCH64,
"aarch64_be": ARCH_AARCH64,
"arm": ARCH_ARMV7,
}
@dataclass(frozen=True)
class DockerParams:
build_from: str
build_to: str
manifest_to: str
dockerfile: str
@classmethod
def for_type_arch(cls, build_type, arch):
prefix = {
TYPE_DOCKER: "esphome/esphome",
TYPE_HA_ADDON: "esphome/esphome-hassio",
TYPE_LINT: "esphome/esphome-lint"
}[build_type]
build_from = f"ghcr.io/{prefix}-base-{arch}:{BASE_VERSION}"
build_to = f"{prefix}-{arch}"
dockerfile = {
TYPE_DOCKER: "docker/Dockerfile",
TYPE_HA_ADDON: "docker/Dockerfile.hassio",
TYPE_LINT: "docker/Dockerfile.lint",
}[build_type]
return cls(
build_from=build_from,
build_to=build_to,
manifest_to=prefix,
dockerfile=dockerfile
)
def main():
args = parser.parse_args()
def run_command(*cmd, ignore_error: bool = False):
print(f"$ {shlex.join(list(cmd))}")
if not args.dry_run:
rc = subprocess.call(list(cmd))
if rc != 0 and not ignore_error:
print("Command failed")
sys.exit(1)
# detect channel from tag
match = re.match(r'^\d+\.\d+(?:\.\d+)?(b\d+)?$', args.tag)
if match is None:
channel = CHANNEL_DEV
elif match.group(1) is None:
channel = CHANNEL_RELEASE
else:
channel = CHANNEL_BETA
tags_to_push = [args.tag]
if channel == CHANNEL_DEV:
tags_to_push.append("dev")
elif channel == CHANNEL_BETA:
tags_to_push.append("beta")
elif channel == CHANNEL_RELEASE:
# Additionally push to beta
tags_to_push.append("beta")
tags_to_push.append("latest")
if args.command == "build":
# 1. pull cache image
params = DockerParams.for_type_arch(args.build_type, args.arch)
cache_tag = {
CHANNEL_DEV: "dev",
CHANNEL_BETA: "beta",
CHANNEL_RELEASE: "latest",
}[channel]
cache_img = f"ghcr.io/{params.build_to}:{cache_tag}"
run_command("docker", "pull", cache_img, ignore_error=True)
# 2. register QEMU binfmt (if not host arch)
is_native = UNAME_TO_ARCH.get(platform.machine()) == args.arch
if not is_native:
run_command(
"docker", "run", "--rm", "--privileged", "multiarch/qemu-user-static:5.2.0-2",
"--reset", "-p", "yes"
)
# 3. build
run_command(
"docker", "build",
"--build-arg", f"BUILD_FROM={params.build_from}",
"--build-arg", f"BUILD_VERSION={args.tag}",
"--tag", f"{params.build_to}:{args.tag}",
"--cache-from", cache_img,
"--file", params.dockerfile,
"."
)
elif args.command == "push":
params = DockerParams.for_type_arch(args.build_type, args.arch)
imgs = [f"{params.build_to}:{tag}" for tag in tags_to_push]
imgs += [f"ghcr.io/{params.build_to}:{tag}" for tag in tags_to_push]
src = imgs[0]
# 1. tag images
for img in imgs[1:]:
run_command(
"docker", "tag", src, img
)
# 2. push images
for img in imgs:
run_command(
"docker", "push", img
)
elif args.command == "manifest":
manifest = DockerParams.for_type_arch(args.build_type, ARCH_AMD64).manifest_to
targets = [f"{manifest}:{tag}" for tag in tags_to_push]
targets += [f"ghcr.io/{manifest}:{tag}" for tag in tags_to_push]
# 1. Create manifests
for target in targets:
cmd = ["docker", "manifest", "create", target]
for arch in ARCHS:
src = f"{DockerParams.for_type_arch(args.build_type, arch).build_to}:{args.tag}"
if target.startswith("ghcr.io"):
src = f"ghcr.io/{src}"
cmd.append(src)
run_command(*cmd)
# 2. Push manifests
for target in targets:
run_command(
"docker", "manifest", "push", target
)
if __name__ == "__main__":
main()
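Note: a worked example of the manifest subcommand above, assuming --build-type docker and --tag 1.20.0b3 (which the tag regex classifies as the beta channel, so tags_to_push is ["1.20.0b3", "beta"]); this sketch only prints the docker commands the script would issue:

    # Worked example of the "manifest" branch of docker/build.py.
    ARCHS = ["amd64", "armv7", "aarch64"]
    tag = "1.20.0b3"
    tags_to_push = [tag, "beta"]

    manifest_to = "esphome/esphome"
    targets = [f"{manifest_to}:{t}" for t in tags_to_push]
    targets += [f"ghcr.io/{manifest_to}:{t}" for t in tags_to_push]

    for target in targets:
        sources = [f"esphome/esphome-{arch}:{tag}" for arch in ARCHS]
        if target.startswith("ghcr.io"):
            sources = [f"ghcr.io/{src}" for src in sources]
        print("docker manifest create", target, *sources)
        print("docker manifest push", target)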


@@ -25,6 +25,13 @@ static const char *const TAG = "ethernet";

 EthernetComponent *global_eth_component;

+#define ESPHL_ERROR_CHECK(err, message) \
+  if (err != ESP_OK) { \
+    ESP_LOGE(TAG, message ": (%d) %s", err, esp_err_to_name(err)); \
+    this->mark_failed(); \
+    return; \
+  }
+
 EthernetComponent::EthernetComponent() { global_eth_component = this; }

 void EthernetComponent::setup() {
   ESP_LOGCONFIG(TAG, "Setting up Ethernet...");
@@ -36,103 +43,6 @@ void EthernetComponent::setup() {
     this->power_pin_->setup();
   }

-  this->start_connect_();
-
-#ifdef USE_MDNS
-  network_setup_mdns();
-#endif
-}
-
-void EthernetComponent::loop() {
-  const uint32_t now = millis();
-
-  if (!this->connected_ && !this->last_connected_ && now - this->connect_begin_ > 15000) {
-    ESP_LOGW(TAG, "Connecting via ethernet failed! Re-connecting...");
-    this->start_connect_();
-    return;
-  }
-
-  if (this->connected_ == this->last_connected_)
-    // nothing changed
-    return;
-
-  if (this->connected_) {
-    // connection established
-    ESP_LOGI(TAG, "Connected via Ethernet!");
-    this->dump_connect_params_();
-    this->status_clear_warning();
-  } else {
-    // connection lost
-    ESP_LOGW(TAG, "Connection via Ethernet lost! Re-connecting...");
-    this->start_connect_();
-  }
-
-  this->last_connected_ = this->connected_;
-
-  network_tick_mdns();
-}
-
-void EthernetComponent::dump_config() {
-  ESP_LOGCONFIG(TAG, "Ethernet:");
-  this->dump_connect_params_();
-  LOG_PIN("  Power Pin: ", this->power_pin_);
-  ESP_LOGCONFIG(TAG, "  MDC Pin: %u", this->mdc_pin_);
-  ESP_LOGCONFIG(TAG, "  MDIO Pin: %u", this->mdio_pin_);
-  ESP_LOGCONFIG(TAG, "  Type: %s", this->type_ == ETHERNET_TYPE_LAN8720 ? "LAN8720" : "TLK110");
-}
-
-float EthernetComponent::get_setup_priority() const { return setup_priority::WIFI; }
-
-bool EthernetComponent::can_proceed() { return this->is_connected(); }
-
-IPAddress EthernetComponent::get_ip_address() {
-  tcpip_adapter_ip_info_t ip;
-  tcpip_adapter_get_ip_info(TCPIP_ADAPTER_IF_ETH, &ip);
-  return IPAddress(ip.ip.addr);
-}
-
-void EthernetComponent::on_wifi_event_(system_event_id_t event, system_event_info_t info) {
-  const char *event_name;
-
-  switch (event) {
-    case SYSTEM_EVENT_ETH_START:
-      event_name = "ETH started";
-      break;
-    case SYSTEM_EVENT_ETH_STOP:
-      event_name = "ETH stopped";
-      this->connected_ = false;
-      break;
-    case SYSTEM_EVENT_ETH_CONNECTED:
-      event_name = "ETH connected";
-      break;
-    case SYSTEM_EVENT_ETH_DISCONNECTED:
-      event_name = "ETH disconnected";
-      this->connected_ = false;
-      break;
-    case SYSTEM_EVENT_ETH_GOT_IP:
-      event_name = "ETH Got IP";
-      this->connected_ = true;
-      break;
-    default:
-      return;
-  }
-
-  ESP_LOGV(TAG, "[Ethernet event] %s (num=%d)", event_name, event);
-}
-
-#define ESPHL_ERROR_CHECK(err, message) \
-  if (err != ESP_OK) { \
-    ESP_LOGE(TAG, message ": %d", err); \
-    this->mark_failed(); \
-    return; \
-  }
-
-void EthernetComponent::start_connect_() {
-  this->connect_begin_ = millis();
-  this->status_set_warning();
-
-  esp_err_t err;
-  if (this->initialized_) {
-    // already initialized
-    err = esp_eth_enable();
-    ESPHL_ERROR_CHECK(err, "ETH enable error");
-    return;
-  }
-
   switch (this->type_) {
     case ETHERNET_TYPE_LAN8720: {
       memcpy(&this->eth_config, &phy_lan8720_default_ethernet_config, sizeof(eth_config_t));
@@ -160,16 +70,111 @@ void EthernetComponent::start_connect_() {
   tcpipInit();

+  esp_err_t err;
   err = esp_eth_init(&this->eth_config);
-  if (err != ESP_OK) {
-    ESP_LOGE(TAG, "ETH init error: %d", err);
-    this->mark_failed();
+  ESPHL_ERROR_CHECK(err, "ETH init error");
+  err = esp_eth_enable();
+  ESPHL_ERROR_CHECK(err, "ETH enable error");
+
+#ifdef USE_MDNS
+  network_setup_mdns();
+#endif
+}
+
+void EthernetComponent::loop() {
+  const uint32_t now = millis();
+
+  switch (this->state_) {
+    case EthernetComponentState::STOPPED:
+      if (this->started_) {
+        ESP_LOGI(TAG, "Starting ethernet connection");
+        this->state_ = EthernetComponentState::CONNECTING;
+        this->start_connect_();
+      }
+      break;
+    case EthernetComponentState::CONNECTING:
+      if (!this->started_) {
+        ESP_LOGI(TAG, "Stopped ethernet connection");
+        this->state_ = EthernetComponentState::STOPPED;
+      } else if (this->connected_) {
+        // connection established
+        ESP_LOGI(TAG, "Connected via Ethernet!");
+        this->state_ = EthernetComponentState::CONNECTED;
+        this->dump_connect_params_();
+        this->status_clear_warning();
+        network_tick_mdns();
+      } else if (now - this->connect_begin_ > 15000) {
+        ESP_LOGW(TAG, "Connecting via ethernet failed! Re-connecting...");
+        this->start_connect_();
+      }
+      break;
+    case EthernetComponentState::CONNECTED:
+      if (!this->started_) {
+        ESP_LOGI(TAG, "Stopped ethernet connection");
+        this->state_ = EthernetComponentState::STOPPED;
+      } else if (!this->connected_) {
+        ESP_LOGW(TAG, "Connection via Ethernet lost! Re-connecting...");
+        this->state_ = EthernetComponentState::CONNECTING;
+        this->start_connect_();
+      }
+      break;
+  }
+}
+
+void EthernetComponent::dump_config() {
+  ESP_LOGCONFIG(TAG, "Ethernet:");
+  this->dump_connect_params_();
+  LOG_PIN("  Power Pin: ", this->power_pin_);
+  ESP_LOGCONFIG(TAG, "  MDC Pin: %u", this->mdc_pin_);
+  ESP_LOGCONFIG(TAG, "  MDIO Pin: %u", this->mdio_pin_);
+  ESP_LOGCONFIG(TAG, "  Type: %s", this->type_ == ETHERNET_TYPE_LAN8720 ? "LAN8720" : "TLK110");
+}
+
+float EthernetComponent::get_setup_priority() const { return setup_priority::WIFI; }
+
+bool EthernetComponent::can_proceed() { return this->is_connected(); }
+
+IPAddress EthernetComponent::get_ip_address() {
+  tcpip_adapter_ip_info_t ip;
+  tcpip_adapter_get_ip_info(TCPIP_ADAPTER_IF_ETH, &ip);
+  return IPAddress(ip.ip.addr);
+}
+
+void EthernetComponent::on_wifi_event_(system_event_id_t event, system_event_info_t info) {
+  const char *event_name;
+
+  switch (event) {
+    case SYSTEM_EVENT_ETH_START:
+      event_name = "ETH started";
+      this->started_ = true;
+      break;
+    case SYSTEM_EVENT_ETH_STOP:
+      event_name = "ETH stopped";
+      this->started_ = false;
+      this->connected_ = false;
+      break;
+    case SYSTEM_EVENT_ETH_CONNECTED:
+      event_name = "ETH connected";
+      break;
+    case SYSTEM_EVENT_ETH_DISCONNECTED:
+      event_name = "ETH disconnected";
+      this->connected_ = false;
+      break;
+    case SYSTEM_EVENT_ETH_GOT_IP:
+      event_name = "ETH Got IP";
+      this->connected_ = true;
+      break;
+    default:
       return;
   }
-  this->initialized_ = true;
-
-  tcpip_adapter_set_hostname(TCPIP_ADAPTER_IF_ETH, App.get_name().c_str());
+  ESP_LOGV(TAG, "[Ethernet event] %s (num=%d)", event_name, event);
+}
+
+void EthernetComponent::start_connect_() {
+  this->connect_begin_ = millis();
+  this->status_set_warning();
+
+  esp_err_t err;
+  err = tcpip_adapter_set_hostname(TCPIP_ADAPTER_IF_ETH, App.get_name().c_str());
+  ESPHL_ERROR_CHECK(err, "ETH set hostname error");

   tcpip_adapter_ip_info_t info;
   if (this->manual_ip_.has_value()) {
@@ -220,7 +225,7 @@ void EthernetComponent::eth_phy_power_enable_(bool enable) {
   delay(1);
   global_eth_component->orig_power_enable_fun_(enable);
 }
-bool EthernetComponent::is_connected() { return this->connected_ && this->last_connected_; }
+bool EthernetComponent::is_connected() { return this->state_ == EthernetComponentState::CONNECTED; }

 void EthernetComponent::dump_connect_params_() {
   tcpip_adapter_ip_info_t ip;
   tcpip_adapter_get_ip_info(TCPIP_ADAPTER_IF_ETH, &ip);


@@ -26,6 +26,12 @@ struct ManualIP {
   IPAddress dns2;  ///< The second DNS server. 0.0.0.0 for default.
 };

+enum class EthernetComponentState {
+  STOPPED,
+  CONNECTING,
+  CONNECTED,
+};
+
 class EthernetComponent : public Component {
  public:
   EthernetComponent();

@@ -65,9 +71,9 @@ class EthernetComponent : public Component {
   eth_clock_mode_t clk_mode_{ETH_CLOCK_GPIO0_IN};
   optional<ManualIP> manual_ip_{};

-  bool initialized_{false};
+  bool started_{false};
   bool connected_{false};
-  bool last_connected_{false};
+  EthernetComponentState state_{EthernetComponentState::STOPPED};
   uint32_t connect_begin_;
   eth_config_t eth_config;
   eth_phy_power_enable_func orig_power_enable_fun_;


@@ -109,7 +109,15 @@ def _compute_destination_path(key: str) -> Path:
     return base_dir / h.hexdigest()[:8]

-def _handle_git_response(ret):
+def _run_git_command(cmd):
+    try:
+        ret = subprocess.run(cmd, capture_output=True, check=False)
+    except FileNotFoundError as err:
+        raise cv.Invalid(
+            "git is not installed but required for external_components.\n"
+            "Please see https://git-scm.com/book/en/v2/Getting-Started-Installing-Git for installing git"
+        ) from err
+
     if ret.returncode != 0 and ret.stderr:
         err_str = ret.stderr.decode("utf-8")
         lines = [x.strip() for x in err_str.splitlines()]

@@ -118,18 +126,17 @@ def _handle_git_response(ret):
         raise cv.Invalid(err_str)

-def _process_single_config(config: dict):
-    conf = config[CONF_SOURCE]
-    if conf[CONF_TYPE] == TYPE_GIT:
-        key = f"{conf[CONF_URL]}@{conf.get(CONF_REF)}"
-        repo_dir = _compute_destination_path(key)
-        if not repo_dir.is_dir():
-            cmd = ["git", "clone", "--depth=1"]
-            if CONF_REF in conf:
-                cmd += ["--branch", conf[CONF_REF]]
-            cmd += [conf[CONF_URL], str(repo_dir)]
-            ret = subprocess.run(cmd, capture_output=True, check=False)
-            _handle_git_response(ret)
-        else:
-            # Check refresh needed
+def _process_git_config(config: dict, refresh) -> str:
+    key = f"{config[CONF_URL]}@{config.get(CONF_REF)}"
+    repo_dir = _compute_destination_path(key)
+    if not repo_dir.is_dir():
+        _LOGGER.info("Cloning %s", key)
+        _LOGGER.debug("Location: %s", repo_dir)
+        cmd = ["git", "clone", "--depth=1"]
+        if CONF_REF in config:
+            cmd += ["--branch", config[CONF_REF]]
+        cmd += ["--", config[CONF_URL], str(repo_dir)]
+        _run_git_command(cmd)
+    else:
+        # Check refresh needed

@@ -140,13 +147,18 @@ def _process_single_config(config: dict):
             age = datetime.datetime.now() - datetime.datetime.fromtimestamp(
                 file_timestamp.stat().st_mtime
             )
-            if age.seconds > config[CONF_REFRESH].total_seconds:
-                _LOGGER.info("Executing git pull %s", key)
-                cmd = ["git", "pull"]
-                ret = subprocess.run(
-                    cmd, cwd=repo_dir, capture_output=True, check=False
-                )
-                _handle_git_response(ret)
+        if age.seconds > refresh.total_seconds:
+            _LOGGER.info("Updating %s", key)
+            _LOGGER.debug("Location: %s", repo_dir)
+            # Stash local changes (if any)
+            _run_git_command(["git", "stash", "push", "--include-untracked"])
+            # Fetch remote ref
+            cmd = ["git", "fetch", "--", "origin"]
+            if CONF_REF in config:
+                cmd.append(config[CONF_REF])
+            _run_git_command(cmd)
+            # Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
+            _run_git_command(["git", "reset", "--hard", "FETCH_HEAD"])

     if (repo_dir / "esphome" / "components").is_dir():
         components_dir = repo_dir / "esphome" / "components"

@@ -154,10 +166,19 @@ def _process_single_config(config: dict):
         components_dir = repo_dir / "components"
     else:
         raise cv.Invalid(
-            "Could not find components folder for source. Please check the source contains a 'components' or 'esphome/components' folder",
-            [CONF_SOURCE],
+            "Could not find components folder for source. Please check the source contains a 'components' or 'esphome/components' folder"
         )

+    return components_dir
+
+
+def _process_single_config(config: dict):
+    conf = config[CONF_SOURCE]
+    if conf[CONF_TYPE] == TYPE_GIT:
+        with cv.prepend_path([CONF_SOURCE]):
+            components_dir = _process_git_config(
+                config[CONF_SOURCE], config[CONF_REFRESH]
+            )
     elif conf[CONF_TYPE] == TYPE_LOCAL:
         components_dir = Path(CORE.relative_config_path(conf[CONF_PATH]))
     else:
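Note: the refresh path above deliberately avoids git pull: local edits are stashed, the requested ref is fetched from origin, and the checkout is hard-reset to FETCH_HEAD so the cached clone always matches the remote. Outside of ESPHome the same sequence looks roughly like this (a sketch using plain subprocess instead of _run_git_command; the helper name refresh_checkout is invented):

    import subprocess
    from typing import Optional

    def refresh_checkout(repo_dir: str, ref: Optional[str] = None) -> None:
        # Same sequence as _process_git_config: stash, fetch, hard reset to FETCH_HEAD.
        def git(*args: str) -> None:
            subprocess.run(["git", *args], cwd=repo_dir, check=True)

        git("stash", "push", "--include-untracked")
        fetch = ["fetch", "--", "origin"]
        if ref:
            fetch.append(ref)
        git(*fetch)
        git("reset", "--hard", "FETCH_HEAD")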


@@ -1,10 +1,6 @@
 """Constants used by esphome."""

-MAJOR_VERSION = 1
-MINOR_VERSION = 20
-PATCH_VERSION = "0b2"
-__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
-__version__ = f"{__short_version__}.{PATCH_VERSION}"
+__version__ = "1.20.0b3"

 ESP_PLATFORM_ESP32 = "ESP32"
 ESP_PLATFORM_ESP8266 = "ESP8266"


@@ -782,7 +782,6 @@ def make_app(debug=get_bool_env(ENV_DEV)):
     class StaticFileHandler(tornado.web.StaticFileHandler):
         def set_extra_headers(self, path):
-            if debug:
             self.set_header(
                 "Cache-Control", "no-store, no-cache, must-revalidate, max-age=0"
             )


@@ -1,50 +0,0 @@
#!/usr/bin/env python3
import argparse
import re
import sys
def sub(path, pattern, repl, expected_count=1):
with open(path) as fh:
content = fh.read()
content, count = re.subn(pattern, repl, content, flags=re.MULTILINE)
if expected_count is not None:
assert count == expected_count, f"Pattern {pattern} replacement failed!"
with open(path, "wt") as fh:
fh.write(content)
def write_version(version: str):
for p in [
".github/workflows/ci-docker.yml",
".github/workflows/release-dev.yml",
".github/workflows/release.yml",
]:
sub(p, r'base_version=".*"', f'base_version="{version}"')
sub(
"docker/Dockerfile",
r"ARG BUILD_FROM=esphome/esphome-base-amd64:.*",
f"ARG BUILD_FROM=esphome/esphome-base-amd64:{version}",
)
sub(
"docker/Dockerfile.lint",
r"FROM esphome/esphome-lint-base:.*",
f"FROM esphome/esphome-lint-base:{version}",
)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("new_version", type=str)
args = parser.parse_args()
version = args.new_version
print(f"Bumping to {version}")
write_version(version)
return 0
if __name__ == "__main__":
sys.exit(main() or 0)


@@ -50,16 +50,10 @@ def sub(path, pattern, repl, expected_count=1):

 def write_version(version: Version):
-    sub(
-        "esphome/const.py", r"^MAJOR_VERSION = \d+$", f"MAJOR_VERSION = {version.major}"
-    )
-    sub(
-        "esphome/const.py", r"^MINOR_VERSION = \d+$", f"MINOR_VERSION = {version.minor}"
-    )
     sub(
         "esphome/const.py",
-        r"^PATCH_VERSION = .*$",
-        f'PATCH_VERSION = "{version.full_patch}"',
+        r"^__version__ = .*$",
+        f'__version__ = "{version}"',
     )
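Note: with esphome/const.py now holding a single __version__ string, the bump script needs only the one substitution shown above. A small demonstration of the new pattern against a sample line (the version strings are examples; re.subn here stands in for the script's sub() helper):

    import re

    line = '__version__ = "1.20.0b2"'
    new_version = "1.20.0b3"

    updated, count = re.subn(
        r"^__version__ = .*$", f'__version__ = "{new_version}"', line, flags=re.MULTILINE
    )
    assert count == 1, "Pattern replacement failed!"
    print(updated)  # __version__ = "1.20.0b3"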