Mirror of https://github.com/esphome/esphome.git, synced 2024-11-21 22:48:10 +01:00

Commit ab43390983: Merge branch 'dev' into rc
96 changed files with 1116 additions and 601 deletions
.gitlab-ci.yml (171 lines changed)

@@ -61,44 +61,84 @@ test2:
   <<: *docker-builder
   stage: build
   script:
+    - docker run --rm --privileged hassioaddons/qemu-user-static:latest
+    - BUILD_FROM=homeassistant/${ADDON_ARCH}-base-ubuntu:latest
+    - ADDON_VERSION="${CI_COMMIT_TAG#v}"
+    - ADDON_VERSION="${ADDON_VERSION:-${CI_COMMIT_SHA:0:7}}"
+    - ESPHOMELIB_VERSION="${ESPHOMELIB_VERSION:-''}"
+    - echo "Build from ${BUILD_FROM}"
+    - echo "Add-on version ${ADDON_VERSION}"
+    - echo "Esphomelib version ${ESPHOMELIB_VERSION}"
+    - echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev"
+    - echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
     - |
-      hassio-builder.sh \
-        -t . \
-        -i ottowinter/esphomeyaml-hassio-${ADDON_ARCH} \
-        -d "$CI_REGISTRY" \
-        --${ADDON_ARCH}
+      docker build \
+        --build-arg "BUILD_FROM=${BUILD_FROM}" \
+        --build-arg "ADDON_ARCH=${ADDON_ARCH}" \
+        --build-arg "ADDON_VERSION=${ADDON_VERSION}" \
+        --build-arg "ESPHOMELIB_VERSION=${ESPHOMELIB_VERSION}" \
+        --tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev" \
+        --tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
+        --file "docker/Dockerfile.hassio" \
+        .
     - |
-      docker tag \
-        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:dev" \
-        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
-    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
-    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:dev"
-  retry: 2
+      if [ "${DO_PUSH:-true}" = true ]; then
+        echo "Pushing to CI registry"
+        docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}
+        docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev
+      fi

 # Generic deploy template
 .deploy-release: &deploy-release
   <<: *docker-builder
   stage: deploy
   script:
-    - version=${CI_COMMIT_TAG:1}
+    - version="${CI_COMMIT_TAG#v}"
     - echo "Publishing release version ${version}"
-    - docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
     - docker pull "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
+    - docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"

+    - echo "Tag ${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
+    - |
+      docker tag \
+        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
+        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
+    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

+    - echo "Tag ${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
     - |
       docker tag \
         "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
         "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
+    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"

+    - echo "Tag ${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
+    - |
+      docker tag \
+        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
+        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
+    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"

+    - echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
     - |
       docker tag \
         "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
         "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
+    - docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

+    - echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
     - |
       docker tag \
         "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}" \
         "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
-    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
-    - docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
     - docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"

+    - echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
+    - |
+      docker tag \
+        "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}" \
+        "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
+    - docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
   only:
     - /^v\d+\.\d+\.\d+$/
   except:

@@ -108,24 +148,37 @@ test2:
   <<: *docker-builder
   stage: deploy
   script:
-    - version=${CI_COMMIT_TAG:1}
+    - version="${CI_COMMIT_TAG#v}"
     - echo "Publishing beta version ${version}"
-    - docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
     - docker pull "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
+    - docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"

+    - echo "Tag ${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
+    - |
+      docker tag \
+        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
+        "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
+    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

+    - echo "Tag ${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
     - |
       docker tag \
         "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
         "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
+    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"

+    - echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
     - |
       docker tag \
         "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
         "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
+    - docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

+    - echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
     - |
       docker tag \
         "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}" \
         "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
-    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:beta"
-    - docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
     - docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
   only:
     - /^v\d+\.\d+\.\d+b\d+$/

@@ -138,30 +191,64 @@ build:normal:
   stage: build
   script:
     - docker build -t "${CI_REGISTRY}/ottowinter/esphomeyaml:dev" .
-    - |
-      docker tag \
-        "${CI_REGISTRY}/ottowinter/esphomeyaml:dev" \
-        "${CI_REGISTRY}/ottowinter/esphomeyaml:${CI_COMMIT_SHA}"
-    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml:${CI_COMMIT_SHA}"
-    - docker push "${CI_REGISTRY}/ottowinter/esphomeyaml:dev"

-build:armhf:
+.build-hassio-edge: &build-hassio-edge
   <<: *build-hassio
+  except:
+    - /^v\d+\.\d+\.\d+$/
+    - /^v\d+\.\d+\.\d+b\d+$/
+
+.build-hassio-release: &build-hassio-release
+  <<: *build-hassio
+  only:
+    - /^v\d+\.\d+\.\d+$/
+    - /^v\d+\.\d+\.\d+b\d+$/
+
+build:hassio-armhf-edge:
+  <<: *build-hassio-edge
+  variables:
+    ADDON_ARCH: armhf
+    DO_PUSH: "false"
+    ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
+
+build:hassio-armhf:
+  <<: *build-hassio-release
   variables:
     ADDON_ARCH: armhf

-#build:aarch64:
-#  <<: *build
-#  variables:
-#    ADDON_ARCH: aarch64
+build:hassio-aarch64-edge:
+  <<: *build-hassio-edge
+  variables:
+    ADDON_ARCH: aarch64
+    DO_PUSH: "false"
+    ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"

-build:i386:
-  <<: *build-hassio
+build:hassio-aarch64:
+  <<: *build-hassio-release
+  variables:
+    ADDON_ARCH: aarch64
+
+build:hassio-i386-edge:
+  <<: *build-hassio-edge
+  variables:
+    ADDON_ARCH: i386
+    DO_PUSH: "false"
+    ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
+
+build:hassio-i386:
+  <<: *build-hassio-release
   variables:
     ADDON_ARCH: i386

-build:amd64:
-  <<: *build-hassio
+build:hassio-amd64-edge:
+  <<: *build-hassio-edge
+  variables:
+    ADDON_ARCH: amd64
+    DO_PUSH: "false"
+    ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"
+
+build:hassio-amd64:
+  <<: *build-hassio-release
   variables:
     ADDON_ARCH: amd64

@@ -176,15 +263,15 @@ deploy-beta:armhf:
   variables:
     ADDON_ARCH: armhf

-#deploy-release:aarch64:
-#  <<: *deploy-release
-#  variables:
-#    ADDON_ARCH: aarch64
+deploy-release:aarch64:
+  <<: *deploy-release
+  variables:
+    ADDON_ARCH: aarch64

-#deploy-beta:aarch64:
-#  <<: *deploy-beta
-#  variables:
-#    ADDON_ARCH: aarch64
+deploy-beta:aarch64:
+  <<: *deploy-beta
+  variables:
+    ADDON_ARCH: aarch64

 deploy-release:i386:
   <<: *deploy-release
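The release and beta templates above switch from ${CI_COMMIT_TAG:1}, which blindly drops the first character of the tag, to ${CI_COMMIT_TAG#v}, which strips only a leading "v", and the build template falls back to a short commit SHA when no tag is set. A rough Python equivalent of those shell expansions (the helper name is made up for this sketch):

def derive_addon_version(ci_commit_tag, ci_commit_sha):
    # "${CI_COMMIT_TAG#v}": strip one leading "v" if present; the old
    # "${CI_COMMIT_TAG:1}" dropped the first character unconditionally.
    version = ci_commit_tag[1:] if ci_commit_tag.startswith("v") else ci_commit_tag
    # "${ADDON_VERSION:-${CI_COMMIT_SHA:0:7}}": fall back to the short SHA
    # for untagged (dev/edge) builds.
    return version or ci_commit_sha[:7]

assert derive_addon_version("v1.9.0b2", "ab43390983aa") == "1.9.0b2"
assert derive_addon_version("", "ab43390983aa") == "ab43390"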
Dockerfile (27 lines changed)

@@ -1,25 +1,32 @@
-FROM python:2.7
+ARG BUILD_FROM=python:2.7
+FROM ${BUILD_FROM}
 MAINTAINER Otto Winter <contact@otto-winter.com>

 RUN apt-get update && apt-get install -y \
     python-pil \
-    && rm -rf /var/lib/apt/lists/*
+    git \
+    && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
+    pip install --no-cache-dir --no-binary :all: platformio && \
+    platformio settings set enable_telemetry No && \
+    platformio settings set check_libraries_interval 1000000 && \
+    platformio settings set check_platformio_interval 1000000 && \
+    platformio settings set check_platforms_interval 1000000

 ENV ESPHOMEYAML_OTA_HOST_PORT=6123
 EXPOSE 6123
 VOLUME /config
 WORKDIR /usr/src/app

-RUN pip install --no-cache-dir --no-binary :all: platformio && \
-    platformio settings set enable_telemetry No
-
-COPY docker/platformio.ini /usr/src/app/
-RUN platformio settings set enable_telemetry No && \
-    platformio run -e espressif32 -e espressif8266; exit 0
+COPY docker/platformio.ini /pio/platformio.ini
+ARG ESPHOMELIB_VERSION=""
+RUN platformio run -d /pio; rm -rf /pio && \
+    /bin/bash -c "if [ ! -z '$ESPHOMELIB_VERSION']; then \
+      platformio lib -g install '${ESPHOMELIB_VERSION}'; \
+    fi"

 COPY . .
-RUN pip install --no-cache-dir -e . && \
-    pip install --no-cache-dir tzlocal pillow
+RUN pip install --no-cache-dir --no-binary :all: -e . && \
+    pip install --no-cache-dir --no-binary :all: tzlocal

 WORKDIR /config
 ENTRYPOINT ["esphomeyaml"]
Deleted: the per-architecture HassIO add-on Dockerfiles for aarch64, amd64 and armhf (21, 21 and 31 lines). The aarch64 file was based on arm64v8/ubuntu:bionic and the amd64 file on ubuntu:bionic; both installed python, python-pip, python-setuptools, python-pil and git, installed platformio (--no-cache-dir --no-binary :all:) with telemetry disabled, pre-built a stub project from docker/platformio.ini to cache the toolchains, installed esphomeyaml plus tzlocal, and ended with CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]. The armhf file was based on homeassistant/armhf-base:latest and used apk to install python2, python2-dev, py2-pip, git, gcc, openssh, libc6-compat and the Pillow build dependencies (jpeg-dev, zlib-dev, freetype-dev, lcms2-dev, openjpeg-dev, tiff-dev, libc-dev, linux-headers), pre-built docker/platformio-esp8266.ini, and installed pillow and tzlocal alongside esphomeyaml. All three are superseded by the single parameterized docker/Dockerfile.hassio added below.
A builder-image Dockerfile drops the copy of the now-removed helper script:

@@ -27,6 +27,4 @@ RUN apt-get update && apt-get install -y \
     binfmt-support \
     && rm -rf /var/lib/apt/lists/*

-COPY docker/hassio-builder.sh /usr/bin/
-
 WORKDIR /data
docker/Dockerfile.hassio (new file, 44 lines)

# Dockerfile for HassIO add-on
ARG BUILD_FROM=homeassistant/amd64-base-ubuntu:latest
FROM ${BUILD_FROM}

RUN apt-get update && apt-get install -y --no-install-recommends \
    python \
    python-pip \
    python-setuptools \
    python-pil \
    git \
    && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
    pip install --no-cache-dir --no-binary :all: platformio && \
    platformio settings set enable_telemetry No && \
    platformio settings set check_libraries_interval 1000000 && \
    platformio settings set check_platformio_interval 1000000 && \
    platformio settings set check_platforms_interval 1000000

COPY docker/platformio.ini /pio/platformio.ini
ARG ESPHOMELIB_VERSION=""
RUN platformio run -d /pio; rm -rf /pio && \
    /bin/bash -c "if [ ! -z '$ESPHOMELIB_VERSION']; then \
      platformio lib -g install '${ESPHOMELIB_VERSION}'; \
    fi"

COPY . .
RUN pip install --no-cache-dir --no-binary :all: -e . && \
    pip install --no-cache-dir --no-binary :all: tzlocal

CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]

# Build arguments
ARG ADDON_ARCH
ARG ADDON_VERSION

# Labels
LABEL \
    io.hass.name="esphomeyaml" \
    io.hass.description="esphomeyaml HassIO add-on for intelligently managing all your ESP8266/ESP32 devices." \
    io.hass.arch="${ADDON_ARCH}" \
    io.hass.type="addon" \
    io.hass.version="${ADDON_VERSION}" \
    io.hass.url="https://esphomelib.com/esphomeyaml/index.html" \
    maintainer="Otto Winter <contact@otto-winter.com>"
Deleted: the i386 HassIO add-on Dockerfile (21 lines). It was identical in structure to the amd64 one, but based on i386/ubuntu:bionic.
Deleted: docker/hassio-builder.sh (318 lines), the bash build helper based on Home Assistant's docker builder. It provided print_help usage text; start_docker/stop_docker helpers that either reused a mapped /var/run/docker.sock or started an in-container dockerd and waited for it with a 20-second timeout; a run_build function that pulled the previous :latest image as --cache-from (falling back to --no-cache), built docker/Dockerfile.<arch> with io.hass.version and io.hass.arch labels and retagged the result as :dev; and a build_addon function that read name, description, url, version and image from esphomeyaml/config.json with jq and attached the io.hass.name, io.hass.description and io.hass.type=addon labels. init_crosscompile/clean_crosscompile mounted binfmt_misc and toggled the qemu-arm/qemu-aarch64 handlers, a SIGINT/SIGTERM trap cleaned everything up, and the main section parsed -t/--target, -i/--image, --no-cache, -d/--docker-hub and the architecture flags (--armhf, --amd64, --i386, --aarch64, --all), then ran one build_addon job per selected architecture in parallel and waited for them. The GitLab CI job now calls docker build directly (see the .gitlab-ci.yml change above), so the script is no longer needed.
Deleted: the ESP8266-only PlatformIO stub project used to pre-install the toolchain on armhf (7 lines):

; This file allows the docker build file to install the required platformio
; platforms

[env:espressif8266]
platform = espressif8266
board = nodemcuv2
framework = arduino
The esphomeyaml-beta add-on configuration bumps the version and points at the beta docs:

@@ -1,9 +1,9 @@
 {
   "name": "esphomeyaml-beta",
-  "version": "1.9.0b1",
+  "version": "1.9.0b2",
   "slug": "esphomeyaml-beta",
   "description": "Beta version of esphomeyaml HassIO add-on.",
-  "url": "https://esphomelib.com/esphomeyaml/index.html",
+  "url": "https://beta.esphomelib.com/esphomeyaml/index.html",
   "startup": "application",
   "webui": "http://[HOST]:[PORT:6052]",
   "boot": "auto",
The edge add-on Dockerfile is rewritten from a dual ubuntu/alpine build into a plain ubuntu-based one (59 lines become 24). The old version re-declared BUILD_FROM, carried a comment block explaining the ubuntu-vs-alpine split ("use ubuntu for most architectures, except alpine for armhf"), and used a bash if/else inside RUN to either apt-get install python, python-pip, python-setuptools, python-pil and git (ubuntu) or apk add python2, py2-pip, git, gcc, openssh, libc6-compat and the Pillow build dependencies (alpine), then pre-built espressif32+espressif8266 or only espressif8266 from the stub project, and installed esphomeyaml via git+git://github.com/OttoWinter/esphomeyaml.git. The new version reads:

# Dockerfile for HassIO edge add-on
ARG BUILD_FROM=homeassistant/amd64-base-ubuntu:latest
FROM ${BUILD_FROM}

RUN apt-get update && apt-get install -y --no-install-recommends \
    python \
    python-pip \
    python-setuptools \
    python-pil \
    git \
    && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
    pip install --no-cache-dir --no-binary :all: platformio && \
    platformio settings set enable_telemetry No && \
    platformio settings set check_libraries_interval 1000000 && \
    platformio settings set check_platformio_interval 1000000 && \
    platformio settings set check_platforms_interval 1000000

COPY platformio.ini /pio/platformio.ini
RUN platformio run -d /pio; rm -rf /pio

RUN pip install --no-cache-dir git+https://github.com/OttoWinter/esphomeyaml.git@dev#egg=esphomeyaml && \
    pip install --no-cache-dir pillow tzlocal

CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
The matching build.json switches every architecture to the Home Assistant ubuntu base images:

@@ -1,10 +1,10 @@
 {
   "squash": false,
   "build_from": {
-    "aarch64": "arm64v8/ubuntu:bionic",
-    "amd64": "ubuntu:bionic",
-    "armhf": "homeassistant/armhf-base:latest",
-    "i386": "i386/ubuntu:bionic"
+    "aarch64": "homeassistant/aarch64-base-ubuntu:latest",
+    "amd64": "homeassistant/amd64-base-ubuntu:latest",
+    "armhf": "homeassistant/armhf-base-ubuntu:latest",
+    "i386": "homeassistant/i386-base-ubuntu:latest"
   },
   "args": {}
 }
esphomeyaml/__main__.py

@@ -1,22 +1,22 @@
 from __future__ import print_function

 import argparse
+from collections import OrderedDict
 import logging
 import os
 import random
 import sys
 from datetime import datetime

-from esphomeyaml import const, core, core_config, mqtt, wizard, writer, yaml_util
+from esphomeyaml import const, core, core_config, mqtt, wizard, writer, yaml_util, platformio_api
 from esphomeyaml.config import get_component, iter_components, read_config
 from esphomeyaml.const import CONF_BAUD_RATE, CONF_BUILD_PATH, CONF_DOMAIN, CONF_ESPHOMEYAML, \
     CONF_HOSTNAME, CONF_LOGGER, CONF_MANUAL_IP, CONF_NAME, CONF_STATIC_IP, CONF_USE_CUSTOM_CODE, \
     CONF_WIFI, ESP_PLATFORM_ESP8266
 from esphomeyaml.core import ESPHomeYAMLError
 from esphomeyaml.helpers import AssignmentExpression, Expression, RawStatement, \
-    _EXPRESSIONS, add, \
-    add_job, color, flush_tasks, indent, quote, statement, relative_path
-from esphomeyaml.util import safe_print
+    _EXPRESSIONS, add, add_job, color, flush_tasks, indent, statement, relative_path
+from esphomeyaml.util import safe_print, run_external_command

 _LOGGER = logging.getLogger(__name__)

@@ -62,34 +62,6 @@ def choose_serial_port(config):
     return result[opt][0]


-def run_platformio(*cmd, **kwargs):
-    def mock_exit(return_code):
-        raise SystemExit(return_code)
-
-    orig_argv = sys.argv
-    orig_exit = sys.exit  # mock sys.exit
-    full_cmd = u' '.join(quote(x) for x in cmd)
-    _LOGGER.info(u"Running: %s", full_cmd)
-    try:
-        func = kwargs.get('main')
-        if func is None:
-            import platformio.__main__
-            func = platformio.__main__.main
-        sys.argv = list(cmd)
-        sys.exit = mock_exit
-        return func() or 0
-    except KeyboardInterrupt:
-        return 1
-    except SystemExit as err:
-        return err.args[0]
-    except Exception as err:  # pylint: disable=broad-except
-        _LOGGER.error(u"Running platformio failed: %s", err)
-        _LOGGER.error(u"Please try running %s locally.", full_cmd)
-    finally:
-        sys.argv = orig_argv
-        sys.exit = orig_exit
-
-
 def run_miniterm(config, port, escape=False):
     import serial
     if CONF_LOGGER not in config:

@@ -100,6 +72,7 @@ def run_miniterm(config, port, escape=False):
         _LOGGER.info("UART logging is disabled (baud_rate=0). Not starting UART logs.")
     _LOGGER.info("Starting log output from %s with baud rate %s", port, baud_rate)

+    backtrace_state = False
     with serial.Serial(port, baudrate=baud_rate) as ser:
         while True:
             try:

@@ -114,6 +87,9 @@ def run_miniterm(config, port, escape=False):
                 message = message.replace('\033', '\\033')
             safe_print(message)

+            backtrace_state = platformio_api.process_stacktrace(
+                config, line, backtrace_state=backtrace_state)
+

 def write_cpp(config):
     _LOGGER.info("Generating C++ source...")

@@ -154,11 +130,7 @@

 def compile_program(args, config):
     _LOGGER.info("Compiling app...")
-    build_path = relative_path(config[CONF_ESPHOMEYAML][CONF_BUILD_PATH])
-    command = ['platformio', 'run', '-d', build_path]
-    if args.verbose:
-        command.append('-v')
-    return run_platformio(*command)
+    return platformio_api.run_compile(config, args.verbose)


 def get_upload_host(config):

@@ -176,10 +148,10 @@ def upload_using_esptool(config, port):

     build_path = relative_path(config[CONF_ESPHOMEYAML][CONF_BUILD_PATH])
     path = os.path.join(build_path, '.pioenvs', core.NAME, 'firmware.bin')
+    cmd = ['esptool.py', '--before', 'default_reset', '--after', 'hard_reset',
+           '--chip', 'esp8266', '--port', port, 'write_flash', '0x0', path]
     # pylint: disable=protected-access
-    return run_platformio('esptool.py', '--before', 'default_reset', '--after', 'hard_reset',
-                          '--chip', 'esp8266', '--port', port, 'write_flash', '0x0',
-                          path, main=esptool._main)
+    return run_external_command(esptool._main, *cmd)


 def upload_program(config, args, port):

@@ -190,11 +162,7 @@
     if port != 'OTA' and serial_port:
         if core.ESP_PLATFORM == ESP_PLATFORM_ESP8266 and args.use_esptoolpy:
             return upload_using_esptool(config, port)
-        command = ['platformio', 'run', '-d', build_path,
-                   '-t', 'upload', '--upload-port', port]
-        if args.verbose:
-            command.append('-v')
-        return run_platformio(*command)
+        return platformio_api.run_upload(config, args.verbose, port)

     if 'ota' not in config:
         _LOGGER.error("No serial port found and OTA not enabled. Can't upload!")

@@ -243,7 +211,7 @@ def clean_mqtt(config, args):
 def setup_log(debug=False):
     log_level = logging.DEBUG if debug else logging.INFO
     logging.basicConfig(level=log_level)
-    fmt = "%(levelname)s [%(name)s] %(message)s"
+    fmt = "%(levelname)s %(message)s"
     colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
     datefmt = '%H:%M:%S'

@@ -367,6 +335,28 @@ def command_clean(args, config):
     return 0


+def command_hass_config(args, config):
+    from esphomeyaml.components import mqtt as mqtt_component
+
+    _LOGGER.info("This is what you should put in your Home Assistant YAML configuration.")
+    _LOGGER.info("Please note this is only necessary if you're not using MQTT discovery.")
+    data = mqtt_component.GenerateHassConfigData(config)
+    hass_config = OrderedDict()
+    for domain, component, conf in iter_components(config):
+        if not hasattr(component, 'to_hass_config'):
+            continue
+        func = getattr(component, 'to_hass_config')
+        ret = func(data, conf)
+        if not isinstance(ret, (list, tuple)):
+            ret = [ret]
+        ret = [x for x in ret if x is not None]
+        domain_conf = hass_config.setdefault(domain.split('.')[0], [])
+        domain_conf += ret
+
+    safe_print(yaml_util.dump(hass_config))
+    return 0
+
+
 def command_dashboard(args):
     from esphomeyaml.dashboard import dashboard

@@ -388,6 +378,7 @@ POST_CONFIG_ACTIONS = {
     'clean-mqtt': command_clean_mqtt,
     'mqtt-fingerprint': command_mqtt_fingerprint,
     'clean': command_clean,
+    'hass-config': command_hass_config,
 }

@@ -469,6 +460,9 @@ def parse_args(argv):
     dashboard.add_argument("--open-ui", help="Open the dashboard UI in a browser.",
                            action='store_true')

+    subparsers.add_parser('hass-config', help="Dump the configuration entries that should be added"
+                                              "to Home Assistant when not using MQTT discovery.")
+
     return parser.parse_args(argv[1:])
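The run_external_command helper imported from esphomeyaml.util is not part of this diff. A minimal sketch of what such a helper can look like, generalized from the removed run_platformio() above (the body below is an assumption, not the actual esphomeyaml.util implementation):

import logging
import sys

_LOGGER = logging.getLogger(__name__)


def run_external_command(func, *cmd):
    # Pretend the wrapped entry point (e.g. esptool._main) was started from
    # the command line: swap argv and turn sys.exit() into an exception so
    # the calling process keeps running.
    def mock_exit(return_code):
        raise SystemExit(return_code)

    orig_argv, orig_exit = sys.argv, sys.exit
    _LOGGER.info(u"Running: %s", u' '.join(cmd))
    try:
        sys.argv = list(cmd)
        sys.exit = mock_exit
        return func() or 0
    except KeyboardInterrupt:
        return 1
    except SystemExit as err:
        return err.args[0]
    finally:
        sys.argv = orig_argv
        sys.exit = orig_exit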
The binary_sensor component gains a shared hook for the new hass-config command:

@@ -1,5 +1,6 @@
 import voluptuous as vol

+from esphomeyaml.components import mqtt
 import esphomeyaml.config_validation as cv
 from esphomeyaml import automation
 from esphomeyaml.const import CONF_DEVICE_CLASS, CONF_ID, CONF_INTERNAL, CONF_INVERTED, \

@@ -154,4 +155,14 @@ def register_binary_sensor(var, config):
     add_job(setup_binary_sensor_core_, binary_sensor_var, mqtt_var, config)


+def core_to_hass_config(data, config):
+    ret = mqtt.build_hass_config(data, 'binary_sensor', config,
+                                 include_state=True, include_command=False)
+    if ret is None:
+        return None
+    if CONF_DEVICE_CLASS in config:
+        ret['device_class'] = config[CONF_DEVICE_CLASS]
+    return ret
+
+
 BUILD_FLAGS = '-DUSE_BINARY_SENSOR'
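For a single binary sensor with a device_class set, the dictionaries collected by command_hass_config end up under the binary_sensor domain of the dumped YAML. The shape below is only an illustration: the exact keys and topic strings come from mqtt.build_hass_config, which is not part of this diff, so the 'platform' key and topic values are assumptions.

hass_config = {
    'binary_sensor': [
        {
            'platform': 'mqtt',  # assumed default platform
            'name': 'Window',
            'state_topic': 'livingroom/binary_sensor/window/state',  # illustrative topic
            'device_class': 'window',  # only present when device_class is configured
        },
    ],
}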
Each binary_sensor platform then exposes the shared helper. Nine platforms (the ESP32 BLE presence, ESP32 touch, GPIO, Nextion touch, PN532 tag, RDM6300 card, remote receiver, status and template binary sensors) gain the same four-line addition at the end of the module; the ESP32 BLE presence hunk is representative:

@@ -21,3 +21,7 @@ def to_code(config):
     yield
     rhs = hub.make_presence_sensor(config[CONF_NAME], make_address_array(config[CONF_MAC_ADDRESS]))
     binary_sensor.register_binary_sensor(rhs, config)
+
+
+def to_hass_config(data, config):
+    return binary_sensor.core_to_hass_config(data, config)
The cover component gains the same kind of helper:

@@ -1,6 +1,7 @@
 import voluptuous as vol

 from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
+from esphomeyaml.components import mqtt
 import esphomeyaml.config_validation as cv
 from esphomeyaml.const import CONF_ID, CONF_MQTT_ID, CONF_INTERNAL
 from esphomeyaml.helpers import Pvariable, esphomelib_ns, setup_mqtt_component, add, \

@@ -88,3 +89,10 @@ def cover_stop_to_code(config, action_id, arg_type):
     rhs = var.make_stop_action(template_arg)
     type = StopAction.template(arg_type)
     yield Pvariable(action_id, rhs, type=type)
+
+
+def core_to_hass_config(data, config):
+    ret = mqtt.build_hass_config(data, 'cover', config, include_state=True, include_command=True)
+    if ret is None:
+        return None
+    return ret
The template cover platform adds its optimistic flag on top:

@@ -45,3 +45,12 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_TEMPLATE_COVER'
+
+
+def to_hass_config(data, config):
+    ret = cover.core_to_hass_config(data, config)
+    if ret is None:
+        return None
+    if CONF_OPTIMISTIC in config:
+        ret['optimistic'] = config[CONF_OPTIMISTIC]
+    return ret
The fan component adds oscillation topics when an oscillation output is configured:

@@ -1,10 +1,11 @@
 import voluptuous as vol

 from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
+from esphomeyaml.components import mqtt
 import esphomeyaml.config_validation as cv
 from esphomeyaml.const import CONF_ID, CONF_MQTT_ID, CONF_OSCILLATION_COMMAND_TOPIC, \
     CONF_OSCILLATION_STATE_TOPIC, CONF_SPEED_COMMAND_TOPIC, CONF_SPEED_STATE_TOPIC, CONF_INTERNAL, \
-    CONF_SPEED, CONF_OSCILLATING
+    CONF_SPEED, CONF_OSCILLATING, CONF_OSCILLATION_OUTPUT, CONF_NAME
 from esphomeyaml.helpers import Application, Pvariable, add, esphomelib_ns, setup_mqtt_component, \
     TemplateArguments, get_variable, templatable, bool_

@@ -128,3 +129,15 @@ def fan_turn_on_to_code(config, action_id, arg_type):
         yield None
     add(action.set_speed(template_))
     yield action
+
+
+def core_to_hass_config(data, config):
+    ret = mqtt.build_hass_config(data, 'fan', config, include_state=True, include_command=True)
+    if ret is None:
+        return None
+    if CONF_OSCILLATION_OUTPUT in config:
+        default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'oscillation/state')
+        ret['oscillation_state_topic'] = config.get(CONF_OSCILLATION_STATE_TOPIC, default)
+        default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'oscillation/command')
+        ret['oscillation_command__topic'] = config.get(CONF_OSCILLATION_COMMAND_TOPIC, default)
+    return ret
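core_to_hass_config takes an explicit *_TOPIC from the user's YAML when present and otherwise asks mqtt.get_default_topic_for for a name-derived default. A small sketch of that fallback pattern (the topic format is an assumption, since get_default_topic_for is not shown in this diff):

def pick_topic(config, key, name, suffix):
    # Explicit topic from the user's YAML wins; otherwise derive one from
    # the component name (illustrative format only).
    default = 'esphomeyaml/fan/{}/{}'.format(name.lower().replace(' ', '_'), suffix)
    return config.get(key, default)


print(pick_topic({}, 'oscillation_state_topic', 'Ceiling Fan', 'oscillation/state'))
# -> esphomeyaml/fan/ceiling_fan/oscillation/state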
The binary fan platform delegates directly, and the speed fan platform adds the speed topics:

@@ -27,3 +27,7 @@ def to_code(config):
         add(fan_struct.Poutput.set_oscillation(oscillation_output))

     fan.setup_fan(fan_struct.Pstate, fan_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    return fan.core_to_hass_config(data, config)

@@ -1,7 +1,7 @@
 import voluptuous as vol

 import esphomeyaml.config_validation as cv
-from esphomeyaml.components import fan
+from esphomeyaml.components import fan, mqtt
 from esphomeyaml.const import CONF_HIGH, CONF_LOW, CONF_MAKE_ID, CONF_MEDIUM, CONF_NAME, \
     CONF_OSCILLATION_OUTPUT, CONF_OUTPUT, CONF_SPEED, CONF_SPEED_COMMAND_TOPIC, \
     CONF_SPEED_STATE_TOPIC

@@ -43,3 +43,14 @@ def to_code(config):
         add(fan_struct.Poutput.set_oscillation(oscillation_output))

     fan.setup_fan(fan_struct.Pstate, fan_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    ret = fan.core_to_hass_config(data, config)
+    if ret is None:
+        return None
+    default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'speed/state')
+    ret['speed_state_topic'] = config.get(CONF_SPEED_STATE_TOPIC, default)
+    default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'speed/command')
+    ret['speed_command__topic'] = config.get(CONF_SPEED_COMMAND_TOPIC, default)
+    return ret
@@ -1,6 +1,7 @@
 import voluptuous as vol

 from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
+from esphomeyaml.components import mqtt
 import esphomeyaml.config_validation as cv
 from esphomeyaml.const import CONF_ALPHA, CONF_BLUE, CONF_BRIGHTNESS, CONF_COLORS, \
     CONF_DEFAULT_TRANSITION_LENGTH, CONF_DURATION, CONF_EFFECTS, CONF_EFFECT_ID, \
@@ -448,3 +449,24 @@ def light_turn_on_to_code(config, action_id, arg_type):
             yield None
         add(action.set_effect(template_))
     yield action
+
+
+def core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=True,
+                        white_value=True):
+    ret = mqtt.build_hass_config(data, 'light', config, include_state=True, include_command=True,
+                                 platform='mqtt_json')
+    if ret is None:
+        return None
+    if brightness:
+        ret['brightness'] = True
+    if rgb:
+        ret['rgb'] = True
+    if color_temp:
+        ret['color_temp'] = True
+    if white_value:
+        ret['white_value'] = True
+    for effect in config.get(CONF_EFFECTS, []):
+        ret["effect"] = True
+        effects = ret.setdefault("effect_list", [])
+        effects.append(next(x for x in effect.values())[CONF_NAME])
+    return ret
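Note on the effect_list handling above: each entry in CONF_EFFECTS is a single-key mapping from the effect type to its options, so `next(x for x in effect.values())[CONF_NAME]` pulls out the configured effect name. A minimal standalone sketch (not part of the commit, with hypothetical effect entries) of what that loop produces:

    CONF_NAME = 'name'  # stand-in for esphomeyaml.const.CONF_NAME

    effects = [
        {'strobe': {CONF_NAME: 'Strobe'}},          # hypothetical example entries
        {'random': {CONF_NAME: 'Random Colors'}},
    ]

    ret = {}
    for effect in effects:
        ret['effect'] = True
        ret.setdefault('effect_list', []).append(next(x for x in effect.values())[CONF_NAME])

    print(ret)  # {'effect': True, 'effect_list': ['Strobe', 'Random Colors']}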
@@ -19,3 +19,8 @@ def to_code(config):
     rhs = App.make_binary_light(config[CONF_NAME], output)
     light_struct = variable(config[CONF_MAKE_ID], rhs)
     light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=False, rgb=False, color_temp=False,
+                                     white_value=False)
@@ -32,3 +32,8 @@ def to_code(config):
                                   cold_white, warm_white)
     light_struct = variable(config[CONF_MAKE_ID], rhs)
     light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=True, rgb=False, color_temp=True,
+                                     white_value=False)
@@ -102,3 +102,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_FAST_LED_LIGHT'
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
+                                     white_value=False)
@@ -82,3 +82,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_FAST_LED_LIGHT'
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
+                                     white_value=False)
@@ -22,3 +22,8 @@ def to_code(config):
     rhs = App.make_monochromatic_light(config[CONF_NAME], output)
     light_struct = variable(config[CONF_MAKE_ID], rhs)
     light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=True, rgb=False, color_temp=False,
+                                     white_value=False)
@@ -30,3 +30,8 @@ def to_code(config):
     rhs = App.make_rgb_light(config[CONF_NAME], red, green, blue)
     light_struct = variable(config[CONF_MAKE_ID], rhs)
     light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
+                                     white_value=False)
@@ -34,3 +34,8 @@ def to_code(config):
     rhs = App.make_rgbw_light(config[CONF_NAME], red, green, blue, white)
     light_struct = variable(config[CONF_MAKE_ID], rhs)
     light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
+                                     white_value=True)
@@ -60,3 +60,8 @@ def to_code(config):
                              red, green, blue, cold_white, warm_white)
     light_struct = variable(config[CONF_MAKE_ID], rhs)
     light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)
+
+
+def to_hass_config(data, config):
+    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=True,
+                                     white_value=True)
@@ -1,3 +1,4 @@
+from collections import OrderedDict
 import re

 import voluptuous as vol
@@ -10,7 +11,10 @@ from esphomeyaml.const import CONF_BIRTH_MESSAGE, CONF_BROKER, CONF_CLIENT_ID, C
     CONF_DISCOVERY_PREFIX, CONF_DISCOVERY_RETAIN, CONF_ID, CONF_KEEPALIVE, CONF_LEVEL, \
     CONF_LOG_TOPIC, CONF_ON_MESSAGE, CONF_PASSWORD, CONF_PAYLOAD, CONF_PORT, CONF_QOS, \
     CONF_REBOOT_TIMEOUT, CONF_RETAIN, CONF_SHUTDOWN_MESSAGE, CONF_SSL_FINGERPRINTS, CONF_TOPIC, \
-    CONF_TOPIC_PREFIX, CONF_TRIGGER_ID, CONF_USERNAME, CONF_WILL_MESSAGE, CONF_ON_JSON_MESSAGE
+    CONF_TOPIC_PREFIX, CONF_TRIGGER_ID, CONF_USERNAME, CONF_WILL_MESSAGE, CONF_ON_JSON_MESSAGE, \
+    CONF_STATE_TOPIC, CONF_MQTT, CONF_ESPHOMEYAML, CONF_NAME, CONF_AVAILABILITY, \
+    CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, CONF_INTERNAL
+from esphomeyaml.core import ESPHomeYAMLError
 from esphomeyaml.helpers import App, ArrayInitializer, Pvariable, RawExpression, \
     StructInitializer, TemplateArguments, add, esphomelib_ns, optional, std_string, templatable, \
     uint8, bool_, JsonObjectRef, process_lambda, JsonObjectConstRef
@@ -239,3 +243,66 @@ def required_build_flags(config):
     if CONF_SSL_FINGERPRINTS in config:
         return '-DASYNC_TCP_SSL_ENABLED=1'
     return None
+
+
+def get_default_topic_for(data, component_type, name, suffix):
+    whitelist = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_'
+    sanitized_name = ''.join(x for x in name.lower().replace(' ', '_') if x in whitelist)
+    return '{}/{}/{}/{}'.format(data.topic_prefix, component_type,
+                                sanitized_name, suffix)
+
+
+def build_hass_config(data, component_type, config, include_state=True, include_command=True,
+                      platform='mqtt'):
+    if config.get(CONF_INTERNAL, False):
+        return None
+    ret = OrderedDict()
+    ret['platform'] = platform
+    ret['name'] = config[CONF_NAME]
+    if include_state:
+        default = get_default_topic_for(data, component_type, config[CONF_NAME], 'state')
+        ret['state_topic'] = config.get(CONF_STATE_TOPIC, default)
+    if include_command:
+        default = get_default_topic_for(data, component_type, config[CONF_NAME], 'command')
+        ret['command_topic'] = config.get(CONF_STATE_TOPIC, default)
+    avail = config.get(CONF_AVAILABILITY, data.availability)
+    if avail:
+        ret['availability_topic'] = avail[CONF_TOPIC]
+        payload_available = avail[CONF_PAYLOAD_AVAILABLE]
+        if payload_available != 'online':
+            ret['payload_available'] = payload_available
+        payload_not_available = avail[CONF_PAYLOAD_NOT_AVAILABLE]
+        if payload_not_available != 'offline':
+            ret['payload_not_available'] = payload_not_available
+    return ret
+
+
+class GenerateHassConfigData(object):
+    def __init__(self, config):
+        if 'mqtt' not in config:
+            raise ESPHomeYAMLError("Cannot generate Home Assistant MQTT config if MQTT is not "
+                                   "used!")
+        mqtt = config[CONF_MQTT]
+        self.topic_prefix = mqtt.get(CONF_TOPIC_PREFIX, config[CONF_ESPHOMEYAML][CONF_NAME])
+        birth_message = mqtt.get(CONF_BIRTH_MESSAGE)
+        if CONF_BIRTH_MESSAGE not in mqtt:
+            birth_message = {
+                CONF_TOPIC: self.topic_prefix + '/status',
+                CONF_PAYLOAD: 'online',
+            }
+        will_message = mqtt.get(CONF_WILL_MESSAGE)
+        if CONF_WILL_MESSAGE not in mqtt:
+            will_message = {
+                CONF_TOPIC: self.topic_prefix + '/status',
+                CONF_PAYLOAD: 'offline'
+            }
+        if not birth_message or not will_message:
+            self.availability = None
+        elif birth_message[CONF_TOPIC] != will_message[CONF_TOPIC]:
+            self.availability = None
+        else:
+            self.availability = {
+                CONF_TOPIC: birth_message[CONF_TOPIC],
+                CONF_PAYLOAD_AVAILABLE: birth_message[CONF_PAYLOAD],
+                CONF_PAYLOAD_NOT_AVAILABLE: will_message[CONF_PAYLOAD],
+            }
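For reference, the default topics produced by get_default_topic_for follow the pattern topic_prefix/component_type/sanitized_name/suffix. A standalone sketch of that behaviour (not part of the commit; the `data` parameter is replaced here by a plain topic_prefix string and the device name is a made-up example):

    def get_default_topic_for(topic_prefix, component_type, name, suffix):
        # same whitelist and sanitization as the helper above
        whitelist = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_'
        sanitized_name = ''.join(x for x in name.lower().replace(' ', '_') if x in whitelist)
        return '{}/{}/{}/{}'.format(topic_prefix, component_type, sanitized_name, suffix)

    print(get_default_topic_for('livingroom', 'switch', 'Dehumidifier #1', 'state'))
    # -> livingroom/switch/dehumidifier_1/state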
@@ -1,5 +1,6 @@
 import voluptuous as vol

+from esphomeyaml.components import mqtt
 import esphomeyaml.config_validation as cv
 from esphomeyaml import automation
 from esphomeyaml.const import CONF_ABOVE, CONF_ACCURACY_DECIMALS, CONF_ALPHA, CONF_BELOW, \
@@ -211,3 +212,18 @@ def register_sensor(var, config):


 BUILD_FLAGS = '-DUSE_SENSOR'
+
+
+def core_to_hass_config(data, config):
+    ret = mqtt.build_hass_config(data, 'sensor', config, include_state=True, include_command=False)
+    if ret is None:
+        return None
+    if CONF_UNIT_OF_MEASUREMENT in config:
+        ret['unit_of_measurement'] = config[CONF_UNIT_OF_MEASUREMENT]
+    if CONF_EXPIRE_AFTER in config:
+        expire = config[CONF_EXPIRE_AFTER]
+        if expire is not None:
+            ret['expire_after'] = expire.total_seconds
+    if CONF_ICON in config:
+        ret['icon'] = config[CONF_ICON]
+    return ret
@@ -52,3 +52,7 @@ def required_build_flags(config):
     if config[CONF_PIN] == 'VCC':
         return '-DUSE_ADC_SENSOR_VCC'
     return None
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -65,3 +65,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_ADS1115_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -35,3 +35,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_BH1750'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -21,3 +21,7 @@ def to_code(config):
         yield
     rhs = hub.make_rssi_sensor(config[CONF_NAME], make_address_array(config[CONF_MAC_ADDRESS]))
     sensor.register_sensor(rhs, config)
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -72,3 +72,9 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_BME280'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_PRESSURE]),
+            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]
@@ -92,3 +92,10 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_BME680'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_PRESSURE]),
+            sensor.core_to_hass_config(data, config[CONF_HUMIDITY]),
+            sensor.core_to_hass_config(data, config[CONF_GAS_RESISTANCE])]
@@ -34,3 +34,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_BMP085_SENSOR'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_PRESSURE])]
@@ -65,3 +65,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_BMP280'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_PRESSURE])]
@@ -40,3 +40,11 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_CSE7766'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_VOLTAGE, CONF_CURRENT, CONF_POWER):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -29,3 +29,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_DALLAS_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -46,3 +46,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_DHT_SENSOR'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]
@@ -31,3 +31,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_DHT12_SENSOR'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]
@@ -26,3 +26,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_DUTY_CYCLE_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -22,3 +22,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_ESP32_HALL_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -32,3 +32,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_HDC1080_SENSOR'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]
@@ -55,3 +55,11 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_HLW8012'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_VOLTAGE, CONF_CURRENT, CONF_POWER):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -71,3 +71,11 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_HMC5883L'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_FIELD_STRENGTH_X, CONF_FIELD_STRENGTH_Y, CONF_FIELD_STRENGTH_Z, CONF_HEADING):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -30,3 +30,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_HTU21D_SENSOR'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]
@@ -44,3 +44,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_HX711'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -51,3 +51,11 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_INA219'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_BUS_VOLTAGE, CONF_SHUNT_VOLTAGE, CONF_CURRENT, CONF_POWER):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -62,3 +62,15 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_INA3221'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for channel in (CONF_CHANNEL_1, CONF_CHANNEL_2, CONF_CHANNEL_3):
+        if channel not in config:
+            continue
+        conf = config[channel]
+        for key in (CONF_BUS_VOLTAGE, CONF_SHUNT_VOLTAGE, CONF_CURRENT, CONF_POWER):
+            if key in conf:
+                ret.append(sensor.core_to_hass_config(data, conf[key]))
+    return ret
@@ -32,3 +32,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_MAX6675_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -36,3 +36,11 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_MHZ19'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_CO2, CONF_TEMPERATURE):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -69,3 +69,12 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_MPU6050'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_ACCEL_X, CONF_ACCEL_Y, CONF_ACCEL_Z, CONF_GYRO_X, CONF_GYRO_Y, CONF_GYRO_Z,
+                CONF_TEMPERATURE):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -27,3 +27,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_MQTT_SUBSCRIBE_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -35,3 +35,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_MS5611'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_PRESSURE])]
@@ -85,3 +85,12 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_PMSX003'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_PM_1_0, CONF_PM_2_5, CONF_PM_10_0, CONF_TEMPERATURE, CONF_HUMIDITY,
+                CONF_FORMALDEHYDE):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -65,3 +65,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_PULSE_COUNTER_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -49,3 +49,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_ROTARY_ENCODER_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -36,3 +36,8 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_SHT3XD'
+
+
+def to_hass_config(data, config):
+    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
+            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]
@@ -78,3 +78,12 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_TCS34725'
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_RED_CHANNEL, CONF_GREEN_CHANNEL, CONF_BLUE_CHANNEL, CONF_CLEAR_CHANNEL,
+                CONF_ILLUMINANCE, CONF_COLOR_TEMPERATURE):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -27,3 +27,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_TEMPLATE_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -55,3 +55,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_TSL2561'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -39,3 +39,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_ULTRASONIC_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -20,3 +20,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_UPTIME_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -20,3 +20,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_WIFI_SIGNAL_SENSOR'
+
+
+def to_hass_config(data, config):
+    return sensor.core_to_hass_config(data, config)
@@ -45,3 +45,12 @@ def to_code(config):
     if CONF_BATTERY_LEVEL in config:
         conf = config[CONF_BATTERY_LEVEL]
         sensor.register_sensor(dev.Pmake_battery_level_sensor(conf[CONF_NAME]), conf)
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_TEMPERATURE, CONF_MOISTURE, CONF_ILLUMINANCE, CONF_CONDUCTIVITY,
+                CONF_BATTERY_LEVEL):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -37,3 +37,11 @@ def to_code(config):
     if CONF_BATTERY_LEVEL in config:
         conf = config[CONF_BATTERY_LEVEL]
         sensor.register_sensor(dev.Pmake_battery_level_sensor(conf[CONF_NAME]), conf)
+
+
+def to_hass_config(data, config):
+    ret = []
+    for key in (CONF_TEMPERATURE, CONF_HUMIDITY, CONF_BATTERY_LEVEL):
+        if key in config:
+            ret.append(sensor.core_to_hass_config(data, config[key]))
+    return ret
@@ -1,8 +1,10 @@
 import voluptuous as vol

 from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
+from esphomeyaml.components import mqtt
 import esphomeyaml.config_validation as cv
-from esphomeyaml.const import CONF_ICON, CONF_ID, CONF_INVERTED, CONF_MQTT_ID, CONF_INTERNAL
+from esphomeyaml.const import CONF_ICON, CONF_ID, CONF_INVERTED, CONF_MQTT_ID, CONF_INTERNAL, \
+    CONF_OPTIMISTIC
 from esphomeyaml.helpers import App, Pvariable, add, esphomelib_ns, setup_mqtt_component, \
     TemplateArguments, get_variable

@@ -100,3 +102,14 @@ def switch_turn_on_to_code(config, action_id, arg_type):
     rhs = var.make_turn_on_action(template_arg)
     type = TurnOnAction.template(arg_type)
     yield Pvariable(action_id, rhs, type=type)
+
+
+def core_to_hass_config(data, config):
+    ret = mqtt.build_hass_config(data, 'switch', config, include_state=True, include_command=True)
+    if ret is None:
+        return None
+    if CONF_ICON in config:
+        ret['icon'] = config[CONF_ICON]
+    if CONF_OPTIMISTIC in config:
+        ret['optimistic'] = config[CONF_OPTIMISTIC]
+    return ret
@@ -29,3 +29,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_GPIO_SWITCH'
+
+
+def to_hass_config(data, config):
+    return switch.core_to_hass_config(data, config)
@@ -23,3 +23,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_OUTPUT_SWITCH'
+
+
+def to_hass_config(data, config):
+    return switch.core_to_hass_config(data, config)
@@ -146,3 +146,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_REMOTE_TRANSMITTER'
+
+
+def to_hass_config(data, config):
+    return switch.core_to_hass_config(data, config)
@@ -20,3 +20,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_RESTART_SWITCH'
+
+
+def to_hass_config(data, config):
+    return switch.core_to_hass_config(data, config)
@@ -20,3 +20,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_SHUTDOWN_SWITCH'
+
+
+def to_hass_config(data, config):
+    return switch.core_to_hass_config(data, config)
@@ -42,3 +42,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_TEMPLATE_SWITCH'
+
+
+def to_hass_config(data, config):
+    return switch.core_to_hass_config(data, config)
@@ -43,3 +43,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_UART_SWITCH'
+
+
+def to_hass_config(data, config):
+    return switch.core_to_hass_config(data, config)
@@ -1,6 +1,7 @@
 import voluptuous as vol

 from esphomeyaml import automation
+from esphomeyaml.components import mqtt
 import esphomeyaml.config_validation as cv
 from esphomeyaml.const import CONF_ICON, CONF_ID, CONF_INTERNAL, CONF_MQTT_ID, CONF_ON_VALUE, \
     CONF_TRIGGER_ID
@@ -58,3 +59,12 @@ def register_text_sensor(var, config):


 BUILD_FLAGS = '-DUSE_TEXT_SENSOR'
+
+
+def core_to_hass_config(data, config):
+    ret = mqtt.build_hass_config(data, 'sensor', config, include_state=True, include_command=False)
+    if ret is None:
+        return None
+    if CONF_ICON in config:
+        ret['icon'] = config[CONF_ICON]
+    return ret
@@ -25,3 +25,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_MQTT_SUBSCRIBE_TEXT_SENSOR'
+
+
+def to_hass_config(data, config):
+    return text_sensor.core_to_hass_config(data, config)
@@ -28,3 +28,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_TEMPLATE_TEXT_SENSOR'
+
+
+def to_hass_config(data, config):
+    return text_sensor.core_to_hass_config(data, config)
@@ -17,3 +17,7 @@ def to_code(config):


 BUILD_FLAGS = '-DUSE_VERSION_TEXT_SENSOR'
+
+
+def to_hass_config(data, config):
+    return text_sensor.core_to_hass_config(data, config)
@@ -13,7 +13,8 @@ from esphomeyaml.const import CONF_ESPHOMEYAML, CONF_BUILD_PATH
 from esphomeyaml.core import ESPHomeYAMLError
 from esphomeyaml import const, core, __main__
 from esphomeyaml.__main__ import get_serial_ports
-from esphomeyaml.helpers import quote, relative_path
+from esphomeyaml.helpers import relative_path
+from esphomeyaml.util import shlex_quote

 try:
     import tornado
@@ -51,7 +52,7 @@ class EsphomeyamlCommandWebSocket(tornado.websocket.WebSocketHandler):
         if self.proc is not None:
             return
         command = self.build_command(message)
-        _LOGGER.debug(u"WebSocket opened for command %s", [quote(x) for x in command])
+        _LOGGER.debug(u"WebSocket opened for command %s", [shlex_quote(x) for x in command])
         self.proc = tornado.process.Subprocess(command,
                                                stdout=tornado.process.Subprocess.STREAM,
                                                stderr=subprocess.STDOUT)
@@ -131,6 +132,13 @@ class EsphomeyamlCleanHandler(EsphomeyamlCommandWebSocket):
         return ["esphomeyaml", config_file, "clean"]


+class EsphomeyamlHassConfigHandler(EsphomeyamlCommandWebSocket):
+    def build_command(self, message):
+        js = json.loads(message)
+        config_file = os.path.join(CONFIG_DIR, js['configuration'])
+        return ["esphomeyaml", config_file, "hass-config"]
+
+
 class SerialPortRequestHandler(BaseHandler):
     def get(self):
         if not self.is_authenticated():
@@ -229,6 +237,7 @@ def make_app(debug=False):
         (r"/validate", EsphomeyamlValidateHandler),
         (r"/clean-mqtt", EsphomeyamlCleanMqttHandler),
         (r"/clean", EsphomeyamlCleanHandler),
+        (r"/hass-config", EsphomeyamlHassConfigHandler),
         (r"/download.bin", DownloadBinaryRequestHandler),
         (r"/serial-ports", SerialPortRequestHandler),
         (r"/wizard.html", WizardRequestHandler),
@@ -215,6 +215,7 @@
           <ul id="dropdown-{{ i }}" class="dropdown-content">
             <li><a href="#" class="action-clean-mqtt" data-node="{{ file }}">Clean MQTT</a></li>
             <li><a href="#" class="action-clean" data-node="{{ file }}">Clean Build</a></li>
+            <li><a href="#" class="action-hass-config" data-node="{{ file }}">Home Assistant Configuration</a></li>
           </ul>
         </div>
       </div>
@@ -491,6 +492,18 @@
       </div>
     </div>

+    <div id="modal-hass-config" class="modal modal-fixed-footer">
+      <div class="modal-content">
+        <h4>Generate Home Assistant Configuration <code class="inlinecode filename"></code></h4>
+        <div class="log-container">
+          <pre class="log"></pre>
+        </div>
+      </div>
+      <div class="modal-footer">
+        <a class="modal-close waves-effect waves-green btn-flat stop-logs">Stop</a>
+      </div>
+    </div>
+
     <a class="btn-floating btn-large ribbon-fab waves-effect waves-light pink accent-2" id="setup-wizard-start">
       <i class="material-icons">add</i>
     </a>
@@ -904,6 +917,54 @@
         });
       });

+      const hassConfigModalElem = document.getElementById("modal-hass-config");
+
+      document.querySelectorAll(".action-hass-config").forEach((btn) => {
+        btn.addEventListener('click', (e) => {
+          configuration = e.target.getAttribute('data-node');
+          const modalInstance = M.Modal.getInstance(hassConfigModalElem);
+          const log = hassConfigModalElem.querySelector(".log");
+          log.innerHTML = "";
+          const stopLogsButton = hassConfigModalElem.querySelector(".stop-logs");
+          let stopped = false;
+          stopLogsButton.innerHTML = "Stop";
+          modalInstance.open();
+
+          const filenameField = hassConfigModalElem.querySelector('.filename');
+          filenameField.innerHTML = configuration;
+
+          const logSocket = new WebSocket(wsUrl + "/hass-config");
+          logSocket.addEventListener('message', (event) => {
+            const data = JSON.parse(event.data);
+            if (data.event === "line") {
+              const msg = data.data;
+              log.innerHTML += colorReplace(msg);
+            } else if (data.event === "exit") {
+              if (data.code === 0) {
+                M.toast({html: "Program exited successfully."});
+                downloadButton.classList.remove('disabled');
+              } else {
+                M.toast({html: `Program failed with code ${data.code}`});
+              }
+              stopLogsButton.innerHTML = "Close";
+              stopped = true;
+            }
+          });
+          logSocket.addEventListener('open', () => {
+            const msg = JSON.stringify({configuration: configuration});
+            logSocket.send(msg);
+          });
+          logSocket.addEventListener('close', () => {
+            if (!stopped) {
+              M.toast({html: 'Terminated process.'});
+            }
+          });
+          modalInstance.options.onCloseStart = () => {
+            logSocket.close();
+          };
+        });
+      });
+
       const modalSetupElem = document.getElementById("modal-wizard");
       const setupWizardStart = document.getElementById('setup-wizard-start');
       const startWizard = () => {
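For context, the dashboard page above exchanges small JSON messages with the /hass-config WebSocket endpoint: the browser sends the configuration filename once the socket opens, and the server streams back one message per output line followed by an exit message. A standalone Python sketch of those message shapes (inferred from the JavaScript above; the filename is a made-up example):

    import json

    # Client -> server, sent once the socket is open:
    start = json.dumps({'configuration': 'livingroom.yaml'})  # hypothetical filename

    # Server -> client, one message per output line, then a final exit message:
    line_msg = json.loads('{"event": "line", "data": "INFO Generating Home Assistant config..."}')
    exit_msg = json.loads('{"event": "exit", "code": 0}')
    assert line_msg['event'] == 'line' and exit_msg['code'] == 0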
@@ -27,17 +27,24 @@ OTA_VERSION_1_0 = 1
 MAGIC_BYTES = [0x6C, 0x26, 0xF7, 0x5C, 0x45]

 _LOGGER = logging.getLogger(__name__)
+LAST_PROGRESS = -1


 def update_progress(progress):
+    global LAST_PROGRESS
+
     bar_length = 60
     status = ""
     if progress >= 1:
         progress = 1
         status = "Done...\r\n"
+    new_progress = int(progress * 100)
+    if new_progress == LAST_PROGRESS:
+        return
+    LAST_PROGRESS = new_progress
     block = int(round(bar_length * progress))
     text = "\rUploading: [{0}] {1}% {2}".format("=" * block + " " * (bar_length - block),
-                                                int(progress * 100), status)
+                                                new_progress, status)
     sys.stderr.write(text)
     sys.stderr.flush()

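A standalone sketch (not from the commit) of the bar arithmetic above: the bar is 60 columns wide, one '=' per sixtieth of progress, and the LAST_PROGRESS guard keeps the same percentage from being rewritten to stderr repeatedly.

    bar_length = 60
    progress = 0.42  # hypothetical upload progress
    block = int(round(bar_length * progress))  # 25 filled columns
    text = "\rUploading: [{0}] {1}% {2}".format(
        "=" * block + " " * (bar_length - block), int(progress * 100), "")
    assert text.count("=") == 25 and "42%" in text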
@@ -1,10 +1,9 @@
 from __future__ import print_function

+from collections import OrderedDict, deque
 import inspect
 import logging
 import os
-import re
-
-from collections import OrderedDict, deque

 from esphomeyaml import core
 from esphomeyaml.const import CONF_AVAILABILITY, CONF_COMMAND_TOPIC, CONF_DISCOVERY, \
@@ -648,22 +647,6 @@ def setup_mqtt_component(obj, config):
                                            availability[CONF_PAYLOAD_NOT_AVAILABLE]))


-# shlex's quote for Python 2.7
-_find_unsafe = re.compile(r'[^\w@%+=:,./-]').search
-
-
-def quote(s):
-    """Return a shell-escaped version of the string *s*."""
-    if not s:
-        return u"''"
-    if _find_unsafe(s) is None:
-        return s
-
-    # use single quotes, and put single quotes into double quotes
-    # the string $'b is then quoted as '$'"'"'b'
-    return u"'" + s.replace(u"'", u"'\"'\"'") + u"'"
-
-
 def color(the_color, message='', reset=None):
     """Color helper."""
     from colorlog.escape_codes import escape_codes, parse_colors
211 esphomeyaml/platformio_api.py (new file)
@@ -0,0 +1,211 @@
+import json
+import logging
+import re
+import subprocess
+
+from esphomeyaml.const import CONF_BUILD_PATH, CONF_ESPHOMEYAML
+from esphomeyaml.helpers import relative_path
+from esphomeyaml.util import run_external_command
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def run_platformio_cli(*args, **kwargs):
+    import platformio.__main__
+
+    cmd = ['platformio'] + list(args)
+    return run_external_command(platformio.__main__.main,
+                                *cmd, **kwargs)
+
+
+def run_platformio_cli_run(config, verbose, *args, **kwargs):
+    build_path = relative_path(config[CONF_ESPHOMEYAML][CONF_BUILD_PATH])
+    command = ['run', '-d', build_path]
+    if verbose:
+        command += ['-v']
+    command += list(args)
+    return run_platformio_cli(*command, **kwargs)
+
+
+def run_compile(config, verbose):
+    return run_platformio_cli_run(config, verbose)
+
+
+def run_upload(config, verbose, port):
+    return run_platformio_cli_run(config, verbose, '-t', 'upload', '--upload-port', port)
+
+
+def run_idedata(config):
+    args = ['-t', 'idedata']
+    stdout = run_platformio_cli_run(config, False, *args, capture_stdout=True)
+    match = re.search(r'{.*}', stdout)
+    if match is None:
+        return IDEData(None)
+    try:
+        return IDEData(json.loads(match.group()))
+    except ValueError:
+        return IDEData(None)
+
+
+IDE_DATA = None
+
+
+def get_idedata(config):
+    global IDE_DATA
+
+    if IDE_DATA is None:
+        _LOGGER.info("Need to fetch platformio IDE-data, please stand by")
+        IDE_DATA = run_idedata(config)
+    return IDE_DATA
+
+
+# ESP logs stack trace decoder, based on https://github.com/me-no-dev/EspExceptionDecoder
+ESP8266_EXCEPTION_CODES = {
+    0: "Illegal instruction",
+    1: "SYSCALL instruction",
+    2: "InstructionFetchError: Processor internal physical address or data error during "
+       "instruction fetch",
+    3: "LoadStoreError: Processor internal physical address or data error during load or store",
+    4: "Level1Interrupt: Level-1 interrupt as indicated by set level-1 bits in the INTERRUPT "
+       "register",
+    5: "Alloca: MOVSP instruction, if caller's registers are not in the register file",
+    6: "IntegerDivideByZero: QUOS, QUOU, REMS, or REMU divisor operand is zero",
+    7: "reserved",
+    8: "Privileged: Attempt to execute a privileged operation when CRING ? 0",
+    9: "LoadStoreAlignmentCause: Load or store to an unaligned address",
+    10: "reserved",
+    11: "reserved",
+    12: "InstrPIFDataError: PIF data error during instruction fetch",
+    13: "LoadStorePIFDataError: Synchronous PIF data error during LoadStore access",
+    14: "InstrPIFAddrError: PIF address error during instruction fetch",
+    15: "LoadStorePIFAddrError: Synchronous PIF address error during LoadStore access",
+    16: "InstTLBMiss: Error during Instruction TLB refill",
+    17: "InstTLBMultiHit: Multiple instruction TLB entries matched",
+    18: "InstFetchPrivilege: An instruction fetch referenced a virtual address at a ring level "
+        "less than CRING",
+    19: "reserved",
+    20: "InstFetchProhibited: An instruction fetch referenced a page mapped with an attribute "
+        "that does not permit instruction fetch",
+    21: "reserved",
+    22: "reserved",
+    23: "reserved",
+    24: "LoadStoreTLBMiss: Error during TLB refill for a load or store",
+    25: "LoadStoreTLBMultiHit: Multiple TLB entries matched for a load or store",
+    26: "LoadStorePrivilege: A load or store referenced a virtual address at a ring level less "
+        "than ",
+    27: "reserved",
+    28: "LoadProhibited: A load referenced a page mapped with an attribute that does not permit "
+        "loads",
+    29: "StoreProhibited: A store referenced a page mapped with an attribute that does not permit "
+        "stores",
+}
+
+
+def _decode_pc(config, addr):
+    idedata = get_idedata(config)
+    if not idedata.addr2line_path or not idedata.firmware_elf_path:
+        return
+    command = [idedata.addr2line_path, '-pfiaC', '-e', idedata.firmware_elf_path, addr]
+    try:
+        translation = subprocess.check_output(command).strip()
+    except Exception:  # pylint: disable=broad-except
+        return
+
+    if "?? ??:0" in translation:
+        # Nothing useful
+        return
+    translation = translation.replace(' at ??:?', '').replace(':?', '')
+    _LOGGER.warning("Decoded %s", translation)
+
+
+def _parse_register(config, regex, line):
+    match = regex.match(line)
+    if match is not None:
+        _decode_pc(config, match.group(1))
+
+
+STACKTRACE_ESP8266_EXCEPTION_TYPE_RE = re.compile(r'Exception \(([0-9]*)\):')
+STACKTRACE_ESP8266_PC_RE = re.compile(r'epc1=0x(4[0-9a-fA-F]{7})')
+STACKTRACE_ESP8266_EXCVADDR_RE = re.compile(r'excvaddr=0x(4[0-9a-fA-F]{7})')
+STACKTRACE_ESP32_PC_RE = re.compile(r'PC\s*:\s*(?:0x)?(4[0-9a-fA-F]{7})')
+STACKTRACE_ESP32_EXCVADDR_RE = re.compile(r'EXCVADDR\s*:\s*(?:0x)?(4[0-9a-fA-F]{7})')
+STACKTRACE_BAD_ALLOC_RE = re.compile(r'^last failed alloc call: (4[0-9a-fA-F]{7})\((\d+)\)$')
+STACKTRACE_ESP32_BACKTRACE_RE = re.compile(r'Backtrace:(?:\s+0x4[0-9a-fA-F]{7}:0x3[0-9a-fA-F]{7})+')
+STACKTRACE_ESP32_BACKTRACE_PC_RE = re.compile(r'4[0-9a-f]{7}')
+STACKTRACE_ESP8266_BACKTRACE_PC_RE = re.compile(r'4[0-9a-f]{7}')
+
+
+def process_stacktrace(config, line, backtrace_state):
+    line = line.strip()
+    # ESP8266 Exception type
+    match = re.match(STACKTRACE_ESP8266_EXCEPTION_TYPE_RE, line)
+    if match is not None:
+        code = match.group(1)
+        _LOGGER.warning("Exception type: %s", ESP8266_EXCEPTION_CODES.get(code, 'unknown'))
+
+    # ESP8266 PC/EXCVADDR
+    _parse_register(config, STACKTRACE_ESP8266_PC_RE, line)
+    _parse_register(config, STACKTRACE_ESP8266_EXCVADDR_RE, line)
+    # ESP32 PC/EXCVADDR
+    _parse_register(config, STACKTRACE_ESP32_PC_RE, line)
+    _parse_register(config, STACKTRACE_ESP32_EXCVADDR_RE, line)
+
+    # bad alloc
+    match = re.match(STACKTRACE_BAD_ALLOC_RE, line)
+    if match is not None:
+        _LOGGER.warning("Memory allocation of %s bytes failed at %s",
+                        match.group(2), match.group(1))
+        _decode_pc(config, match.group(1))
+
+    # ESP32 single-line backtrace
+    match = re.match(STACKTRACE_ESP32_BACKTRACE_RE, line)
+    if match is not None:
+        _LOGGER.warning("Found stack trace! Trying to decode it")
+        for addr in re.finditer(STACKTRACE_ESP32_BACKTRACE_PC_RE, line):
+            _decode_pc(config, addr.group())
+
+    # ESP8266 multi-line backtrace
+    if '>>>stack>>>' in line:
+        # Start of backtrace
+        backtrace_state = True
+        _LOGGER.warning("Found stack trace! Trying to decode it")
+    elif '<<<stack<<<' in line:
+        # End of backtrace
+        backtrace_state = False
+
+    if backtrace_state:
+        for addr in re.finditer(STACKTRACE_ESP8266_BACKTRACE_PC_RE, line):
+            _decode_pc(config, addr.group())
+
+    return backtrace_state
+
+
+class IDEData(object):
+    def __init__(self, raw):
+        if not isinstance(raw, dict):
+            self.raw = {}
+        else:
+            self.raw = raw
+
+    @property
+    def firmware_elf_path(self):
+        return self.raw.get("prog_path")
+
+    @property
+    def flash_extra_images(self):
+        return [
+            (x['path'], x['offset']) for x in self.raw.get("flash_extra_images", [])
+        ]
+
+    @property
+    def cc_path(self):
+        # For example /Users/<USER>/.platformio/packages/toolchain-xtensa32/bin/xtensa-esp32-elf-gcc
+        return self.raw.get("cc_path")
+
+    @property
+    def addr2line_path(self):
+        cc_path = self.cc_path
+        if cc_path is None:
+            return None
+        # replace gcc at end with addr2line
+        return cc_path[:-3] + 'addr2line'
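A quick standalone check (not from the commit) of the backtrace matching above: the Backtrace: line is matched as a whole, and the individual 0x4xxxxxxx program counters are then pulled out and handed to addr2line. The crash line below uses made-up addresses.

    import re

    STACKTRACE_ESP32_BACKTRACE_RE = re.compile(
        r'Backtrace:(?:\s+0x4[0-9a-fA-F]{7}:0x3[0-9a-fA-F]{7})+')
    STACKTRACE_ESP32_BACKTRACE_PC_RE = re.compile(r'4[0-9a-f]{7}')

    line = "Backtrace: 0x400d1234:0x3ffb1f60 0x400e5678:0x3ffb1f80"  # hypothetical crash line
    assert STACKTRACE_ESP32_BACKTRACE_RE.match(line) is not None
    pcs = [m.group() for m in STACKTRACE_ESP32_BACKTRACE_PC_RE.finditer(line)]
    assert pcs == ['400d1234', '400e5678']  # addresses that would be decoded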
@@ -1,5 +1,12 @@
 from __future__ import print_function

+import io
+import logging
+import re
+import sys
+
+_LOGGER = logging.getLogger(__name__)
+

 class Registry(dict):
     def register(self, name):
@@ -30,3 +37,47 @@ def safe_print(message=""):
         print(message.encode('ascii', 'backslashreplace'))
     except UnicodeEncodeError:
         print("Cannot print line because of invalid locale!")
+
+
+def shlex_quote(s):
+    if not s:
+        return u"''"
+    if re.search(r'[^\w@%+=:,./-]', s) is None:
+        return s
+
+    return u"'" + s.replace(u"'", u"'\"'\"'") + u"'"
+
+
+def run_external_command(func, *cmd, **kwargs):
+    def mock_exit(return_code):
+        raise SystemExit(return_code)
+
+    orig_argv = sys.argv
+    orig_exit = sys.exit  # mock sys.exit
+    full_cmd = u' '.join(shlex_quote(x) for x in cmd)
+    _LOGGER.info(u"Running: %s", full_cmd)
+
+    capture_stdout = kwargs.get('capture_stdout', False)
+    if capture_stdout:
+        sys.stdout = io.BytesIO()
+
+    try:
+        sys.argv = list(cmd)
+        sys.exit = mock_exit
+        return func() or 0
+    except KeyboardInterrupt:
+        return 1
+    except SystemExit as err:
+        return err.args[0]
+    except Exception as err:  # pylint: disable=broad-except
+        _LOGGER.error(u"Running command failed: %s", err)
+        _LOGGER.error(u"Please try running %s locally.", full_cmd)
+    finally:
+        sys.argv = orig_argv
+        sys.exit = orig_exit
+
+        if capture_stdout:
+            # pylint: disable=lost-exception
+            stdout = sys.stdout.getvalue()
+            sys.stdout = sys.__stdout__
+            return stdout
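A standalone sketch (not from the commit) of the quoting behaviour added above: strings containing only shell-safe characters pass through unchanged, anything else is wrapped in single quotes with embedded quotes escaped, and the empty string becomes ''.

    import re

    def shlex_quote(s):
        # copy of the helper above, for a self-contained example
        if not s:
            return u"''"
        if re.search(r'[^\w@%+=:,./-]', s) is None:
            return s
        return u"'" + s.replace(u"'", u"'\"'\"'") + u"'"

    assert shlex_quote('firmware.bin') == 'firmware.bin'
    assert shlex_quote('my config.yaml') == "'my config.yaml'"
    assert shlex_quote("it's") == "'it'\"'\"'s'"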