import codecs
from contextlib import suppress
import logging
import os
from pathlib import Path
import platform
import re
import tempfile
from typing import Union
from urllib.parse import urlparse

_LOGGER = logging.getLogger(__name__)

IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"
IS_LINUX = platform.system() == "Linux"


def ensure_unique_string(preferred_string, current_strings):
    """Return preferred_string, appending _2, _3, ... if it is already in current_strings."""
    test_string = preferred_string
    current_strings_set = set(current_strings)

    tries = 1

    while test_string in current_strings_set:
        tries += 1
        test_string = f"{preferred_string}_{tries}"

    return test_string


def indent_all_but_first_and_last(text, padding=" "):
    lines = text.splitlines(True)
    if len(lines) <= 2:
        return text
    return lines[0] + "".join(padding + line for line in lines[1:-1]) + lines[-1]


def indent_list(text, padding=" "):
    return [padding + line for line in text.splitlines()]


def indent(text, padding=" "):
    return "\n".join(indent_list(text, padding))


# From https://stackoverflow.com/a/14945195/8924614
def cpp_string_escape(string, encoding="utf-8"):
    """Escape a string (or bytes) into a double-quoted C++ string literal."""

    def _should_escape(byte: int) -> bool:
        if not 32 <= byte < 127:
            return True
        if byte in (ord("\\"), ord('"')):
            return True
        return False

    if isinstance(string, str):
        string = string.encode(encoding)
    result = ""
    for character in string:
        if _should_escape(character):
            result += f"\\{character:03o}"
        else:
            result += chr(character)
    return f'"{result}"'
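

# Example (for illustration): quotes, backslashes and non-printable bytes are
# emitted as three-digit octal escapes, so cpp_string_escape('say "hi"\n')
# produces the C++ literal text
#     "say \042hi\042\012"
# (0o42 is '"' and 0o12 is '\n'); other printable ASCII passes through as-is.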


def run_system_command(*args):
    """Run a command and return a (returncode, stdout, stderr) tuple, with output as bytes."""
    import subprocess

    with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
        stdout, stderr = p.communicate()
        rc = p.returncode
        return rc, stdout, stderr


def mkdir_p(path):
    """Create path (and any missing parents), ignoring an already-existing directory."""
    if not path:
        # Empty path - means create current dir
        return
    try:
        os.makedirs(path)
    except OSError as err:
        import errno

        if err.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            from esphome.core import EsphomeError

            raise EsphomeError(f"Error creating directories {path}: {err}") from err


def is_ip_address(host):
    """Naive check for a dotted-quad IPv4 address (does not validate the 0-255 range)."""
    parts = host.split(".")
    if len(parts) != 4:
        return False
    try:
        for p in parts:
            int(p)
        return True
    except ValueError:
        return False


def _resolve_with_zeroconf(host):
    from esphome.core import EsphomeError
    from esphome.zeroconf import EsphomeZeroconf

    try:
        zc = EsphomeZeroconf()
    except Exception as err:
        raise EsphomeError(
            "Cannot start mDNS sockets, is this a docker container without "
            "host network mode?"
        ) from err
    try:
        info = zc.resolve_host(f"{host}.")
    except Exception as err:
        raise EsphomeError(f"Error resolving mDNS hostname: {err}") from err
    finally:
        zc.close()
    if info is None:
        raise EsphomeError(
            "Error resolving address with mDNS: Did not respond. "
            "Maybe the device is offline."
        )
    return info


def resolve_ip_address(host):
    """Resolve a hostname to an IP address, preferring mDNS for .local names."""
    import socket

    from esphome.core import EsphomeError

    errs = []

    if host.endswith(".local"):
        try:
            return _resolve_with_zeroconf(host)
        except EsphomeError as err:
            errs.append(str(err))

    try:
        host_url = host if (urlparse(host).scheme != "") else "http://" + host
        return socket.gethostbyname(urlparse(host_url).hostname)
    except OSError as err:
        errs.append(str(err))
        raise EsphomeError(f"Error resolving IP address: {', '.join(errs)}") from err


def get_bool_env(var, default=False):
    """Return the environment variable var interpreted as a boolean."""
    value = os.getenv(var, default)
    if isinstance(value, str):
        value = value.lower()
        if value in ["1", "true"]:
            return True
        if value in ["0", "false"]:
            return False
    return bool(value)
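

# Example (for illustration): string values are matched after lowercasing, so
# "0"/"false" count as False even though bool("0") would be True:
#     ESPHOME_IS_HA_ADDON=0    -> get_bool_env("ESPHOME_IS_HA_ADDON") is False
#     ESPHOME_IS_HA_ADDON=true -> get_bool_env("ESPHOME_IS_HA_ADDON") is True
# An unset variable falls back to bool(default).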


def get_str_env(var, default=None):
    return str(os.getenv(var, default))


def get_int_env(var, default=0):
    return int(os.getenv(var, default))


def is_ha_addon():
    return get_bool_env("ESPHOME_IS_HA_ADDON")


def walk_files(path):
    """Recursively yield the paths of all files below path."""
    for root, _, files in os.walk(path):
        for name in files:
            yield os.path.join(root, name)


def read_file(path):
    try:
        with codecs.open(path, "r", encoding="utf-8") as f_handle:
            return f_handle.read()
    except OSError as err:
        from esphome.core import EsphomeError

        raise EsphomeError(f"Error reading file {path}: {err}") from err
    except UnicodeDecodeError as err:
        from esphome.core import EsphomeError

        raise EsphomeError(f"Error reading file {path}: {err}") from err


def _write_file(path: Union[Path, str], text: Union[str, bytes]):
    """Atomically writes `text` to the given path.

    Automatically creates all parent directories.
    """
    if not isinstance(path, Path):
        path = Path(path)
    data = text
    if isinstance(text, str):
        data = text.encode()

    directory = path.parent
    directory.mkdir(exist_ok=True, parents=True)

    tmp_path = None
    try:
        with tempfile.NamedTemporaryFile(
            mode="wb", dir=directory, delete=False
        ) as f_handle:
            tmp_path = f_handle.name
            f_handle.write(data)
        # Newer tempfile implementations create the file with mode 0o600
        os.chmod(tmp_path, 0o644)
        # If destination exists, will be overwritten
        os.replace(tmp_path, path)
    finally:
        if tmp_path is not None and os.path.exists(tmp_path):
            try:
                os.remove(tmp_path)
            except OSError as err:
                _LOGGER.error("Write file cleanup failed: %s", err)
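

# The write-to-a-tempfile-then-os.replace() sequence above is what provides the
# atomicity: the temporary file lives in the destination directory (same
# filesystem), so os.replace() is a single rename and readers see either the
# old contents or the complete new contents, never a partial write.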


def write_file(path: Union[Path, str], text: str):
    """Atomically write text to the given path, raising EsphomeError on OS errors."""
    try:
        _write_file(path, text)
    except OSError as err:
        from esphome.core import EsphomeError

        raise EsphomeError(f"Could not write file at {path}") from err


def write_file_if_changed(path: Union[Path, str], text: str) -> bool:
    """Write text to the given path, but not if the contents match already.

    Returns true if the file was changed.
    """
    if not isinstance(path, Path):
        path = Path(path)

    src_content = None
    if path.is_file():
        src_content = read_file(path)
    if src_content == text:
        return False
    write_file(path, text)
    return True


def copy_file_if_changed(src: os.PathLike, dst: os.PathLike) -> None:
    """Copy src to dst, but only if their contents differ."""
    import shutil

    if file_compare(src, dst):
        return
    mkdir_p(os.path.dirname(dst))
    try:
        shutil.copyfile(src, dst)
    except OSError as err:
        if isinstance(err, PermissionError):
            # Older esphome versions copied over the src file permissions too.
            # So when the src file had 444 permissions, the dst file would have those
            # too and subsequent writes would fail

            # -> delete file (it would be overwritten anyway), and try again
            # if that fails, use normal error handler
            with suppress(OSError):
                os.unlink(dst)
                shutil.copyfile(src, dst)
                return

        from esphome.core import EsphomeError

        raise EsphomeError(f"Error copying file {src} to {dst}: {err}") from err


def list_starts_with(list_, sub):
    """Return True if list_ starts with the elements of sub."""
    return len(sub) <= len(list_) and all(list_[i] == x for i, x in enumerate(sub))


def file_compare(path1: os.PathLike, path2: os.PathLike) -> bool:
    """Return True if the files path1 and path2 have the same contents."""
    import stat

    try:
        stat1, stat2 = os.stat(path1), os.stat(path2)
    except OSError:
        # File doesn't exist or another error -> not equal
        return False

    if (
        stat.S_IFMT(stat1.st_mode) != stat.S_IFREG
        or stat.S_IFMT(stat2.st_mode) != stat.S_IFREG
    ):
        # At least one of them is not a regular file (or does not exist)
        return False
    if stat1.st_size != stat2.st_size:
        # Different sizes
        return False

    bufsize = 8 * 1024
    # Read files in blocks until a mismatch is found
    with open(path1, "rb") as fh1, open(path2, "rb") as fh2:
        while True:
            blob1, blob2 = fh1.read(bufsize), fh2.read(bufsize)
            if blob1 != blob2:
                # Different content
                return False
            if not blob1:
                # Reached end
                return True


# A dict of types that need to be converted to heaptypes before a class can be added
# to the object
_TYPE_OVERLOADS = {
    int: type("EInt", (int,), {}),
    float: type("EFloat", (float,), {}),
    str: type("EStr", (str,), {}),
    dict: type("EDict", (dict,), {}),
    list: type("EList", (list,), {}),
}

# cache created classes here
_CLASS_LOOKUP = {}


def add_class_to_obj(value, cls):
    """Add a class to a python type.

    This function modifies value so that it has cls as a basetype.
    The value itself may be modified by this action! You must use the return
    value of this function however, since some types need to be copied first (heaptypes).
    """
    if isinstance(value, cls):
        # If already is instance, do not add
        return value

    try:
        orig_cls = value.__class__
        key = (orig_cls, cls)
        new_cls = _CLASS_LOOKUP.get(key)
        if new_cls is None:
            new_cls = orig_cls.__class__(orig_cls.__name__, (orig_cls, cls), {})
            _CLASS_LOOKUP[key] = new_cls
        value.__class__ = new_cls
        return value
    except TypeError:
        # Non heap type, look in overloads dict
        for type_, func in _TYPE_OVERLOADS.items():
            # Use type() here, we only need to trigger if it's the exact type,
            # as otherwise we don't need to overload the class
            if type(value) is type_:  # pylint: disable=unidiomatic-typecheck
                return add_class_to_obj(func(value), cls)
        raise
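

# Example (for illustration, using a hypothetical marker class): int is not a
# heap type, so the value is first rewrapped as the EInt overload above and the
# marker is then mixed in -- which is why the return value must always be used:
#
#     class FromEnvVar:
#         pass
#
#     marked = add_class_to_obj(42, FromEnvVar)
#     isinstance(marked, int)         # True
#     isinstance(marked, FromEnvVar)  # True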


def snake_case(value):
    """Same behaviour as `helpers.cpp` method `str_snake_case`."""
    return value.replace(" ", "_").lower()


_DISALLOWED_CHARS = re.compile(r"[^a-zA-Z0-9-_]")


def sanitize(value):
    """Same behaviour as `helpers.cpp` method `str_sanitize`."""
    return _DISALLOWED_CHARS.sub("_", value)