2019-04-17 12:06:00 +02:00
|
|
|
import collections
|
2018-04-07 01:23:03 +02:00
|
|
|
import logging
|
2018-12-05 21:22:06 +01:00
|
|
|
import re
|
2018-04-07 01:23:03 +02:00
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
# pylint: disable=unused-import, wrong-import-order
|
|
|
|
from contextlib import contextmanager
|
|
|
|
|
2018-04-07 01:23:03 +02:00
|
|
|
import voluptuous as vol
|
|
|
|
|
2021-05-07 20:02:17 +02:00
|
|
|
from esphome import core, yaml_util, loader
|
|
|
|
import esphome.core.config as core_config
|
2021-03-07 20:03:16 +01:00
|
|
|
from esphome.const import (
|
|
|
|
CONF_ESPHOME,
|
|
|
|
CONF_PLATFORM,
|
|
|
|
CONF_PACKAGES,
|
|
|
|
CONF_SUBSTITUTIONS,
|
2021-05-07 20:02:17 +02:00
|
|
|
CONF_EXTERNAL_COMPONENTS,
|
2021-03-07 20:03:16 +01:00
|
|
|
)
|
2021-05-07 20:02:17 +02:00
|
|
|
from esphome.core import CORE, EsphomeError
|
2021-04-08 13:58:01 +02:00
|
|
|
from esphome.helpers import indent
|
2019-04-17 12:06:00 +02:00
|
|
|
from esphome.util import safe_print, OrderedDict
|
2018-04-07 01:23:03 +02:00
|
|
|
|
2021-05-07 20:02:17 +02:00
|
|
|
from typing import List, Optional, Tuple, Union
|
|
|
|
from esphome.loader import get_component, get_platform, ComponentManifest
|
2019-12-04 15:58:40 +01:00
|
|
|
from esphome.yaml_util import is_secret, ESPHomeDataBase, ESPForceValue
|
2019-02-26 19:22:33 +01:00
|
|
|
from esphome.voluptuous_schema import ExtraKeysInvalid
|
2021-04-08 13:58:01 +02:00
|
|
|
from esphome.log import color, Fore
|
2021-06-17 21:54:14 +02:00
|
|
|
import esphome.final_validate as fv
|
|
|
|
import esphome.config_validation as cv
|
|
|
|
from esphome.types import ConfigType, ConfigPathType, ConfigFragmentType
|
2018-12-05 21:22:06 +01:00
|
|
|
|
2018-04-07 01:23:03 +02:00
|
|
|
_LOGGER = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2018-04-18 18:43:13 +02:00
|
|
|
def iter_components(config):
    """Yield ``(name, component, config)`` triples for every component in *config*.

    For ``multi_conf`` components one triple is yielded per list entry.
    Platform components additionally yield one triple per platform entry,
    with the name formatted as ``"<domain>.<platform>"``.
    """
    for domain, conf in config.items():
        component = get_component(domain)
        if component.multi_conf:
            # conf is a list of individual entries for multi_conf components
            for conf_ in conf:
                yield domain, component, conf_
        else:
            yield domain, component, conf
        if component.is_platform_component:
            # Each platform entry carries its own 'platform' key
            for p_config in conf:
                p_name = f"{domain}.{p_config[CONF_PLATFORM]}"
                platform = get_platform(domain, p_config[CONF_PLATFORM])
                yield p_name, platform, p_config
|
2018-04-07 01:23:03 +02:00
|
|
|
|
|
|
|
|
2019-01-02 14:11:11 +01:00
|
|
|
# A path into the nested config tree: a sequence of dict keys (str) and list indices (int).
ConfigPath = List[Union[str, int]]
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
def _path_begins_with(path, other): # type: (ConfigPath, ConfigPath) -> bool
|
2018-12-05 21:22:06 +01:00
|
|
|
if len(path) < len(other):
|
|
|
|
return False
|
2021-03-07 20:03:16 +01:00
|
|
|
return path[: len(other)] == other
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
|
2021-06-17 21:54:14 +02:00
|
|
|
class Config(OrderedDict, fv.FinalValidateConfig):
    """The result of config validation.

    Behaves like the validated config mapping itself (an ``OrderedDict``)
    while also tracking validation errors, output paths for display, and
    declared IDs.
    """

    def __init__(self):
        super().__init__()
        # A list of voluptuous errors
        self.errors = []  # type: List[vol.Invalid]
        # A list of paths that should be fully outputted
        # The values will be the paths to all "domain", for example (['logger'], 'logger')
        # or (['sensor', 'ultrasonic'], 'sensor.ultrasonic')
        self.output_paths = []  # type: List[Tuple[ConfigPath, str]]
        # A list of components ids with the config path
        self.declare_ids = []  # type: List[Tuple[core.ID, ConfigPath]]
        # Scratch storage for final-validate steps (exposed via the `data` property)
        self._data = {}

    def add_error(self, error):
        # type: (vol.Invalid) -> None
        """Record a validation error; MultipleInvalid is flattened recursively."""
        if isinstance(error, vol.MultipleInvalid):
            for err in error.errors:
                self.add_error(err)
            return
        if cv.ROOT_CONFIG_PATH in error.path:
            # Root value means that the path before the root should be ignored
            last_root = max(
                i for i, v in enumerate(error.path) if v is cv.ROOT_CONFIG_PATH
            )
            error.path = error.path[last_root + 1 :]
        self.errors.append(error)

    @contextmanager
    def catch_error(self, path=None):
        """Context manager that converts a raised vol.Invalid into a recorded
        error, prefixing its path with *path*."""
        path = path or []
        try:
            yield
        except vol.Invalid as e:
            e.prepend(path)
            self.add_error(e)

    def add_str_error(self, message, path):
        # type: (str, ConfigPath) -> None
        """Record an error built from a plain message string at *path*."""
        self.add_error(vol.Invalid(message, path))

    def add_output_path(self, path, domain):
        # type: (ConfigPath, str) -> None
        """Mark *path* as a section that should be fully printed, labeled *domain*."""
        self.output_paths.append((path, domain))

    def remove_output_path(self, path, domain):
        # type: (ConfigPath, str) -> None
        """Remove a previously added output path (exact (path, domain) match)."""
        self.output_paths.remove((path, domain))

    def is_in_error_path(self, path):
        # type: (ConfigPath) -> bool
        """Return True if any recorded error lies at or below *path*."""
        for err in self.errors:
            if _path_begins_with(err.path, path):
                return True
        return False

    def set_by_path(self, path, value):
        """Replace the config fragment at *path* with *value* in place."""
        conf = self
        for key in path[:-1]:
            conf = conf[key]
        conf[path[-1]] = value

    def get_error_for_path(self, path):
        # type: (ConfigPath) -> Optional[vol.Invalid]
        """Return the first error whose deepest reachable path equals *path*."""
        for err in self.errors:
            if self.get_deepest_path(err.path) == path:
                return err
        return None

    def get_deepest_document_range_for_path(self, path):
        # type: (ConfigPath) -> Optional[ESPHomeDataBase]
        """Walk *path* and return the most specific YAML source range found.

        Falls back to the last range seen when the walk fails partway.
        """
        data = self
        doc_range = None
        for item_index in path:
            try:
                if item_index in data:
                    # Dict keys may themselves carry source info; prefer the key's range
                    doc_range = [x for x in data.keys() if x == item_index][0].esp_range
                data = data[item_index]
            except (KeyError, IndexError, TypeError, AttributeError):
                return doc_range
            if isinstance(data, core.ID):
                data = data.id
            if isinstance(data, ESPHomeDataBase) and data.esp_range is not None:
                doc_range = data.esp_range
            elif isinstance(data, dict):
                # For platform entries, point at the 'platform' value's location
                platform_item = data.get("platform")
                if (
                    isinstance(platform_item, ESPHomeDataBase)
                    and platform_item.esp_range is not None
                ):
                    doc_range = platform_item.esp_range

        return doc_range

    def get_nested_item(
        self, path: ConfigPathType, raise_error: bool = False
    ) -> ConfigFragmentType:
        """Return the config fragment at *path*.

        On a missing/invalid path, raises when *raise_error* is True,
        otherwise returns an empty dict.
        """
        data = self
        for item_index in path:
            try:
                data = data[item_index]
            except (KeyError, IndexError, TypeError):
                if raise_error:
                    raise
                return {}
        return data

    def get_deepest_path(self, path):
        # type: (ConfigPath) -> ConfigPath
        """Return the path that is the deepest reachable by following path."""
        data = self
        part = []
        for item_index in path:
            try:
                data = data[item_index]
            except (KeyError, IndexError, TypeError):
                return part
            part.append(item_index)
        return part

    def get_path_for_id(self, id: core.ID):
        """Return the config fragment where the given ID is declared."""
        for declared_id, path in self.declare_ids:
            if declared_id.id == str(id):
                return path
        raise KeyError(f"ID {id} not found in configuration")

    def get_config_for_path(self, path: ConfigPathType) -> ConfigFragmentType:
        """Return the fragment at *path*, raising if the path is invalid."""
        return self.get_nested_item(path, raise_error=True)

    @property
    def data(self):
        """Return temporary data used by final validation functions."""
        return self._data
|
2021-05-31 00:06:45 +02:00
|
|
|
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
def iter_ids(config, path=None):
    """Recursively yield ``(ID, path)`` pairs for every core.ID in *config*.

    Lambdas contribute the IDs they reference; lists and dicts are
    traversed with the index/key appended to the path.
    """
    path = path if path is not None else []
    if isinstance(config, core.ID):
        yield config, path
    elif isinstance(config, core.Lambda):
        # Lambdas reference IDs without declaring them
        for required in config.requires_ids:
            yield required, path
    elif isinstance(config, list):
        for index, element in enumerate(config):
            yield from iter_ids(element, path + [index])
    elif isinstance(config, dict):
        for key, child in config.items():
            yield from iter_ids(child, path + [key])
|
2018-06-02 22:22:20 +02:00
|
|
|
|
|
|
|
|
2018-12-05 21:22:06 +01:00
|
|
|
def do_id_pass(result):  # type: (Config) -> None
    """Resolve and cross-check all IDs in a validated config.

    Collects declared IDs (rejecting duplicates), resolves auto-generated
    IDs, then matches every ID *use* against the declarations — by name
    when given explicitly, otherwise by searching for a unique declared ID
    whose type is compatible. Errors are recorded on *result*.
    """
    from esphome.cpp_generator import MockObjClass
    from esphome.cpp_types import Component

    # IDs that are *used* (not declared) and still need to be matched up
    searching_ids = []  # type: List[Tuple[core.ID, ConfigPath]]
    for id, path in iter_ids(result):
        if id.is_declaration:
            if id.id is not None:
                # Look for duplicate definitions
                match = next((v for v in result.declare_ids if v[0].id == id.id), None)
                if match is not None:
                    opath = "->".join(str(v) for v in match[1])
                    result.add_str_error(f"ID {id.id} redefined! Check {opath}", path)
                    continue
            result.declare_ids.append((id, path))
        else:
            searching_ids.append((id, path))
    # Resolve default ids after manual IDs
    for id, _ in result.declare_ids:
        id.resolve([v[0].id for v in result.declare_ids])
        if isinstance(id.type, MockObjClass) and id.type.inherits_from(Component):
            CORE.component_ids.add(id.id)

    # Check searched IDs
    for id, path in searching_ids:
        if id.id is not None:
            # manually declared
            match = next((v[0] for v in result.declare_ids if v[0].id == id.id), None)
            if match is None or not match.is_manual:
                # No declared ID with this name
                import difflib

                error = (
                    "Couldn't find ID '{}'. Please check you have defined "
                    "an ID with that name in your configuration.".format(id.id)
                )
                # Find candidates
                matches = difflib.get_close_matches(
                    id.id, [v[0].id for v in result.declare_ids if v[0].is_manual]
                )
                if matches:
                    matches_s = ", ".join(f'"{x}"' for x in matches)
                    error += f" These IDs look similar: {matches_s}."
                result.add_str_error(error, path)
                continue
            # Type compatibility can only be checked for MockObjClass types
            if not isinstance(match.type, MockObjClass) or not isinstance(
                id.type, MockObjClass
            ):
                continue
            if not match.type.inherits_from(id.type):
                result.add_str_error(
                    "ID '{}' of type {} doesn't inherit from {}. Please "
                    "double check your ID is pointing to the correct value"
                    "".format(id.id, match.type, id.type),
                    path,
                )

        # No explicit ID given: try to infer it from the declared IDs by type
        if id.id is None and id.type is not None:
            matches = []
            for v in result.declare_ids:
                if v[0] is None or not isinstance(v[0].type, MockObjClass):
                    continue
                inherits = v[0].type.inherits_from(id.type)
                if inherits:
                    matches.append(v[0])

            if len(matches) == 0:
                result.add_str_error(
                    f"Couldn't find any component that can be used for '{id.type}'. Are you missing a hub declaration?",
                    path,
                )
            elif len(matches) == 1:
                id.id = matches[0].id
            elif len(matches) > 1:
                # Ambiguous; RealTimeClock keeps legacy first-match behavior
                if str(id.type) == "time::RealTimeClock":
                    id.id = matches[0].id
                else:
                    manual_declared_count = sum(1 for m in matches if m.is_manual)
                    if manual_declared_count > 0:
                        ids = ", ".join([f"'{m.id}'" for m in matches if m.is_manual])
                        result.add_str_error(
                            f"Too many candidates found for '{path[-1]}' type '{id.type}' {'Some are' if manual_declared_count > 1 else 'One is'} {ids}",
                            path,
                        )
                    else:
                        result.add_str_error(
                            f"Too many candidates found for '{path[-1]}' type '{id.type}' You must assign an explicit ID to the parent component you want to use.",
                            path,
                        )
|
2018-06-02 22:22:20 +02:00
|
|
|
|
|
|
|
|
2019-12-04 15:58:40 +01:00
|
|
|
def recursive_check_replaceme(value):
    """Recursively reject the literal placeholder string 'REPLACEME'.

    Sample configurations use 'REPLACEME' as a fill-me-in marker; finding
    it in a real config is almost certainly a mistake, so raise
    ``cv.Invalid``. Values explicitly tagged with ``!force`` (ESPForceValue)
    are allowed through — this matches the escape hatch advertised in the
    error message. Returns *value* unchanged when no placeholder is found.
    """
    if isinstance(value, list):
        return cv.Schema([recursive_check_replaceme])(value)
    if isinstance(value, dict):
        return cv.Schema({cv.valid: recursive_check_replaceme})(value)
    if isinstance(value, ESPForceValue):
        # Explicitly forced with "!force"; skip the REPLACEME check.
        # (Previously this branch was a no-op `pass`, so even
        # `!force REPLACEME` raised, contradicting the error text below.)
        return value
    if isinstance(value, str) and value == "REPLACEME":
        raise cv.Invalid(
            "Found 'REPLACEME' in configuration, this is most likely an error. "
            "Please make sure you have replaced all fields from the sample "
            "configuration.\n"
            "If you want to use the literal REPLACEME string, "
            'please use "!force REPLACEME"'
        )
    return value
|
|
|
|
|
|
|
|
|
2020-06-21 20:33:01 +02:00
|
|
|
def validate_config(config, command_line_substitutions):
    """Validate a raw YAML config tree and return a populated Config.

    Runs the full multi-stage pipeline: packages, substitutions,
    REPLACEME check, external components, core config preload, component
    loading (with AUTO_LOAD), metadata checks (dependencies, conflicts,
    platform support, multi_conf), schema validation, the ID pass, and
    finally per-component final validation. Errors are accumulated on the
    returned Config rather than raised, except that some early stages
    abort and return immediately.
    """
    result = Config()

    loader.clear_component_meta_finders()
    loader.install_custom_components_meta_finder()

    # 0. Load packages
    if CONF_PACKAGES in config:
        from esphome.components.packages import do_packages_pass

        result.add_output_path([CONF_PACKAGES], CONF_PACKAGES)
        try:
            config = do_packages_pass(config)
        except vol.Invalid as err:
            result.update(config)
            result.add_error(err)
            return result

    # 1. Load substitutions
    if CONF_SUBSTITUTIONS in config:
        from esphome.components import substitutions

        # Command-line substitutions override those from the YAML file
        result[CONF_SUBSTITUTIONS] = {
            **config[CONF_SUBSTITUTIONS],
            **command_line_substitutions,
        }
        result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS)
        try:
            substitutions.do_substitution_pass(config, command_line_substitutions)
        except vol.Invalid as err:
            result.add_error(err)
            return result

    # 1.1. Check for REPLACEME special value
    try:
        recursive_check_replaceme(config)
    except vol.Invalid as err:
        result.add_error(err)

    # 1.2. Load external_components
    if CONF_EXTERNAL_COMPONENTS in config:
        from esphome.components.external_components import do_external_components_pass

        result.add_output_path([CONF_EXTERNAL_COMPONENTS], CONF_EXTERNAL_COMPONENTS)
        try:
            do_external_components_pass(config)
        except vol.Invalid as err:
            result.update(config)
            result.add_error(err)
            return result

    # Legacy rename: esphomeyaml -> esphome (1.11.0)
    if "esphomeyaml" in config:
        _LOGGER.warning(
            "The esphomeyaml section has been renamed to esphome in 1.11.0. "
            "Please replace 'esphomeyaml:' in your configuration with 'esphome:'."
        )
        config[CONF_ESPHOME] = config.pop("esphomeyaml")

    if CONF_ESPHOME not in config:
        result.add_str_error(
            "'esphome' section missing from configuration. Please make sure "
            "your configuration has an 'esphome:' line in it.",
            [],
        )
        return result

    # 2. Load partial core config
    result[CONF_ESPHOME] = config[CONF_ESPHOME]
    result.add_output_path([CONF_ESPHOME], CONF_ESPHOME)
    try:
        core_config.preload_core_config(config)
    except vol.Invalid as err:
        result.add_error(err)
        return result
    # Remove temporary esphome config path again, it will be reloaded later
    result.remove_output_path([CONF_ESPHOME], CONF_ESPHOME)

    # 3. Load components.
    # Load components (also AUTO_LOAD) and set output paths of result
    # Queue of items to load, FIFO
    load_queue = collections.deque()
    for domain, conf in config.items():
        load_queue.append((domain, conf))

    # List of items to enter next stage
    check_queue = (
        []
    )  # type: List[Tuple[ConfigPath, str, ConfigType, ComponentManifest]]

    # This step handles:
    # - Adding output path
    # - Auto Load
    # - Loading configs into result

    while load_queue:
        domain, conf = load_queue.popleft()
        if domain.startswith("."):
            # Ignore top-level keys starting with a dot
            continue
        result.add_output_path([domain], domain)
        result[domain] = conf
        component = get_component(domain)
        path = [domain]
        if component is None:
            result.add_str_error(f"Component not found: {domain}", path)
            continue
        CORE.loaded_integrations.add(domain)

        # Process AUTO_LOAD
        for load in component.auto_load:
            if load not in config:
                load_conf = core.AutoLoad()
                config[load] = load_conf
                load_queue.append((load, load_conf))

        if not component.is_platform_component:
            check_queue.append(([domain], domain, conf, component))
            continue

        # This is a platform component, proceed to reading platform entries
        # Remove this is as an output path
        result.remove_output_path([domain], domain)

        # Ensure conf is a list
        if not conf:
            result[domain] = conf = []
        elif not isinstance(conf, list):
            result[domain] = conf = [conf]

        for i, p_config in enumerate(conf):
            path = [domain, i]
            # Construct temporary unknown output path
            p_domain = f"{domain}.unknown"
            result.add_output_path(path, p_domain)
            result[domain][i] = p_config
            if not isinstance(p_config, dict):
                result.add_str_error("Platform schemas must be key-value pairs.", path)
                continue
            p_name = p_config.get("platform")
            if p_name is None:
                result.add_str_error("No platform specified! See 'platform' key.", path)
                continue
            # Remove temp output path and construct new one
            result.remove_output_path(path, p_domain)
            p_domain = f"{domain}.{p_name}"
            result.add_output_path(path, p_domain)
            # Try Load platform
            platform = get_platform(domain, p_name)
            if platform is None:
                result.add_str_error(f"Platform not found: '{p_domain}'", path)
                continue
            CORE.loaded_integrations.add(p_name)

            # Process AUTO_LOAD
            for load in platform.auto_load:
                if load not in config:
                    load_conf = core.AutoLoad()
                    config[load] = load_conf
                    load_queue.append((load, load_conf))

            check_queue.append((path, p_domain, p_config, platform))

    # 4. Validate component metadata, including
    # - Transformation (nullable, multi conf)
    # - Dependencies
    # - Conflicts
    # - Supported ESP Platform

    # List of items to proceed to next stage
    validate_queue = []  # type: List[Tuple[ConfigPath, ConfigType, ComponentManifest]]
    for path, domain, conf, comp in check_queue:
        if conf is None:
            result[domain] = conf = {}

        success = True
        for dependency in comp.dependencies:
            if dependency not in config:
                result.add_str_error(
                    "Component {} requires component {}" "".format(domain, dependency),
                    path,
                )
                success = False
        if not success:
            continue

        success = True
        for conflict in comp.conflicts_with:
            if conflict in config:
                result.add_str_error(
                    "Component {} cannot be used together with component {}"
                    "".format(domain, conflict),
                    path,
                )
                success = False
        if not success:
            continue

        if CORE.esp_platform not in comp.esp_platforms:
            result.add_str_error(
                "Component {} doesn't support {}.".format(domain, CORE.esp_platform),
                path,
            )
            continue

        if (
            not comp.is_platform_component
            and comp.config_schema is None
            and not isinstance(conf, core.AutoLoad)
        ):
            result.add_str_error(
                "Component {} cannot be loaded via YAML "
                "(no CONFIG_SCHEMA).".format(domain),
                path,
            )
            continue

        if comp.multi_conf:
            if not isinstance(conf, list):
                result[domain] = conf = [conf]
            # multi_conf may be an int cap on the number of entries
            if not isinstance(comp.multi_conf, bool) and len(conf) > comp.multi_conf:
                result.add_str_error(
                    "Component {} supports a maximum of {} "
                    "entries ({} found).".format(domain, comp.multi_conf, len(conf)),
                    path,
                )
                continue
            for i, part_conf in enumerate(conf):
                validate_queue.append((path + [i], part_conf, comp))
            continue

        validate_queue.append((path, conf, comp))

    # 5. Validate configuration schema
    for path, conf, comp in validate_queue:
        if comp.config_schema is None:
            continue
        with result.catch_error(path):
            if comp.is_platform:
                # Remove 'platform' key for validation
                input_conf = OrderedDict(conf)
                platform_val = input_conf.pop("platform")
                validated = comp.config_schema(input_conf)
                # Ensure result is OrderedDict so we can call move_to_end
                if not isinstance(validated, OrderedDict):
                    validated = OrderedDict(validated)
                validated["platform"] = platform_val
                validated.move_to_end("platform", last=False)
                result.set_by_path(path, validated)
            else:
                validated = comp.config_schema(conf)
                result.set_by_path(path, validated)

    # 6. If no validation errors, check IDs
    if not result.errors:
        # Only parse IDs if no validation error. Otherwise
        # user gets confusing messages
        do_id_pass(result)

    # 7. Final validation
    if not result.errors:
        # Inter - components validation
        token = fv.full_config.set(result)

        for path, _, comp in validate_queue:
            if comp.final_validate_schema is None:
                continue
            conf = result.get_nested_item(path)
            with result.catch_error(path):
                comp.final_validate_schema(conf)

        fv.full_config.reset(token)

    return result
|
|
|
|
|
|
|
|
|
2018-12-05 21:22:06 +01:00
|
|
|
def _nested_getitem(data, path):
|
|
|
|
for item_index in path:
|
|
|
|
try:
|
|
|
|
data = data[item_index]
|
|
|
|
except (KeyError, IndexError, TypeError):
|
|
|
|
return None
|
|
|
|
return data
|
|
|
|
|
|
|
|
|
|
|
|
def humanize_error(config, validation_error):
    """Turn a voluptuous error into a short human-readable sentence."""
    text = str(validation_error)
    # Drop the trailing "@ data[...]" location info that voluptuous appends
    match = re.match(
        r"^(.*?)\s*(?:for dictionary value )?@ data\[.*$", text, re.DOTALL
    )
    if match:
        text = match.group(1)
    text = text.strip()
    return text if text.endswith(".") else text + "."
|
|
|
|
|
2019-02-26 18:32:20 +01:00
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
def _get_parent_name(path, config):
    """Return a display name for the config section that contains *path*."""
    if not path:
        return "<root>"
    for domain_path, domain in config.output_paths:
        if not _path_begins_with(path, domain_path):
            continue
        if len(path) > len(domain_path):
            # Path points *inside* this section, not at it; use the key name
            break
        return domain
    return path[-1]
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
def _format_vol_invalid(ex, config):
    # type: (vol.Invalid, Config) -> str
    """Render a voluptuous Invalid as the message shown to the user."""
    paren = _get_parent_name(ex.path[:-1], config)

    if isinstance(ex, ExtraKeysInvalid):
        if ex.candidates:
            candidates = ", ".join(f"[{x}]" for x in ex.candidates)
            return "[{}] is an invalid option for [{}]. Did you mean {}?".format(
                ex.path[-1], paren, candidates
            )
        return "[{}] is an invalid option for [{}]. Please check the indentation.".format(
            ex.path[-1], paren
        )
    if "extra keys not allowed" in str(ex):
        return "[{}] is an invalid option for [{}].".format(ex.path[-1], paren)
    if isinstance(ex, vol.RequiredFieldInvalid):
        if ex.msg == "required key not provided":
            return "'{}' is a required option for [{}].".format(ex.path[-1], paren)
        # Required has set a custom error message
        return ex.msg
    return humanize_error(config, ex)
|
|
|
|
|
|
|
|
|
2019-04-22 21:56:30 +02:00
|
|
|
class InvalidYAMLError(EsphomeError):
    """Raised when the YAML source cannot be parsed at all."""

    def __init__(self, base_exc):
        # str() of the underlying exception can itself fail on bad bytes
        try:
            base = str(base_exc)
        except UnicodeDecodeError:
            base = repr(base_exc)
        super().__init__(f"Invalid YAML syntax:\n\n{base}")
        self.base_exc = base_exc
|
|
|
|
|
|
|
|
|
2020-06-21 20:33:01 +02:00
|
|
|
def _load_config(command_line_substitutions):
    """Read the YAML at CORE.config_path and validate it into a Config."""
    try:
        config = yaml_util.load_yaml(CORE.config_path)
    except EsphomeError as err:
        raise InvalidYAMLError(err) from err
    CORE.raw_config = config

    try:
        return validate_config(config, command_line_substitutions)
    except EsphomeError:
        # Expected validation failures propagate without extra logging
        raise
    except Exception:
        _LOGGER.error("Unexpected exception while reading configuration:")
        raise
|
|
|
|
|
|
|
|
|
2020-06-21 20:33:01 +02:00
|
|
|
def load_config(command_line_substitutions):
    """Load and validate the configuration, normalizing schema errors.

    Any stray voluptuous error is converted into an EsphomeError so callers
    only have to handle one exception type.
    """
    try:
        result = _load_config(command_line_substitutions)
    except vol.Invalid as exc:
        raise EsphomeError(f"Error while parsing config: {exc}") from exc
    return result
|
2019-04-22 21:56:30 +02:00
|
|
|
|
|
|
|
|
2021-02-27 23:21:07 +01:00
|
|
|
def line_info(config, path, highlight=True):
    """Return a colored "[source file:line]" tag for the item at *path*.

    Returns None when *highlight* is False, and the literal string "None"
    when no document range is known for the path (intentional: it is
    displayed as-is in the dump).
    """
    if not highlight:
        return None
    doc_range = config.get_deepest_document_range_for_path(path)
    if not doc_range:
        return "None"
    mark = doc_range.start_mark
    # YAML marks are 0-based; show 1-based line numbers to the user.
    return color(Fore.CYAN, f"[source {mark.document}:{mark.line + 1}]")
|
2018-12-05 21:22:06 +01:00
|
|
|
|
|
|
|
|
|
|
|
def _print_on_next_line(obj):
|
|
|
|
if isinstance(obj, (list, tuple, dict)):
|
|
|
|
return True
|
|
|
|
if isinstance(obj, str):
|
|
|
|
return len(obj) > 80
|
|
|
|
if isinstance(obj, core.Lambda):
|
|
|
|
return len(obj.value) > 80
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
def dump_dict(config, path, at_root=True):
    # type: (Config, ConfigPath, bool) -> Tuple[str, bool]
    """Render the config fragment at *path* as YAML-like colored text.

    Recursively walks lists, dicts, strings and lambdas, prepending any
    validation error (in bold red) and a "[source ...]" line tag for items
    on an error path.  Returns ``(text, multiline)`` where *multiline*
    tells the caller whether the text spans more than one line.
    """
    conf = config.get_nested_item(path)
    ret = ""
    multiline = False

    if at_root:
        # Only the top-level call prints the error for *path* itself;
        # nested errors are printed while iterating children below.
        error = config.get_error_for_path(path)
        if error is not None:
            ret += (
                "\n" + color(Fore.BOLD_RED, _format_vol_invalid(error, config)) + "\n"
            )

    if isinstance(conf, (list, tuple)):
        multiline = True
        if not conf:
            # Empty sequence renders inline as "[]".
            ret += "[]"
            multiline = False

        for i in range(len(conf)):
            path_ = path + [i]
            error = config.get_error_for_path(path_)
            if error is not None:
                ret += (
                    "\n"
                    + color(Fore.BOLD_RED, _format_vol_invalid(error, config))
                    + "\n"
                )

            sep = "- "
            if config.is_in_error_path(path_):
                sep = color(Fore.RED, sep)
            msg, _ = dump_dict(config, path_, at_root=False)
            msg = indent(msg)
            inf = line_info(config, path_, highlight=config.is_in_error_path(path_))
            if inf is not None:
                msg = inf + "\n" + msg
            elif msg:
                # No source tag: drop the two-space indent so the item
                # starts right after the "- " separator.
                msg = msg[2:]
            ret += sep + msg + "\n"
    elif isinstance(conf, dict):
        multiline = True
        if not conf:
            # Empty mapping renders inline as "{}".
            ret += "{}"
            multiline = False

        for k in conf.keys():
            path_ = path + [k]
            error = config.get_error_for_path(path_)
            if error is not None:
                ret += (
                    "\n"
                    + color(Fore.BOLD_RED, _format_vol_invalid(error, config))
                    + "\n"
                )

            st = f"{k}: "
            if config.is_in_error_path(path_):
                st = color(Fore.RED, st)
            msg, m = dump_dict(config, path_, at_root=False)

            inf = line_info(config, path_, highlight=config.is_in_error_path(path_))
            if m:
                # Multiline value goes on the line(s) below the key.
                msg = "\n" + indent(msg)

            if inf is not None:
                if m:
                    # Source tag sits on the key's line, before the value.
                    msg = " " + inf + msg
                else:
                    # Single-line value: tag goes after the value.
                    msg = msg + " " + inf
            ret += st + msg + "\n"
    elif isinstance(conf, str):
        if is_secret(conf):
            # Show the original "!secret name" instead of the secret value.
            conf = "!secret {}".format(is_secret(conf))
        if not conf:
            conf += "''"

        if len(conf) > 80:
            # Long strings become a YAML block scalar.
            conf = "|-\n" + indent(conf)
        error = config.get_error_for_path(path)
        col = Fore.BOLD_RED if error else Fore.KEEP
        ret += color(col, str(conf))
    elif isinstance(conf, core.Lambda):
        if is_secret(conf):
            conf = "!secret {}".format(is_secret(conf))

        conf = "!lambda |-\n" + indent(str(conf.value))
        error = config.get_error_for_path(path)
        col = Fore.BOLD_RED if error else Fore.KEEP
        ret += color(col, conf)
    elif conf is None:
        pass
    else:
        # Any other scalar (int, bool, ID, ...) is stringified as-is.
        error = config.get_error_for_path(path)
        col = Fore.BOLD_RED if error else Fore.KEEP
        ret += color(col, str(conf))
        multiline = "\n" in ret

    return ret, multiline
|
2018-04-07 01:23:03 +02:00
|
|
|
|
|
|
|
|
2018-12-05 21:22:06 +01:00
|
|
|
def strip_default_ids(config):
    """Recursively remove auto-generated IDs and AutoLoad markers.

    Mutates lists and dicts in place (auto IDs are dropped from lists,
    and keys whose value is an auto ID are popped from dicts) and returns
    the same object.
    """

    def _is_default(value):
        # Auto-assigned IDs (not written by the user) and AutoLoad markers
        # are noise in dumped output.
        if isinstance(value, core.AutoLoad):
            return True
        return isinstance(value, core.ID) and not value.is_manual

    if isinstance(config, list):
        stale = []
        for idx, item in enumerate(config):
            item = config[idx] = strip_default_ids(item)
            if _is_default(item):
                stale.append(item)
        # Remove after iterating to avoid mutating while looping.
        for item in stale:
            config.remove(item)
    elif isinstance(config, dict):
        stale = []
        for key, value in config.items():
            value = config[key] = strip_default_ids(value)
            if _is_default(value):
                stale.append(key)
        for key in stale:
            config.pop(key)
    return config
|
|
|
|
|
|
|
|
|
2020-06-21 20:33:01 +02:00
|
|
|
def read_config(command_line_substitutions):
    """Load, validate and return the configuration as an OrderedDict.

    On failure, prints every erroring config section (with source line
    tags) to the console and returns None instead of raising.
    """
    _LOGGER.info("Reading configuration %s...", CORE.config_path)
    try:
        res = load_config(command_line_substitutions)
    except EsphomeError as err:
        _LOGGER.error("Error while reading config: %s", err)
        return None

    if not res.errors:
        return OrderedDict(res)

    # Validation failed: dump each section that contains an error.
    if not CORE.verbose:
        # Hide auto-generated IDs to keep the failure dump readable.
        res = strip_default_ids(res)

    safe_print(color(Fore.BOLD_RED, "Failed config"))
    safe_print("")
    for path, domain in res.output_paths:
        if not res.is_in_error_path(path):
            continue

        header = color(Fore.BOLD_RED, f"{domain}:")
        source = line_info(res, path)
        if source:
            header += " " + source
        safe_print(header)
        safe_print(indent(dump_dict(res, path)[0]))
    return None
|