Update

This commit is contained in:
parent c75edc4880
commit 5e3bc4ed2b

32 changed files with 741 additions and 185 deletions
@@ -16,6 +16,8 @@ from esphomeyaml.const import CONF_BAUD_RATE, CONF_DOMAIN, CONF_ESPHOMEYAML, \
from esphomeyaml.core import CORE, EsphomeyamlError
from esphomeyaml.cpp_generator import Expression, RawStatement, add, statement
from esphomeyaml.helpers import color, indent
from esphomeyaml.storage_json import StorageJSON, storage_path, start_update_check_thread, \
    esphomeyaml_storage_path
from esphomeyaml.util import run_external_command, safe_print

_LOGGER = logging.getLogger(__name__)

@@ -125,7 +127,10 @@ def write_cpp(config):

def compile_program(args, config):
    _LOGGER.info("Compiling app...")
    return platformio_api.run_compile(config, args.verbose)
    thread = start_update_check_thread(esphomeyaml_storage_path(CORE.config_dir))
    rc = platformio_api.run_compile(config, args.verbose)
    thread.join()
    return rc


def get_upload_host(config):

@@ -179,9 +184,16 @@ def upload_program(config, args, port):
    remote_port = ota.get_port(config)
    password = ota.get_auth(config)

    storage = StorageJSON.load(storage_path())
    res = espota2.run_ota(host, remote_port, password, CORE.firmware_bin)
    if res == 0:
        if storage is not None and storage.use_legacy_ota:
            storage.use_legacy_ota = False
            storage.save(storage_path())
        return res
    if storage is not None and not storage.use_legacy_ota:
        return res

    _LOGGER.warn("OTA v2 method failed. Trying with legacy OTA...")
    return espota2.run_legacy_ota(verbose, host_port, host, remote_port, password,
                                  CORE.firmware_bin)
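compile_program now runs the release check concurrently with the PlatformIO build and joins the thread before returning the compile result. A generic sketch of that pattern (illustrative only; the worker and check function are placeholders, not esphomeyaml APIs):

import threading
import time

def start_update_check_thread():
    # Placeholder for esphomeyaml's start_update_check_thread(); the real
    # worker looks up the latest release and records it in the storage JSON.
    def check():
        time.sleep(0.1)  # pretend to hit the network
    thread = threading.Thread(target=check)
    thread.start()
    return thread

def compile_program():
    thread = start_update_check_thread()
    rc = 0  # stand-in for platformio_api.run_compile(config, verbose)
    thread.join()
    return rc

print(compile_program())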
@@ -2,12 +2,11 @@ import copy

import voluptuous as vol

from esphomeyaml import core
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ABOVE, CONF_ACTION_ID, CONF_AND, CONF_AUTOMATION_ID, \
    CONF_BELOW, CONF_CONDITION, CONF_CONDITION_ID, CONF_DELAY, CONF_ELSE, CONF_ID, CONF_IF, \
    CONF_LAMBDA, CONF_OR, CONF_RANGE, CONF_THEN, CONF_TRIGGER_ID
from esphomeyaml.core import CORE, EsphomeyamlError
    CONF_LAMBDA, CONF_OR, CONF_RANGE, CONF_THEN, CONF_TRIGGER_ID, CONF_WHILE
from esphomeyaml.core import CORE
from esphomeyaml.cpp_generator import ArrayInitializer, Pvariable, TemplateArguments, add, \
    get_variable, process_lambda, templatable
from esphomeyaml.cpp_types import Action, App, Component, PollingComponent, Trigger, \
@@ -27,7 +26,29 @@ def maybe_simple_id(*validators):


def validate_recursive_condition(value):
    return CONDITIONS_SCHEMA(value)
    value = cv.ensure_list(value)[:]
    for i, item in enumerate(value):
        item = copy.deepcopy(item)
        if not isinstance(item, dict):
            raise vol.Invalid(u"Condition must consist of key-value mapping! Got {}".format(item))
        key = next((x for x in item if x != CONF_CONDITION_ID), None)
        if key is None:
            raise vol.Invalid(u"Key missing from action! Got {}".format(item))
        if key not in CONDITION_REGISTRY:
            raise vol.Invalid(u"Unable to find condition with the name '{}', is the "
                              u"component loaded?".format(key))
        item.setdefault(CONF_CONDITION_ID, None)
        key2 = next((x for x in item if x != CONF_CONDITION_ID and x != key), None)
        if key2 is not None:
            raise vol.Invalid(u"Cannot have two conditions in one item. Key '{}' overrides '{}'! "
                              u"Did you forget to indent the block inside the condition?"
                              u"".format(key, key2))
        validator = CONDITION_REGISTRY[key][0]
        value[i] = {
            CONF_CONDITION_ID: cv.declare_variable_id(Condition)(item[CONF_ACTION_ID]),
            key: validator(item[key])
        }
    return value


def validate_recursive_action(value):
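The net effect of validate_recursive_condition is easier to see on a concrete entry: each list item ends up as a two-key mapping, a condition ID plus the validated per-condition config. A minimal standalone sketch of that normalization (plain dicts and a hypothetical registry, not the real voluptuous-based validator; not part of the commit):

# Simplified illustration of the normalization performed above.
CONF_CONDITION_ID = 'condition_id'

# Hypothetical registry contents: condition key -> validator callable
CONDITION_REGISTRY = {
    'lambda': str,            # stands in for cv.lambda_
}

def normalize_conditions(value):
    result = []
    for i, item in enumerate(value):
        key = next(x for x in item if x != CONF_CONDITION_ID)
        validator = CONDITION_REGISTRY[key]
        result.append({
            CONF_CONDITION_ID: item.get(CONF_CONDITION_ID) or 'condition_{}'.format(i),
            key: validator(item[key]),
        })
    return result

print(normalize_conditions([{'lambda': 'return x > 42;'}]))
# [{'condition_id': 'condition_0', 'lambda': 'return x > 42;'}]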
@@ -46,7 +67,7 @@ def validate_recursive_action(value):
        key2 = next((x for x in item if x != CONF_ACTION_ID and x != key), None)
        if key2 is not None:
            raise vol.Invalid(u"Cannot have two actions in one item. Key '{}' overrides '{}'! "
                              u"Did you forget to indent the action?"
                              u"Did you forget to indent the block inside the action?"
                              u"".format(key, key2))
        validator = ACTION_REGISTRY[key][0]
        value[i] = {

@@ -57,11 +78,13 @@ def validate_recursive_action(value):

ACTION_REGISTRY = ServiceRegistry()
CONDITION_REGISTRY = ServiceRegistry()

# pylint: disable=invalid-name
DelayAction = esphomelib_ns.class_('DelayAction', Action, Component)
LambdaAction = esphomelib_ns.class_('LambdaAction', Action)
IfAction = esphomelib_ns.class_('IfAction', Action)
WhileAction = esphomelib_ns.class_('WhileAction', Action)
UpdateComponentAction = esphomelib_ns.class_('UpdateComponentAction', Action)
Automation = esphomelib_ns.class_('Automation')
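ACTION_REGISTRY and CONDITION_REGISTRY are instances of the same ServiceRegistry helper. Its implementation is not part of this diff, but the usage throughout (REGISTRY.register(key, schema) as a decorator, then REGISTRY[key][0] for the schema and REGISTRY[key][1] for the builder) suggests a dict mapping each config key to a (schema, builder) pair. A hedged sketch of such a registry (shape and names are assumptions, not taken from the commit):

# Assumed shape of ServiceRegistry, inferred from how it is used in this diff.
class ServiceRegistry(dict):
    def register(self, name, schema):
        def decorator(builder):
            self[name] = (schema, builder)   # store (validation schema, code builder)
            return builder
        return decorator

ACTION_REGISTRY = ServiceRegistry()

@ACTION_REGISTRY.register('delay', schema=str)   # str stands in for a voluptuous schema
def delay_action_to_code(config, action_id, arg_type, template_arg):
    yield 'DelayAction<{}>({})'.format(template_arg, config)

schema, builder = ACTION_REGISTRY['delay']
print(next(builder('500ms', 'my_delay', 'NoArg', 'NoArg')))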
@@ -76,8 +99,8 @@ CONDITIONS_SCHEMA = vol.All(cv.ensure_list, [cv.templatable({
    vol.Optional(CONF_AND): validate_recursive_condition,
    vol.Optional(CONF_OR): validate_recursive_condition,
    vol.Optional(CONF_RANGE): vol.All(vol.Schema({
        vol.Optional(CONF_ABOVE): vol.Coerce(float),
        vol.Optional(CONF_BELOW): vol.Coerce(float),
        vol.Optional(CONF_ABOVE): cv.float_,
        vol.Optional(CONF_BELOW): cv.float_,
    }), cv.has_at_least_one_key(CONF_ABOVE, CONF_BELOW)),
    vol.Optional(CONF_LAMBDA): cv.lambda_,
})])
@@ -122,60 +145,63 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
AUTOMATION_SCHEMA = vol.Schema({
    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(Trigger),
    cv.GenerateID(CONF_AUTOMATION_ID): cv.declare_variable_id(Automation),
    vol.Optional(CONF_IF): CONDITIONS_SCHEMA,
    vol.Optional(CONF_IF): validate_recursive_condition,
    vol.Required(CONF_THEN): validate_recursive_action,
})

AND_CONDITION_SCHEMA = validate_recursive_condition


def build_condition(config, arg_type):
    template_arg = TemplateArguments(arg_type)
    if isinstance(config, core.Lambda):
        for lambda_ in process_lambda(config, [(arg_type, 'x')]):

@CONDITION_REGISTRY.register(CONF_AND, AND_CONDITION_SCHEMA)
def and_condition_to_code(config, condition_id, arg_type, template_arg):
    for conditions in build_conditions(config, arg_type):
        yield
        yield LambdaCondition.new(template_arg, lambda_)
    elif CONF_AND in config:
        yield AndCondition.new(template_arg, build_conditions(config[CONF_AND], template_arg))
    elif CONF_OR in config:
        yield OrCondition.new(template_arg, build_conditions(config[CONF_OR], template_arg))
    elif CONF_LAMBDA in config:
        lambda_ = None
        for lambda_ in process_lambda(config[CONF_LAMBDA], [(arg_type, 'x')]):
    rhs = AndCondition.new(template_arg, conditions)
    type = AndCondition.template(template_arg)
    yield Pvariable(condition_id, rhs, type=type)


OR_CONDITION_SCHEMA = validate_recursive_condition


@CONDITION_REGISTRY.register(CONF_OR, OR_CONDITION_SCHEMA)
def or_condition_to_code(config, condition_id, arg_type, template_arg):
    for conditions in build_conditions(config, arg_type):
        yield
        yield LambdaCondition.new(template_arg, lambda_)
    elif CONF_RANGE in config:
        conf = config[CONF_RANGE]
        rhs = RangeCondition.new(template_arg)
    rhs = OrCondition.new(template_arg, conditions)
    type = OrCondition.template(template_arg)
    yield Pvariable(condition_id, rhs, type=type)


RANGE_CONDITION_SCHEMA = vol.All(vol.Schema({
    vol.Optional(CONF_ABOVE): cv.templatable(cv.float_),
    vol.Optional(CONF_BELOW): cv.templatable(cv.float_),
}), cv.has_at_least_one_key(CONF_ABOVE, CONF_BELOW))


@CONDITION_REGISTRY.register(CONF_RANGE, RANGE_CONDITION_SCHEMA)
def range_condition_to_code(config, condition_id, arg_type, template_arg):
    for conditions in build_conditions(config, arg_type):
        yield
    rhs = RangeCondition.new(template_arg, conditions)
    type = RangeCondition.template(template_arg)
    condition = Pvariable(config[CONF_CONDITION_ID], rhs, type=type)
    if CONF_ABOVE in conf:
        for template_ in templatable(conf[CONF_ABOVE], arg_type, float_):
    condition = Pvariable(condition_id, rhs, type=type)
    if CONF_ABOVE in config:
        for template_ in templatable(config[CONF_ABOVE], arg_type, float_):
            yield
        condition.set_min(template_)
    if CONF_BELOW in conf:
        for template_ in templatable(conf[CONF_BELOW], arg_type, float_):
    if CONF_BELOW in config:
        for template_ in templatable(config[CONF_BELOW], arg_type, float_):
            yield
        condition.set_max(template_)
    yield condition
    else:
        raise EsphomeyamlError(u"Unsupported condition {}".format(config))


def build_conditions(config, arg_type):
    conditions = []
    for conf in config:
        condition = None
        for condition in build_condition(conf, arg_type):
            yield None
        conditions.append(condition)
    yield ArrayInitializer(*conditions)


DELAY_ACTION_SCHEMA = cv.templatable(cv.positive_time_period_milliseconds)


@ACTION_REGISTRY.register(CONF_DELAY, DELAY_ACTION_SCHEMA)
def delay_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def delay_action_to_code(config, action_id, arg_type, template_arg):
    rhs = App.register_component(DelayAction.new(template_arg))
    type = DelayAction.template(template_arg)
    action = Pvariable(action_id, rhs, type=type)
@@ -193,8 +219,7 @@ IF_ACTION_SCHEMA = vol.All({

@ACTION_REGISTRY.register(CONF_IF, IF_ACTION_SCHEMA)
def if_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def if_action_to_code(config, action_id, arg_type, template_arg):
    for conditions in build_conditions(config[CONF_CONDITION], arg_type):
        yield None
    rhs = IfAction.new(template_arg, conditions)
@@ -211,12 +236,30 @@ def if_action_to_code(config, action_id, arg_type):
    yield action


WHILE_ACTION_SCHEMA = vol.Schema({
    vol.Required(CONF_CONDITION): validate_recursive_condition,
    vol.Required(CONF_THEN): validate_recursive_action,
})


@ACTION_REGISTRY.register(CONF_WHILE, WHILE_ACTION_SCHEMA)
def while_action_to_code(config, action_id, arg_type, template_arg):
    for conditions in build_conditions(config[CONF_CONDITION], arg_type):
        yield None
    rhs = WhileAction.new(template_arg, conditions)
    type = WhileAction.template(template_arg)
    action = Pvariable(action_id, rhs, type=type)
    for actions in build_actions(config[CONF_THEN], arg_type):
        yield None
    add(action.add_then(actions))
    yield action


LAMBDA_ACTION_SCHEMA = cv.lambda_


@ACTION_REGISTRY.register(CONF_LAMBDA, LAMBDA_ACTION_SCHEMA)
def lambda_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def lambda_action_to_code(config, action_id, arg_type, template_arg):
    for lambda_ in process_lambda(config, [(arg_type, 'x')]):
        yield None
    rhs = LambdaAction.new(template_arg, lambda_)
@@ -224,6 +267,18 @@ def lambda_action_to_code(config, action_id, arg_type):
    yield Pvariable(action_id, rhs, type=type)


LAMBDA_CONDITION_SCHEMA = cv.lambda_


@CONDITION_REGISTRY.register(CONF_LAMBDA, LAMBDA_CONDITION_SCHEMA)
def lambda_condition_to_code(config, condition_id, arg_type, template_arg):
    for lambda_ in process_lambda(config, [(arg_type, 'x')]):
        yield
    rhs = LambdaCondition.new(template_arg, lambda_)
    type = LambdaAction.template(template_arg)
    yield Pvariable(condition_id, rhs, type=type)


CONF_COMPONENT_UPDATE = 'component.update'
COMPONENT_UPDATE_ACTION_SCHEMA = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(PollingComponent),
@@ -231,8 +286,7 @@ COMPONENT_UPDATE_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_COMPONENT_UPDATE, COMPONENT_UPDATE_ACTION_SCHEMA)
def component_update_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def component_update_action_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = UpdateComponentAction.new(var)
@@ -245,7 +299,8 @@ def build_action(full_config, arg_type):
    key, config = next((k, v) for k, v in full_config.items() if k in ACTION_REGISTRY)

    builder = ACTION_REGISTRY[key][1]
    for result in builder(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
    for result in builder(config, action_id, arg_type, template_arg):
        yield None
    yield result
@@ -260,6 +315,26 @@ def build_actions(config, arg_type):
    yield ArrayInitializer(*actions, multiline=False)


def build_condition(full_config, arg_type):
    action_id = full_config[CONF_ACTION_ID]
    key, config = next((k, v) for k, v in full_config.items() if k in CONDITION_REGISTRY)

    builder = CONDITION_REGISTRY[key][1]
    template_arg = TemplateArguments(arg_type)
    for result in builder(config, action_id, arg_type, template_arg):
        yield None
    yield result


def build_conditions(config, arg_type):
    conditions = []
    for conf in config:
        for condition in build_condition(conf, arg_type):
            yield None
        conditions.append(condition)
    yield ArrayInitializer(*conditions, multiline=False)


def build_automation_(trigger, arg_type, config):
    rhs = App.make_automation(TemplateArguments(arg_type), trigger)
    type = Automation.template(arg_type)
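All of the *_to_code builders and the build_conditions/build_actions helpers above follow the same generator convention: they yield None while a dependency (for example a variable from get_variable) is not available yet, and yield the finished expression as their last value. A simplified, self-contained model of that convention (the driver and names here are illustrative, not esphomeyaml's real scheduler; not part of the commit):

variables = {}

def get_variable(name):
    while name not in variables:    # dependency not generated yet
        yield None
    yield variables[name]

def switch_is_on_to_code(var_name):
    var = None
    for var in get_variable(var_name):
        yield None
    yield '{}.make_switch_is_on_condition()'.format(var)

def run(tasks):
    # Tiny round-robin driver: keep resuming each generator until it yields
    # something other than None (its finished expression).
    results = []
    pending = list(tasks)
    while pending:
        task = pending.pop(0)
        value = next(task)
        if value is None:
            pending.append(task)
        else:
            results.append(value)
    return results

variables['relay'] = 'App.get_relay()'
print(run([switch_is_on_to_code('relay')]))
# ['App.get_relay().make_switch_is_on_condition()']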
@@ -1,6 +1,7 @@
import voluptuous as vol

from esphomeyaml import automation, core
from esphomeyaml.automation import maybe_simple_id, CONDITION_REGISTRY, Condition
from esphomeyaml.components import mqtt
from esphomeyaml.components.mqtt import setup_mqtt_component
import esphomeyaml.config_validation as cv

@@ -11,7 +12,7 @@ from esphomeyaml.const import CONF_DELAYED_OFF, CONF_DELAYED_ON, CONF_DEVICE_CLA
    CONF_TIMING, CONF_TRIGGER_ID
from esphomeyaml.core import CORE
from esphomeyaml.cpp_generator import process_lambda, ArrayInitializer, add, Pvariable, \
    StructInitializer
    StructInitializer, get_variable
from esphomeyaml.cpp_types import esphomelib_ns, Nameable, Trigger, NoArg, Component, App, bool_

DEVICE_CLASSES = [

@@ -38,6 +39,9 @@ DoubleClickTrigger = binary_sensor_ns.class_('DoubleClickTrigger', Trigger.templ
MultiClickTrigger = binary_sensor_ns.class_('MultiClickTrigger', Trigger.template(NoArg), Component)
MultiClickTriggerEvent = binary_sensor_ns.struct('MultiClickTriggerEvent')

# Condition
BinarySensorCondition = binary_sensor_ns.class_('BinarySensorCondition', Condition)

# Filters
Filter = binary_sensor_ns.class_('Filter')
DelayedOnFilter = binary_sensor_ns.class_('DelayedOnFilter', Filter, Component)

@@ -293,3 +297,33 @@ def core_to_hass_config(data, config):

BUILD_FLAGS = '-DUSE_BINARY_SENSOR'


CONF_BINARY_SENSOR_IS_ON = 'binary_sensor.is_on'
BINARY_SENSOR_IS_ON_CONDITION_SCHEMA = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(BinarySensor),
})


@CONDITION_REGISTRY.register(CONF_BINARY_SENSOR_IS_ON, BINARY_SENSOR_IS_ON_CONDITION_SCHEMA)
def binary_sensor_is_on_to_code(config, condition_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_binary_sensor_is_on_condition(template_arg)
    type = BinarySensorCondition.template(arg_type)
    yield Pvariable(condition_id, rhs, type=type)


CONF_BINARY_SENSOR_IS_OFF = 'binary_sensor.is_off'
BINARY_SENSOR_IS_OFF_CONDITION_SCHEMA = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(BinarySensor),
})


@CONDITION_REGISTRY.register(CONF_BINARY_SENSOR_IS_OFF, BINARY_SENSOR_IS_OFF_CONDITION_SCHEMA)
def binary_sensor_is_off_to_code(config, condition_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_binary_sensor_is_off_condition(template_arg)
    type = BinarySensorCondition.template(arg_type)
    yield Pvariable(condition_id, rhs, type=type)
@@ -55,8 +55,7 @@ COVER_OPEN_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_COVER_OPEN, COVER_OPEN_ACTION_SCHEMA)
def cover_open_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def cover_open_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_open_action(template_arg)

@@ -71,8 +70,7 @@ COVER_CLOSE_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_COVER_CLOSE, COVER_CLOSE_ACTION_SCHEMA)
def cover_close_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def cover_close_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_close_action(template_arg)

@@ -87,8 +85,7 @@ COVER_STOP_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_COVER_STOP, COVER_STOP_ACTION_SCHEMA)
def cover_stop_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def cover_stop_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_stop_action(template_arg)
@@ -1,25 +1,34 @@
import voluptuous as vol

import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ID, CONF_LAMBDA
from esphomeyaml.const import CONF_ID, CONF_LAMBDA, CONF_COMPONENTS
from esphomeyaml.cpp_generator import process_lambda, variable
from esphomeyaml.cpp_helpers import setup_component
from esphomeyaml.cpp_types import Component, ComponentPtr, esphomelib_ns, std_vector

CustomComponentConstructor = esphomelib_ns.class_('CustomComponentConstructor')

CUSTOM_COMPONENT_SCHEMA = vol.Schema({
    cv.GenerateID(): cv.declare_variable_id(Component),
    cv.GenerateID(): cv.declare_variable_id(CustomComponentConstructor),
    vol.Required(CONF_LAMBDA): cv.lambda_,
    vol.Optional(CONF_COMPONENTS): vol.All(cv.ensure_list, [vol.Schema({
        cv.GenerateID(): cv.declare_variable_id(Component)
    }).extend(cv.COMPONENT_SCHEMA.schema)]),
})

CONFIG_SCHEMA = vol.All(cv.ensure_list, [CUSTOM_COMPONENT_SCHEMA])


def to_code(config):
    for template_ in process_lambda(config[CONF_LAMBDA], [],
    for conf in config:
        for template_ in process_lambda(conf[CONF_LAMBDA], [],
                                        return_type=std_vector.template(ComponentPtr)):
            yield

        rhs = CustomComponentConstructor(template_)
        variable(config[CONF_ID], rhs)
        custom = variable(conf[CONF_ID], rhs)
        for i, comp in enumerate(conf.get(CONF_COMPONENTS, [])):
            setup_component(custom.get_component(i), comp)


BUILD_FLAGS = '-DUSE_CUSTOM_COMPONENT'
@@ -4,8 +4,7 @@ from esphomeyaml import config_validation as cv, pins
from esphomeyaml.automation import ACTION_REGISTRY, maybe_simple_id
from esphomeyaml.const import CONF_ID, CONF_MODE, CONF_NUMBER, CONF_PINS, CONF_RUN_CYCLES, \
    CONF_RUN_DURATION, CONF_SLEEP_DURATION, CONF_WAKEUP_PIN
from esphomeyaml.cpp_generator import Pvariable, StructInitializer, TemplateArguments, add, \
    get_variable
from esphomeyaml.cpp_generator import Pvariable, StructInitializer, add, get_variable
from esphomeyaml.cpp_helpers import gpio_input_pin_expression, setup_component
from esphomeyaml.cpp_types import Action, App, Component, esphomelib_ns, global_ns

@@ -96,8 +95,7 @@ DEEP_SLEEP_ENTER_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_DEEP_SLEEP_ENTER, DEEP_SLEEP_ENTER_ACTION_SCHEMA)
def deep_sleep_enter_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def deep_sleep_enter_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_enter_deep_sleep_action(template_arg)

@@ -112,8 +110,7 @@ DEEP_SLEEP_PREVENT_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_DEEP_SLEEP_PREVENT, DEEP_SLEEP_PREVENT_ACTION_SCHEMA)
def deep_sleep_prevent_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def deep_sleep_prevent_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_prevent_deep_sleep_action(template_arg)
@@ -77,8 +77,7 @@ FAN_TOGGLE_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_FAN_TOGGLE, FAN_TOGGLE_ACTION_SCHEMA)
def fan_toggle_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def fan_toggle_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_toggle_action(template_arg)

@@ -93,8 +92,7 @@ FAN_TURN_OFF_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_FAN_TURN_OFF, FAN_TURN_OFF_ACTION_SCHEMA)
def fan_turn_off_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def fan_turn_off_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_off_action(template_arg)

@@ -111,8 +109,7 @@ FAN_TURN_ON_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_FAN_TURN_ON, FAN_TURN_ON_ACTION_SCHEMA)
def fan_turn_on_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def fan_turn_on_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_on_action(template_arg)
@@ -363,8 +363,7 @@ LIGHT_TOGGLE_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_LIGHT_TOGGLE, LIGHT_TOGGLE_ACTION_SCHEMA)
def light_toggle_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def light_toggle_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_toggle_action(template_arg)

@@ -385,8 +384,7 @@ LIGHT_TURN_OFF_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_LIGHT_TURN_OFF, LIGHT_TURN_OFF_ACTION_SCHEMA)
def light_turn_off_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def light_turn_off_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_off_action(template_arg)

@@ -417,8 +415,7 @@ LIGHT_TURN_ON_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_LIGHT_TURN_ON, LIGHT_TURN_ON_ACTION_SCHEMA)
def light_turn_on_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def light_turn_on_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_on_action(template_arg)
@@ -7,9 +7,8 @@ import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ARGS, CONF_BAUD_RATE, CONF_FORMAT, CONF_ID, CONF_LEVEL, \
    CONF_LOGS, CONF_TAG, CONF_TX_BUFFER_SIZE
from esphomeyaml.core import EsphomeyamlError, Lambda
from esphomeyaml.cpp_generator import Pvariable, add, TemplateArguments, RawExpression, statement, \
    process_lambda
from esphomeyaml.cpp_types import global_ns, esphomelib_ns, Component, App
from esphomeyaml.cpp_generator import Pvariable, RawExpression, add, process_lambda, statement
from esphomeyaml.cpp_types import App, Component, esphomelib_ns, global_ns

LOG_LEVELS = {
    'NONE': global_ns.ESPHOMELIB_LOG_LEVEL_NONE,

@@ -116,8 +115,7 @@ LOGGER_LOG_ACTION_SCHEMA = vol.All(maybe_simple_message({

@ACTION_REGISTRY.register(CONF_LOGGER_LOG, LOGGER_LOG_ACTION_SCHEMA)
def logger_log_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def logger_log_action_to_code(config, action_id, arg_type, template_arg):
    esp_log = LOG_LEVEL_TO_ESP_LOG[config[CONF_LEVEL]]
    args = [RawExpression(unicode(x)) for x in config[CONF_ARGS]]
@@ -197,8 +197,7 @@ MQTT_PUBLISH_ACTION_SCHEMA = vol.Schema({

@ACTION_REGISTRY.register(CONF_MQTT_PUBLISH, MQTT_PUBLISH_ACTION_SCHEMA)
def mqtt_publish_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def mqtt_publish_action_to_code(config, action_id, arg_type, template_arg):
    rhs = App.Pget_mqtt_client().Pmake_publish_action(template_arg)
    type = MQTTPublishAction.template(template_arg)
    action = Pvariable(action_id, rhs, type=type)

@@ -230,8 +229,7 @@ MQTT_PUBLISH_JSON_ACTION_SCHEMA = vol.Schema({

@ACTION_REGISTRY.register(CONF_MQTT_PUBLISH_JSON, MQTT_PUBLISH_JSON_ACTION_SCHEMA)
def mqtt_publish_json_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def mqtt_publish_json_action_to_code(config, action_id, arg_type, template_arg):
    rhs = App.Pget_mqtt_client().Pmake_publish_json_action(template_arg)
    type = MQTTPublishJsonAction.template(template_arg)
    action = Pvariable(action_id, rhs, type=type)
@@ -68,8 +68,7 @@ OUTPUT_TURN_ON_ACTION = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_ON, OUTPUT_TURN_ON_ACTION)
def output_turn_on_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def output_turn_on_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_on_action(template_arg)

@@ -84,8 +83,7 @@ OUTPUT_TURN_OFF_ACTION = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_OFF, OUTPUT_TURN_OFF_ACTION)
def output_turn_off_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def output_turn_off_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_off_action(template_arg)

@@ -101,8 +99,7 @@ OUTPUT_SET_LEVEL_ACTION = vol.Schema({

@ACTION_REGISTRY.register(CONF_OUTPUT_SET_LEVEL, OUTPUT_SET_LEVEL_ACTION)
def output_set_level_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def output_set_level_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_set_level_action(template_arg)
@@ -64,7 +64,3 @@ def to_code(config):

BUILD_FLAGS = '-DUSE_CUSTOM_OUTPUT'


def to_hass_config(data, config):
    return [binary_sensor.core_to_hass_config(data, sens) for sens in config[CONF_OUTPUTS]]
@@ -4,11 +4,12 @@ from esphomeyaml import automation
from esphomeyaml.automation import ACTION_REGISTRY, maybe_simple_id
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ID
from esphomeyaml.cpp_generator import Pvariable, TemplateArguments, get_variable
from esphomeyaml.cpp_generator import Pvariable, get_variable
from esphomeyaml.cpp_types import Action, NoArg, Trigger, esphomelib_ns

Script = esphomelib_ns.class_('Script', Trigger.template(NoArg))
ScriptExecuteAction = esphomelib_ns.class_('ScriptExecuteAction', Action)
ScriptStopAction = esphomelib_ns.class_('ScriptStopAction', Action)

CONFIG_SCHEMA = automation.validate_automation({
    vol.Required(CONF_ID): cv.declare_variable_id(Script),

@@ -28,10 +29,24 @@ SCRIPT_EXECUTE_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_SCRIPT_EXECUTE, SCRIPT_EXECUTE_ACTION_SCHEMA)
def script_execute_action_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def script_execute_action_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_execute_action(template_arg)
    type = ScriptExecuteAction.template(arg_type)
    yield Pvariable(action_id, rhs, type=type)


CONF_SCRIPT_STOP = 'script.stop'
SCRIPT_STOP_ACTION_SCHEMA = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(Script),
})


@ACTION_REGISTRY.register(CONF_SCRIPT_STOP, SCRIPT_STOP_ACTION_SCHEMA)
def script_stop_action_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_stop_action(template_arg)
    type = ScriptStopAction.template(arg_type)
    yield Pvariable(action_id, rhs, type=type)
@@ -1,6 +1,7 @@
import voluptuous as vol

from esphomeyaml import automation
from esphomeyaml.automation import CONDITION_REGISTRY
from esphomeyaml.components import mqtt
from esphomeyaml.components.mqtt import setup_mqtt_component
import esphomeyaml.config_validation as cv

@@ -12,7 +13,8 @@ from esphomeyaml.const import CONF_ABOVE, CONF_ACCURACY_DECIMALS, CONF_ALPHA, CO
    CONF_SLIDING_WINDOW_MOVING_AVERAGE, CONF_THROTTLE, CONF_TRIGGER_ID, CONF_UNIQUE, \
    CONF_UNIT_OF_MEASUREMENT, CONF_WINDOW_SIZE
from esphomeyaml.core import CORE
from esphomeyaml.cpp_generator import ArrayInitializer, Pvariable, add, process_lambda, templatable
from esphomeyaml.cpp_generator import ArrayInitializer, Pvariable, add, process_lambda, \
    templatable, get_variable
from esphomeyaml.cpp_types import App, Component, Nameable, PollingComponent, Trigger, \
    esphomelib_ns, float_

@@ -39,9 +41,9 @@ FILTER_KEYS = [CONF_OFFSET, CONF_MULTIPLY, CONF_FILTER_OUT, CONF_FILTER_NAN,
    CONF_THROTTLE, CONF_DELTA, CONF_UNIQUE, CONF_HEARTBEAT, CONF_DEBOUNCE, CONF_OR]

FILTERS_SCHEMA = vol.All(cv.ensure_list, [vol.All({
    vol.Optional(CONF_OFFSET): vol.Coerce(float),
    vol.Optional(CONF_MULTIPLY): vol.Coerce(float),
    vol.Optional(CONF_FILTER_OUT): vol.Coerce(float),
    vol.Optional(CONF_OFFSET): cv.float_,
    vol.Optional(CONF_MULTIPLY): cv.float_,
    vol.Optional(CONF_FILTER_OUT): cv.float_,
    vol.Optional(CONF_FILTER_NAN): None,
    vol.Optional(CONF_SLIDING_WINDOW_MOVING_AVERAGE): vol.All(vol.Schema({
        vol.Required(CONF_WINDOW_SIZE): cv.positive_not_null_int,

@@ -54,7 +56,7 @@ FILTERS_SCHEMA = vol.All(cv.ensure_list, [vol.All({
    }),
    vol.Optional(CONF_LAMBDA): cv.lambda_,
    vol.Optional(CONF_THROTTLE): cv.positive_time_period_milliseconds,
    vol.Optional(CONF_DELTA): vol.Coerce(float),
    vol.Optional(CONF_DELTA): cv.float_,
    vol.Optional(CONF_UNIQUE): None,
    vol.Optional(CONF_HEARTBEAT): cv.positive_time_period_milliseconds,
    vol.Optional(CONF_DEBOUNCE): cv.positive_time_period_milliseconds,

@@ -74,7 +76,7 @@ EmptyPollingParentSensor = sensor_ns.class_('EmptyPollingParentSensor', EmptySen
# Triggers
SensorStateTrigger = sensor_ns.class_('SensorStateTrigger', Trigger.template(float_))
SensorRawStateTrigger = sensor_ns.class_('SensorRawStateTrigger', Trigger.template(float_))
ValueRangeTrigger = sensor_ns.class_('ValueRangeTrigger', Trigger.template(float_))
ValueRangeTrigger = sensor_ns.class_('ValueRangeTrigger', Trigger.template(float_), Component)

# Filters
Filter = sensor_ns.class_('Filter')

@@ -91,6 +93,7 @@ HeartbeatFilter = sensor_ns.class_('HeartbeatFilter', Filter, Component)
DeltaFilter = sensor_ns.class_('DeltaFilter', Filter)
OrFilter = sensor_ns.class_('OrFilter', Filter)
UniqueFilter = sensor_ns.class_('UniqueFilter', Filter)
SensorInRangeCondition = sensor_ns.class_('SensorInRangeCondition', Filter)

SENSOR_SCHEMA = cv.MQTT_COMPONENT_SCHEMA.extend({
    cv.GenerateID(CONF_MQTT_ID): cv.declare_variable_id(MQTTSensorComponent),

@@ -107,8 +110,8 @@ SENSOR_SCHEMA = cv.MQTT_COMPONENT_SCHEMA.extend({
    }),
    vol.Optional(CONF_ON_VALUE_RANGE): automation.validate_automation({
        cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(ValueRangeTrigger),
        vol.Optional(CONF_ABOVE): vol.Coerce(float),
        vol.Optional(CONF_BELOW): vol.Coerce(float),
        vol.Optional(CONF_ABOVE): cv.float_,
        vol.Optional(CONF_BELOW): cv.float_,
    }, cv.has_at_least_one_key(CONF_ABOVE, CONF_BELOW)),
})

@@ -189,6 +192,7 @@ def setup_sensor_core_(sensor_var, mqtt_var, config):
    for conf in config.get(CONF_ON_VALUE_RANGE, []):
        rhs = sensor_var.make_value_range_trigger()
        trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
        add(App.register_component(trigger))
        if CONF_ABOVE in conf:
            for template_ in templatable(conf[CONF_ABOVE], float_, float_):
                yield

@@ -223,6 +227,30 @@ def register_sensor(var, config):
BUILD_FLAGS = '-DUSE_SENSOR'


CONF_SENSOR_IN_RANGE = 'sensor.in_range'
SENSOR_IN_RANGE_CONDITION_SCHEMA = vol.All({
    vol.Required(CONF_ID): cv.use_variable_id(Sensor),
    vol.Optional(CONF_ABOVE): cv.float_,
    vol.Optional(CONF_BELOW): cv.float_,
}, cv.has_at_least_one_key(CONF_ABOVE, CONF_BELOW))


@CONDITION_REGISTRY.register(CONF_SENSOR_IN_RANGE, SENSOR_IN_RANGE_CONDITION_SCHEMA)
def sensor_in_range_to_code(config, condition_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_sensor_in_range_condition(template_arg)
    type = SensorInRangeCondition.template(arg_type)
    cond = Pvariable(condition_id, rhs, type=type)

    if CONF_ABOVE in config:
        add(cond.set_min(config[CONF_ABOVE]))
    if CONF_BELOW in config:
        add(cond.set_max(config[CONF_BELOW]))

    yield cond


def core_to_hass_config(data, config):
    ret = mqtt.build_hass_config(data, 'sensor', config, include_state=True, include_command=False)
    if ret is None:
@@ -92,8 +92,7 @@ STEPPER_SET_TARGET_ACTION_SCHEMA = vol.Schema({

@ACTION_REGISTRY.register(CONF_STEPPER_SET_TARGET, STEPPER_SET_TARGET_ACTION_SCHEMA)
def stepper_set_target_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def stepper_set_target_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_set_target_action(template_arg)

@@ -113,8 +112,7 @@ STEPPER_REPORT_POSITION_ACTION_SCHEMA = vol.Schema({

@ACTION_REGISTRY.register(CONF_STEPPER_REPORT_POSITION, STEPPER_REPORT_POSITION_ACTION_SCHEMA)
def stepper_report_position_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def stepper_report_position_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_report_position_action(template_arg)
esphomeyaml/components/substitutions.py (new file, 136 lines)

@@ -0,0 +1,136 @@
import logging
import re

import voluptuous as vol

from esphomeyaml import core
import esphomeyaml.config_validation as cv
from esphomeyaml.core import EsphomeyamlError

_LOGGER = logging.getLogger(__name__)

CONF_SUBSTITUTIONS = 'substitutions'

VALID_SUBSTITUTIONS_CHARACTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' \
                                 '0123456789_'


def validate_substitution_key(value):
    value = cv.string(value)
    if not value:
        raise vol.Invalid("Substitution key must not be empty")
    if value[0].isdigit():
        raise vol.Invalid("First character in substitutions cannot be a digit.")
    if value[0] == '$':
        value = value[1:]
    for char in value:
        if char not in VALID_SUBSTITUTIONS_CHARACTERS:
            raise vol.Invalid(
                u"Substitution must only consist of upper/lowercase characters, the underscore "
                u"and numbers. The character '{}' cannot be used".format(char))
    return value


CONFIG_SCHEMA = vol.Schema({
    validate_substitution_key: cv.string_strict,
})


def to_code(config):
    pass


VARIABLE_PROG = re.compile('\\$([{0}]+|\\{{[{0}]*\\}})'.format(VALID_SUBSTITUTIONS_CHARACTERS))


def _expand_substitutions(substitutions, value, path):
    if u'$' not in value:
        return value

    orig_value = value

    i = 0
    while True:
        m = VARIABLE_PROG.search(value, i)
        if not m:
            # Nothing more to match. Done
            break

        i, j = m.span(0)
        name = m.group(1)
        if name.startswith(u'{') and name.endswith(u'}'):
            name = name[1:-1]
        if name not in substitutions:
            _LOGGER.warn(u"Found '%s' (see %s) which looks like a substitution, but '%s' was not "
                         u"declared", orig_value, u'->'.join(str(x) for x in path), name)
            i = j
            continue

        sub = substitutions[name]
        tail = value[j:]
        value = value[:i] + sub
        i = len(value)
        value += tail
    return value


def _substitute_item(substitutions, item, path):
    if isinstance(item, list):
        for i, it in enumerate(item):
            sub = _substitute_item(substitutions, it, path + [i])
            if sub is not None:
                item[i] = sub
    elif isinstance(item, dict):
        replace_keys = []
        for k, v in item.iteritems():
            if path or k != CONF_SUBSTITUTIONS:
                sub = _substitute_item(substitutions, k, path + [k])
                if sub is not None:
                    replace_keys.append((k, sub))
            sub = _substitute_item(substitutions, v, path + [k])
            if sub is not None:
                item[k] = sub
        for old, new in replace_keys:
            item[new] = item[old]
            del item[old]
    elif isinstance(item, str):
        sub = _expand_substitutions(substitutions, item, path)
        if sub != item:
            return sub
    elif isinstance(item, core.Lambda):
        sub = _expand_substitutions(substitutions, item.value, path)
        if sub != item:
            item.value = sub
    return None


def do_substitution_pass(config):
    if CONF_SUBSTITUTIONS not in config:
        return config

    substitutions = config[CONF_SUBSTITUTIONS]
    if not isinstance(substitutions, dict):
        raise EsphomeyamlError(u"Substitutions must be a key to value mapping, got {}"
                               u"".format(type(substitutions)))

    key = ''
    try:
        replace_keys = []
        for key, value in substitutions.iteritems():
            sub = validate_substitution_key(key)
            if sub != key:
                replace_keys.append((key, sub))
            substitutions[key] = cv.string_strict(value)
        for old, new in replace_keys:
            substitutions[new] = substitutions[old]
            del substitutions[old]
    except vol.Invalid as err:
        from esphomeyaml.config import _format_config_error
        err.path.append(key)

        raise EsphomeyamlError(_format_config_error(err, CONF_SUBSTITUTIONS, substitutions))

    config[CONF_SUBSTITUTIONS] = substitutions
    _substitute_item(substitutions, config, [])

    return config
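The substitution pass above rewrites $name and ${name} references throughout the configuration using the values from the substitutions block. A self-contained sketch of the string-expansion step, reusing the same regular expression shape as VARIABLE_PROG (this re-implements only the expansion for illustration; the real pass also walks nested dicts, lists and lambdas, and is not part of the commit):

import re

VALID = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
VARIABLE_PROG = re.compile('\\$([{0}]+|\\{{[{0}]*\\}})'.format(VALID))

def expand(substitutions, value):
    # Replace $key and ${key} occurrences; unknown keys are left untouched.
    def replace(match):
        name = match.group(1)
        if name.startswith('{') and name.endswith('}'):
            name = name[1:-1]
        return substitutions.get(name, match.group(0))
    return VARIABLE_PROG.sub(replace, value)

subs = {'devicename': 'livingroom', 'updateinterval': '60s'}
print(expand(subs, 'sensor ${devicename}_temp, every $updateinterval'))
# sensor livingroom_temp, every 60s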
@@ -1,6 +1,6 @@
import voluptuous as vol

from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY, CONDITION_REGISTRY, Condition
from esphomeyaml.components import mqtt
from esphomeyaml.components.mqtt import setup_mqtt_component
import esphomeyaml.config_validation as cv

@@ -22,6 +22,8 @@ ToggleAction = switch_ns.class_('ToggleAction', Action)
TurnOffAction = switch_ns.class_('TurnOffAction', Action)
TurnOnAction = switch_ns.class_('TurnOnAction', Action)

SwitchCondition = switch_ns.class_('SwitchCondition', Condition)

SWITCH_SCHEMA = cv.MQTT_COMMAND_COMPONENT_SCHEMA.extend({
    cv.GenerateID(CONF_MQTT_ID): cv.declare_variable_id(MQTTSwitchComponent),
    vol.Optional(CONF_ICON): cv.icon,

@@ -65,8 +67,7 @@ SWITCH_TOGGLE_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_SWITCH_TOGGLE, SWITCH_TOGGLE_ACTION_SCHEMA)
def switch_toggle_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def switch_toggle_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_toggle_action(template_arg)

@@ -81,8 +82,7 @@ SWITCH_TURN_OFF_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_SWITCH_TURN_OFF, SWITCH_TURN_OFF_ACTION_SCHEMA)
def switch_turn_off_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def switch_turn_off_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_off_action(template_arg)

@@ -97,8 +97,7 @@ SWITCH_TURN_ON_ACTION_SCHEMA = maybe_simple_id({

@ACTION_REGISTRY.register(CONF_SWITCH_TURN_ON, SWITCH_TURN_ON_ACTION_SCHEMA)
def switch_turn_on_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
def switch_turn_on_to_code(config, action_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_on_action(template_arg)

@@ -106,6 +105,36 @@ def switch_turn_on_to_code(config, action_id, arg_type):
    yield Pvariable(action_id, rhs, type=type)


CONF_SWITCH_IS_ON = 'switch.is_on'
SWITCH_IS_ON_CONDITION_SCHEMA = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(Switch),
})


@CONDITION_REGISTRY.register(CONF_SWITCH_IS_ON, SWITCH_IS_ON_CONDITION_SCHEMA)
def switch_is_on_to_code(config, condition_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_switch_is_on_condition(template_arg)
    type = SwitchCondition.template(arg_type)
    yield Pvariable(condition_id, rhs, type=type)


CONF_SWITCH_IS_OFF = 'switch.is_off'
SWITCH_IS_OFF_CONDITION_SCHEMA = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(Switch),
})


@CONDITION_REGISTRY.register(CONF_SWITCH_IS_OFF, SWITCH_IS_OFF_CONDITION_SCHEMA)
def switch_is_off_to_code(config, condition_id, arg_type, template_arg):
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_switch_is_off_condition(template_arg)
    type = SwitchCondition.template(arg_type)
    yield Pvariable(condition_id, rhs, type=type)


def core_to_hass_config(data, config):
    ret = mqtt.build_hass_config(data, 'switch', config, include_state=True, include_command=True)
    if ret is None:
@@ -8,6 +8,7 @@ import logging
import voluptuous as vol

from esphomeyaml import core, core_config, yaml_util
from esphomeyaml.components import substitutions
from esphomeyaml.const import CONF_ESPHOMEYAML, CONF_PLATFORM, CONF_WIFI, ESP_PLATFORMS
from esphomeyaml.core import CORE, EsphomeyamlError
from esphomeyaml.helpers import color

@@ -325,6 +326,7 @@ def load_config():
    except OSError:
        raise EsphomeyamlError(u"Could not read configuration file at {}".format(CORE.config_path))
    CORE.raw_config = config
    config = substitutions.do_substitution_pass(config)
    core_config.preload_core_config(config)

    try:

@@ -338,16 +340,15 @@ def load_config():
    return result


def line_info(obj, **kwargs):
def line_info(obj):
    """Display line config source."""
    if hasattr(obj, '__config_file__'):
        return color('cyan', "[source {}:{}]"
                     .format(obj.__config_file__, obj.__line__ or '?'),
                     **kwargs)
                     .format(obj.__config_file__, obj.__line__ or '?'))
    return '?'


def dump_dict(layer, indent_count=0, listi=False, **kwargs):
def dump_dict(layer, indent_count=0, listi=False):
    def sort_dict_key(val):
        """Return the dict key for sorting."""
        key = str.lower(val[0])

@@ -359,7 +360,7 @@ def dump_dict(layer, indent_count=0, listi=False, **kwargs):
    if isinstance(layer, dict):
        for key, value in sorted(layer.items(), key=sort_dict_key):
            if isinstance(value, (dict, list)):
                safe_print(u"{} {}: {}".format(indent_str, key, line_info(value, **kwargs)))
                safe_print(u"{} {}: {}".format(indent_str, key, line_info(value)))
                dump_dict(value, indent_count + 2)
            else:
                safe_print(u"{} {}: {}".format(indent_str, key, value))
@@ -22,8 +22,9 @@ _LOGGER = logging.getLogger(__name__)
# pylint: disable=invalid-name

port = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
positive_float = vol.All(vol.Coerce(float), vol.Range(min=0))
zero_to_one_float = vol.All(vol.Coerce(float), vol.Range(min=0, max=1))
float_ = vol.Coerce(float)
positive_float = vol.All(float_, vol.Range(min=0))
zero_to_one_float = vol.All(float_, vol.Range(min=0, max=1))
positive_int = vol.All(vol.Coerce(int), vol.Range(min=0))
positive_not_null_int = vol.All(vol.Coerce(int), vol.Range(min=0, min_included=False))

@@ -155,8 +156,9 @@ def variable_id_str_(value):
        raise vol.Invalid("Dashes are not supported in IDs, please use underscores instead.")
    for char in value:
        if char != '_' and not char.isalnum():
            raise vol.Invalid(u"IDs must only consist of upper/lowercase characters and numbers."
                              u"The character '{}' cannot be used".format(char))
            raise vol.Invalid(u"IDs must only consist of upper/lowercase characters, the underscore"
                              u"character and numbers. The character '{}' cannot be used"
                              u"".format(char))
    if value in RESERVED_IDS:
        raise vol.Invalid(u"ID {} is reserved internally and cannot be used".format(value))
    return value

@@ -258,12 +260,12 @@ TIME_PERIOD_ERROR = "Time period {} should be format number + unit, for example

time_period_dict = vol.All(
    dict, vol.Schema({
        'days': vol.Coerce(float),
        'hours': vol.Coerce(float),
        'minutes': vol.Coerce(float),
        'seconds': vol.Coerce(float),
        'milliseconds': vol.Coerce(float),
        'microseconds': vol.Coerce(float),
        'days': float_,
        'hours': float_,
        'minutes': float_,
        'seconds': float_,
        'milliseconds': float_,
        'microseconds': float_,
    }),
    has_at_least_one_key('days', 'hours', 'minutes',
                         'seconds', 'milliseconds', 'microseconds'),

@@ -701,5 +703,5 @@ MQTT_COMMAND_COMPONENT_SCHEMA = MQTT_COMPONENT_SCHEMA.extend({
})

COMPONENT_SCHEMA = vol.Schema({
    vol.Optional(CONF_SETUP_PRIORITY): vol.Coerce(float)
    vol.Optional(CONF_SETUP_PRIORITY): float_
})
@@ -91,6 +91,7 @@ CONF_ABOVE = 'above'
CONF_BELOW = 'below'
CONF_ON = 'on'
CONF_IF = 'if'
CONF_WHILE = 'while'
CONF_THEN = 'then'
CONF_BINARY = 'binary'
CONF_WHITE = 'white'

@@ -384,6 +385,7 @@ CONF_PIN_C = 'pin_c'
CONF_PIN_D = 'pin_d'
CONF_SLEEP_WHEN_DONE = 'sleep_when_done'
CONF_STEP_MODE = 'step_mode'
CONF_COMPONENTS = 'components'

ALLOWED_NAME_CHARS = u'abcdefghijklmnopqrstuvwxyz0123456789_'
ARDUINO_VERSION_ESP32_DEV = 'https://github.com/platformio/platform-espressif32.git#feature/stage'
@@ -209,11 +209,36 @@ class TimePeriodSeconds(TimePeriod):
    pass


LAMBDA_PROG = re.compile(r'id\(\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\)(\.?)')


class Lambda(object):
    def __init__(self, value):
        self.value = value
        self.parts = re.split(r'id\(\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\)(\.?)', value)
        self.requires_ids = [ID(self.parts[i]) for i in range(1, len(self.parts), 3)]
        self._value = value
        self._parts = None
        self._requires_ids = None

    @property
    def parts(self):
        if self._parts is None:
            self._parts = re.split(LAMBDA_PROG, self._value)
        return self._parts

    @property
    def requires_ids(self):
        if self._requires_ids is None:
            self._requires_ids = [ID(self.parts[i]) for i in range(1, len(self.parts), 3)]
        return self._requires_ids

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        self._value = value
        self._parts = None
        self._requires_ids = None

    def __str__(self):
        return self.value
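The rewritten Lambda class defers the id(...) parsing until parts/requires_ids are first accessed and re-parses whenever value is reassigned, which is what the substitution pass relies on. A standalone sketch of the same splitting behaviour, reduced to what this hunk shows (the ID wrapper is replaced by a plain string; illustration only, not part of the commit):

import re

LAMBDA_PROG = re.compile(r'id\(\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\)(\.?)')

value = 'return id(my_sensor).state > id(threshold);'
parts = re.split(LAMBDA_PROG, value)
requires_ids = [parts[i] for i in range(1, len(parts), 3)]

print(parts)
# ['return ', 'my_sensor', '.', 'state > ', 'threshold', '', ';']
print(requires_ids)
# ['my_sensor', 'threshold']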
@@ -167,7 +167,7 @@ CONFIG_SCHEMA = vol.Schema({
    vol.Optional(CONF_BOARD_FLASH_MODE): cv.one_of(*BUILD_FLASH_MODES, lower=True),
    vol.Optional(CONF_ON_BOOT): automation.validate_automation({
        cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(StartupTrigger),
        vol.Optional(CONF_PRIORITY): vol.Coerce(float),
        vol.Optional(CONF_PRIORITY): cv.float_,
    }),
    vol.Optional(CONF_ON_SHUTDOWN): automation.validate_automation({
        cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(ShutdownTrigger),
@@ -464,7 +464,8 @@ class MockObj(Expression):
        return obj

    def class_(self, name, *parents):  # type: (str, *MockObjClass) -> MockObjClass
        obj = MockObjClass(u'{}{}{}'.format(self.base, self.op, name), u'.', parents=parents)
        op = '' if self.op == '' else '::'
        obj = MockObjClass(u'{}{}{}'.format(self.base, op, name), u'.', parents=parents)
        obj.requires.append(self)
        return obj
@@ -1,7 +1,6 @@
# pylint: disable=wrong-import-position
from __future__ import print_function

import binascii
import collections
import hmac
import json

@@ -24,8 +23,9 @@ import tornado.websocket

from esphomeyaml import const
from esphomeyaml.__main__ import get_serial_ports
from esphomeyaml.helpers import run_system_command, mkdir_p
from esphomeyaml.storage_json import StorageJSON, ext_storage_path
from esphomeyaml.helpers import mkdir_p, run_system_command
from esphomeyaml.storage_json import EsphomeyamlStorageJSON, StorageJSON, \
    esphomeyaml_storage_path, ext_storage_path
from esphomeyaml.util import shlex_quote

# pylint: disable=unused-import, wrong-import-order
@@ -273,7 +273,17 @@ class DashboardEntry(object):
    def update_available(self):
        if self.storage is None:
            return True
        return self.storage.esphomeyaml_version != const.__version__
        return self.update_old != self.update_new

    @property
    def update_old(self):
        if self.storage is None:
            return ''
        return self.storage.esphomeyaml_version or ''

    @property
    def update_new(self):
        return const.__version__


class MainRequestHandler(BaseHandler):

@@ -464,6 +474,11 @@ def make_app(debug=False):
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)

    class StaticFileHandler(tornado.web.StaticFileHandler):
        def set_extra_headers(self, path):
            if debug:
                self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')

    static_path = os.path.join(os.path.dirname(__file__), 'static')
    app = tornado.web.Application([
        (r"/", MainRequestHandler),

@@ -480,7 +495,7 @@ def make_app(debug=False):
        (r"/serial-ports", SerialPortRequestHandler),
        (r"/ping", PingRequestHandler),
        (r"/wizard.html", WizardRequestHandler),
        (r'/static/(.*)', tornado.web.StaticFileHandler, {'path': static_path}),
        (r'/static/(.*)', StaticFileHandler, {'path': static_path}),
    ], debug=debug, cookie_secret=COOKIE_SECRET, log_function=log_function)
    return app

@@ -542,14 +557,12 @@ def start_web_server(args):
        PASSWORD_DIGEST = hmac.new(args.password).digest()

    if USING_HASSIO_AUTH or USING_PASSWORD:
        cookie_secret_path = os.path.join(CONFIG_DIR, '.esphomeyaml', 'cookie_secret')
        if os.path.exists(cookie_secret_path):
            with open(cookie_secret_path, 'r') as f:
                COOKIE_SECRET = f.read()
        else:
            COOKIE_SECRET = binascii.hexlify(os.urandom(64))
            with open(cookie_secret_path, 'w') as f:
                f.write(COOKIE_SECRET)
        path = esphomeyaml_storage_path(CONFIG_DIR)
        storage = EsphomeyamlStorageJSON.load(path)
        if storage is None:
            storage = EsphomeyamlStorageJSON.get_default()
            storage.save(path)
        COOKIE_SECRET = storage.cookie_secret

    _LOGGER.info("Starting dashboard web server on port %s and configuration dir %s...",
                 args.port, CONFIG_DIR)
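The dashboard previously persisted its cookie secret in a bare file; it now goes through EsphomeyamlStorageJSON, creating a default record on first start. A simplified illustration of that load-or-create pattern with a plain JSON file (the file path and record shape here are assumptions, not the real storage schema):

import binascii
import json
import os

def load_or_create_storage(path):
    # Return the stored record, creating one with a fresh cookie secret
    # the first time the dashboard starts.
    if os.path.exists(path):
        with open(path) as f:
            return json.load(f)
    storage = {'cookie_secret': binascii.hexlify(os.urandom(64)).decode()}
    with open(path, 'w') as f:
        json.dump(storage, f)
    return storage

storage = load_or_create_storage('/tmp/esphomeyaml_storage.json')
cookie_secret = storage['cookie_secret']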
@@ -212,3 +212,8 @@ ul.stepper:not(.horizontal) .step.active::before, ul.stepper:not(.horizontal) .s
  margin-right: -4.5px;
  margin-left: -5.5px;
}

.flash-using-esphomeflasher {
  vertical-align: middle;
  color: #666 !important;
}
@@ -157,7 +157,7 @@ document.querySelectorAll(".action-show-logs").forEach((showLogs) => {
    const data = JSON.parse(event.data);
    if (data.event === "line") {
      const msg = data.data;
      log.innerHTML += colorReplace(msg);
      log.insertAdjacentHTML('beforeend', colorReplace(msg));
    } else if (data.event === "exit") {
      if (data.code === 0) {
        M.toast({html: "Program exited successfully."});

@@ -205,7 +205,7 @@ document.querySelectorAll(".action-upload").forEach((upload) => {
    const data = JSON.parse(event.data);
    if (data.event === "line") {
      const msg = data.data;
      log.innerHTML += colorReplace(msg);
      log.insertAdjacentHTML('beforeend', colorReplace(msg));
    } else if (data.event === "exit") {
      if (data.code === 0) {
        M.toast({html: "Program exited successfully."});

@@ -254,7 +254,7 @@ document.querySelectorAll(".action-validate").forEach((upload) => {
    const data = JSON.parse(event.data);
    if (data.event === "line") {
      const msg = data.data;
      log.innerHTML += colorReplace(msg);
      log.insertAdjacentHTML('beforeend', colorReplace(msg));
    } else if (data.event === "exit") {
      if (data.code === 0) {
        M.toast({

@@ -311,7 +311,7 @@ document.querySelectorAll(".action-compile").forEach((upload) => {
    const data = JSON.parse(event.data);
    if (data.event === "line") {
      const msg = data.data;
      log.innerHTML += colorReplace(msg);
      log.insertAdjacentHTML('beforeend', colorReplace(msg));
    } else if (data.event === "exit") {
      if (data.code === 0) {
        M.toast({html: "Program exited successfully."});

@@ -367,7 +367,7 @@ document.querySelectorAll(".action-clean-mqtt").forEach((btn) => {
    const data = JSON.parse(event.data);
    if (data.event === "line") {
      const msg = data.data;
      log.innerHTML += colorReplace(msg);
      log.insertAdjacentHTML('beforeend', colorReplace(msg));
    } else if (data.event === "exit") {
      stopLogsButton.innerHTML = "Close";
      stopped = true;

@@ -409,7 +409,7 @@ document.querySelectorAll(".action-clean").forEach((btn) => {
    const data = JSON.parse(event.data);
    if (data.event === "line") {
      const msg = data.data;
      log.innerHTML += colorReplace(msg);
      log.insertAdjacentHTML('beforeend', colorReplace(msg));
    } else if (data.event === "exit") {
      if (data.code === 0) {
        M.toast({html: "Program exited successfully."});

@@ -457,7 +457,7 @@ document.querySelectorAll(".action-hass-config").forEach((btn) => {
    const data = JSON.parse(event.data);
    if (data.event === "line") {
      const msg = data.data;
      log.innerHTML += colorReplace(msg);
      log.insertAdjacentHTML('beforeend', colorReplace(msg));
    } else if (data.event === "exit") {
      if (data.code === 0) {
        M.toast({html: "Program exited successfully."});
@ -67,7 +67,7 @@
        {% if entry.update_available %}
          <p class="update-available" data-node="{{ entry.filename }}">
            <i class="material-icons">system_update</i>
            Update Available!
            Update Available! {{ entry.update_old }} ➡️{{ entry.update_new }}
          </p>
        {% end %}
      </div>

@ -110,6 +110,10 @@
        </div>
      </div>
      <div class="modal-footer">
        <a href="https://esphomelib.com/esphomeyaml/guides/faq.html#i-can-t-get-flashing-over-usb-to-work" target="_blank"
           class="tooltipped" data-position="left" data-tooltip="Flash using esphomeflasher">
          <i class="material-icons flash-using-esphomeflasher">help_outline</i>
        </a>
        <a class="modal-close waves-effect waves-green btn-flat stop-logs">Stop</a>
      </div>
    </div>

@ -122,6 +126,10 @@
        </div>
      </div>
      <div class="modal-footer">
        <a href="https://esphomelib.com/esphomeyaml/guides/faq.html#i-can-t-get-flashing-over-usb-to-work" target="_blank"
           class="tooltipped" data-position="left" data-tooltip="Flash using esphomeflasher">
          <i class="material-icons flash-using-esphomeflasher">help_outline</i>
        </a>
        <a class="modal-close waves-effect waves-green btn-flat disabled download-binary">Download Binary</a>
        <a class="modal-close waves-effect waves-green btn-flat stop-logs">Stop</a>
      </div>

@ -427,7 +435,7 @@
      </form>
    </div>
    <div class="modal-footer">
      <a href="#!" class="modal-close waves-effect waves-green btn-flat">Abort</a>
      <a class="modal-close waves-effect waves-green btn-flat">Abort</a>
    </div>
  </div>

@ -4,11 +4,11 @@
  <meta charset="UTF-8">
  <title>esphomeyaml Dashboard</title>
  <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
  <link rel="stylesheet" href="/static/materialize.min.css">
  <link rel="stylesheet" href="/static/esphomeyaml.css">
  <link rel="shortcut icon" href="/static/favicon.ico">
  <link rel="stylesheet" href="/static/materialize.min.css?v=1">
  <link rel="stylesheet" href="/static/esphomeyaml.css?v=1">
  <link rel="shortcut icon" href="/static/favicon.ico?v=1">

  <script src="/static/materialize.min.js"></script>
  <script src="/static/materialize.min.js?v=1"></script>

  <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
</head>

@ -263,7 +263,7 @@ def resolve_ip_address(host):
    return ip


def run_ota(remote_host, remote_port, password, filename):
def run_ota_impl_(remote_host, remote_port, password, filename):
    ip = resolve_ip_address(remote_host)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(10.0)

@ -287,6 +287,14 @@ def run_ota(remote_host, remote_port, password, filename):
    return 0


def run_ota(remote_host, remote_port, password, filename):
    try:
        return run_ota_impl_(remote_host, remote_port, password, filename)
    except OTAError as err:
        _LOGGER.error(err)
        return 1


def run_legacy_ota(verbose, host_port, remote_host, remote_port, password, filename):
    from esphomeyaml import espota

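A minimal usage sketch of the new wrapper above — the host, port, password and firmware path below are placeholders for illustration, not values from this commit:

    from esphomeyaml import espota2

    # run_ota now converts an OTAError into a logged message and a non-zero
    # return code instead of letting the exception propagate to the caller.
    result = espota2.run_ota('livingroom.local', 3232, 'my-ota-password',
                             '.esphomeyaml/livingroom/firmware.bin')
    if result != 0:
        # The error was already logged; the caller can decide whether to
        # retry, try another upload method, or abort.
        raise SystemExit(result)
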
@ -1,6 +1,10 @@
import binascii
import codecs
from datetime import datetime, timedelta
import json
import logging
import os
import threading

from esphomeyaml import const
from esphomeyaml.core import CORE

@ -11,6 +15,9 @@ from esphomeyaml.core import CoreType  # noqa
from typing import Any, Dict, Optional  # noqa


_LOGGER = logging.getLogger(__name__)


def storage_path():  # type: () -> str
    return CORE.relative_path('.esphomeyaml', '{}.json'.format(CORE.config_filename))


@ -19,11 +26,15 @@ def ext_storage_path(base_path, config_filename):  # type: (str, str) -> str
    return os.path.join(base_path, '.esphomeyaml', '{}.json'.format(config_filename))


def esphomeyaml_storage_path(base_path):  # type: (str) -> str
    return os.path.join(base_path, '.esphomeyaml', 'esphomeyaml.json')


# pylint: disable=too-many-instance-attributes
class StorageJSON(object):
    def __init__(self, storage_version, name, esphomelib_version, esphomeyaml_version,
                 src_version, arduino_version, address, esp_platform, board, build_path,
                 firmware_bin_path):
                 firmware_bin_path, use_legacy_ota):
        # Version of the storage JSON schema
        assert storage_version is None or isinstance(storage_version, int)
        self.storage_version = storage_version  # type: int

@ -50,6 +61,8 @@ class StorageJSON(object):
        self.build_path = build_path  # type: str
        # The absolute path to the firmware binary
        self.firmware_bin_path = firmware_bin_path  # type: str
        # Whether to use legacy OTA, will be off after the first successful flash
        self.use_legacy_ota = use_legacy_ota

    def as_dict(self):
        return {

@ -64,6 +77,7 @@ class StorageJSON(object):
            'board': self.board,
            'build_path': self.build_path,
            'firmware_bin_path': self.firmware_bin_path,
            'use_legacy_ota': self.use_legacy_ota,
        }

    def to_json(self):

@ -75,7 +89,7 @@ class StorageJSON(object):
            f_handle.write(self.to_json())

    @staticmethod
    def from_esphomeyaml_core(esph):  # type: (CoreType) -> StorageJSON
    def from_esphomeyaml_core(esph, old):  # type: (CoreType, Optional[StorageJSON]) -> StorageJSON
        return StorageJSON(
            storage_version=1,
            name=esph.name,

@ -88,6 +102,7 @@ class StorageJSON(object):
            board=esph.board,
            build_path=esph.build_path,
            firmware_bin_path=esph.firmware_bin,
            use_legacy_ota=True if old is None else old.use_legacy_ota,
        )

    @staticmethod

@ -105,6 +120,7 @@ class StorageJSON(object):
            board=board,
            build_path=None,
            firmware_bin_path=None,
            use_legacy_ota=False,
        )

    @staticmethod

@ -123,9 +139,10 @@ class StorageJSON(object):
        board = storage.get('board')
        build_path = storage.get('build_path')
        firmware_bin_path = storage.get('firmware_bin_path')
        use_legacy_ota = storage.get('use_legacy_ota')
        return StorageJSON(storage_version, name, esphomelib_version, esphomeyaml_version,
                           src_version, arduino_version, address, esp_platform, board, build_path,
                           firmware_bin_path)
                           firmware_bin_path, use_legacy_ota)

    @staticmethod
    def load(path):  # type: (str) -> Optional[StorageJSON]

@ -136,3 +153,144 @@ class StorageJSON(object):

    def __eq__(self, o):  # type: (Any) -> bool
        return isinstance(o, StorageJSON) and self.as_dict() == o.as_dict()


class EsphomeyamlStorageJSON(object):
    def __init__(self, storage_version, cookie_secret, last_update_check,
                 remote_version):
        # Version of the storage JSON schema
        assert storage_version is None or isinstance(storage_version, int)
        self.storage_version = storage_version  # type: int
        # The cookie secret for the dashboard
        self.cookie_secret = cookie_secret  # type: str
        # The last time esphomeyaml checked for an update as an isoformat encoded str
        self.last_update_check_str = last_update_check  # type: str
        # Cache of the version gotten in the last version check
        self.remote_version = remote_version  # type: Optional[str]

    def as_dict(self):  # type: () -> dict
        return {
            'storage_version': self.storage_version,
            'cookie_secret': self.cookie_secret,
            'last_update_check': self.last_update_check_str,
            'remote_version': self.remote_version,
        }

    @property
    def last_update_check(self):  # type: () -> Optional[datetime]
        try:
            return datetime.strptime(self.last_update_check_str, "%Y-%m-%dT%H:%M:%S")
        except Exception:  # pylint: disable=broad-except
            return None

    @last_update_check.setter
    def last_update_check(self, new):  # type: (datetime) -> None
        self.last_update_check_str = new.strftime("%Y-%m-%dT%H:%M:%S")

    def to_json(self):  # type: () -> dict
        return json.dumps(self.as_dict(), indent=2) + u'\n'

    def save(self, path):  # type: (str) -> None
        mkdir_p(os.path.dirname(path))
        with codecs.open(path, 'w', encoding='utf-8') as f_handle:
            f_handle.write(self.to_json())

    @staticmethod
    def _load_impl(path):  # type: (str) -> Optional[EsphomeyamlStorageJSON]
        with codecs.open(path, 'r', encoding='utf-8') as f_handle:
            text = f_handle.read()
        storage = json.loads(text, encoding='utf-8')
        storage_version = storage['storage_version']
        cookie_secret = storage.get('cookie_secret')
        last_update_check = storage.get('last_update_check')
        remote_version = storage.get('remote_version')
        return EsphomeyamlStorageJSON(storage_version, cookie_secret, last_update_check,
                                      remote_version)

    @staticmethod
    def load(path):  # type: (str) -> Optional[EsphomeyamlStorageJSON]
        try:
            return EsphomeyamlStorageJSON._load_impl(path)
        except Exception:  # pylint: disable=broad-except
            return None

    @staticmethod
    def get_default():  # type: () -> EsphomeyamlStorageJSON
        return EsphomeyamlStorageJSON(
            storage_version=1,
            cookie_secret=binascii.hexlify(os.urandom(64)),
            last_update_check=None,
            remote_version=None,
        )

    def __eq__(self, o):  # type: (Any) -> bool
        return isinstance(o, EsphomeyamlStorageJSON) and self.as_dict() == o.as_dict()

    @property
    def should_do_esphomeyaml_update_check(self):  # type: () -> bool
        if self.last_update_check is None:
            return True
        return self.last_update_check + timedelta(days=3) < datetime.utcnow()


class CheckForUpdateThread(threading.Thread):
    def __init__(self, path):
        threading.Thread.__init__(self)
        self._path = path

    @property
    def docs_base(self):
        return 'https://beta.esphomelib.com' if 'b' in const.__version__ else \
            'https://esphomelib.com'

    def fetch_remote_version(self):
        import requests

        storage = EsphomeyamlStorageJSON.load(self._path) or \
            EsphomeyamlStorageJSON.get_default()
        if not storage.should_do_esphomeyaml_update_check:
            return storage

        req = requests.get('{}/_static/version'.format(self.docs_base))
        req.raise_for_status()
        storage.remote_version = req.text
        storage.last_update_check = datetime.utcnow()
        storage.save(self._path)
        return storage

    @staticmethod
    def format_version(ver):
        vstr = '.'.join(map(str, ver.version))
        if ver.prerelease:
            vstr += ver.prerelease[0] + str(ver.prerelease[1])
        return vstr

    def cmp_versions(self, storage):
        # pylint: disable=no-name-in-module, import-error
        from distutils.version import StrictVersion

        remote_version = StrictVersion(storage.remote_version)
        self_version = StrictVersion(const.__version__)
        if remote_version > self_version:
            _LOGGER.warn("*" * 80)
            _LOGGER.warn("A new version of esphomeyaml is available: %s (this is %s)",
                         self.format_version(remote_version), self.format_version(self_version))
            _LOGGER.warn("Changelog: %s/esphomeyaml/changelog/index.html", self.docs_base)
            _LOGGER.warn("Update Instructions: %s/esphomeyaml/guides/faq.html"
                         "#how-do-i-update-to-the-latest-version", self.docs_base)
            _LOGGER.warn("*" * 80)

    def run(self):
        try:
            storage = self.fetch_remote_version()
            self.cmp_versions(storage)
        except Exception:  # pylint: disable=broad-except
            pass


def start_update_check_thread(path):
    # dummy call to strptime as python 2.7 has a bug with strptime when importing from threads
    datetime.strptime('20180101', '%Y%m%d')
    thread = CheckForUpdateThread(path)
    thread.start()
    return thread

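As a rough illustration of the three-day throttle implemented by should_do_esphomeyaml_update_check above (the datetime values are made up, and the import path simply assumes the module layout shown in this commit):

    from datetime import datetime, timedelta
    from esphomeyaml.storage_json import EsphomeyamlStorageJSON

    storage = EsphomeyamlStorageJSON.get_default()
    assert storage.should_do_esphomeyaml_update_check      # no check recorded yet

    storage.last_update_check = datetime.utcnow()
    assert not storage.should_do_esphomeyaml_update_check  # checked just now

    storage.last_update_check = datetime.utcnow() - timedelta(days=4)
    assert storage.should_do_esphomeyaml_update_check      # older than three days
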
@ -184,16 +184,36 @@ def migrate_src_version(old, new):
        migrate_src_version_0_to_1()


def storage_should_clean(old, new):  # type: (StorageJSON, StorageJSON) -> bool
    if old is None:
        return True

    if old.esphomelib_version != new.esphomelib_version:
        return True
    if old.esphomeyaml_version != new.esphomeyaml_version:
        return True
    if old.src_version != new.src_version:
        return True
    if old.arduino_version != new.arduino_version:
        return True
    if old.board != new.board:
        return True
    if old.build_path != new.build_path:
        return True
    return False


def update_storage_json():
    path = storage_path()
    old = StorageJSON.load(path)
    new = StorageJSON.from_esphomeyaml_core(CORE)
    new = StorageJSON.from_esphomeyaml_core(CORE, old)
    if old == new:
        return

    old_src_version = old.src_version if old is not None else 0
    migrate_src_version(old_src_version, new.src_version)

    if storage_should_clean(old, new):
        _LOGGER.info("Core config or version changed, cleaning build files...")
        clean_build()

@ -348,7 +348,7 @@ def represent_time_period(dumper, data):


def represent_lambda(_, data):
    node = yaml.ScalarNode(tag='!lambda', value=data.value, style='>')
    node = yaml.ScalarNode(tag='!lambda', value=data.value, style='|')
    return node


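The representer change above switches the YAML block style used when a !lambda value is dumped from folded ('>') to literal ('|'): a literal block scalar reproduces the lambda body line for line, while a folded scalar relies on YAML's line-folding rules and is much harder to read for multi-line C++ snippets. A quick sketch of the difference with plain PyYAML — the lambda body here is invented for illustration:

    import yaml

    body = 'if (id(my_sensor).state > 25.0) {\n  return true;\n}\nreturn false;\n'

    # Literal block style: every line of the snippet appears verbatim in the output.
    print(yaml.dump(body, default_style='|'))
    # Folded block style: the emitter folds lines where YAML's folding rules allow it.
    print(yaml.dump(body, default_style='>'))
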