Mirror of https://github.com/esphome/esphome.git, synced 2024-11-25 00:18:11 +01:00
Schema dump (#1564)
* schema dump idea; accept boolean or anything default; accept null also for full dicts; added some common validators; more simple validators; support multi_conf; better handle automations; handle lists; removed not needed class; move to own folder; generalized for automations, lists, etc.; clean up; fix automations; made comment optional; basic docs support; added more docs; fixes docs handling; fix components parent; Fix inkplate 6 registration; Disable logging for vscode add on; better handle buses; keep extended order as in CONFIGs; disable comments; moved to scripts/build_jsonschema; added configurable decorators path handling fix; handle list_schema; fixes and cleanup; add jschema_extractor to maybe; no schema in git; add generated loggers list; lint (plus several intermediate "updates" commits)
* lint
This commit is contained in: parent d5cf4b7eac, commit 1e227e8051.
12 changed files with 879 additions and 50 deletions
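
How the pieces fit together (a sketch, not part of the diff): esphome/jsonschema.py defines decorators that are no-ops during normal validation, and script/build_jsonschema.py flips a module-level flag before importing anything else so that the decorators record schema information while components load. Roughly:

    # Mirrors the ordering used by script/build_jsonschema.py (see the new file below).
    import esphome.jsonschema as ejs

    # Must happen before esphome.config_validation or any component module is imported;
    # otherwise the decorators remain pass-throughs and record nothing.
    ejs.EnableJsonSchemaCollect = True

    import esphome.config_validation as cv  # imported only after the flag is set

    # As modules load, the collection dicts fill up:
    # ejs.hidden_schemas, ejs.extended_schemas, ejs.list_schemas,
    # ejs.registry_schemas, ejs.typed_schemas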

esphome/automation.py:

@@ -11,6 +11,7 @@ from esphome.const import (
     CONF_TIME,
 )
 from esphome.core import coroutine
+from esphome.jsonschema import jschema_extractor
 from esphome.util import Registry


@@ -21,7 +22,12 @@ def maybe_simple_id(*validators):
 def maybe_conf(conf, *validators):
     validator = cv.All(*validators)

+    @jschema_extractor("maybe")
     def validate(value):
+        # pylint: disable=comparison-with-callable
+        if value == jschema_extractor:
+            return validator
+
         if isinstance(value, dict):
             return validator(value)
         with cv.remove_prepend_path([conf]):

@@ -103,7 +109,13 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
         # This should only happen with invalid configs, but let's have a nice error message.
         return [schema(value)]

+    @jschema_extractor("automation")
     def validator(value):
+        # hack to get the schema
+        # pylint: disable=comparison-with-callable
+        if value == jschema_extractor:
+            return schema
+
         value = validator_(value)
         if extra_validators is not None:
             value = cv.Schema([extra_validators])(value)
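
The decorator added above relies on a sentinel comparison: when a wrapped validator is called with jschema_extractor itself instead of a config value, it returns its inner schema. A rough sketch of how the generator uses this, with collection enabled (SomeComponent is a placeholder, not a real class):

    from esphome.jsonschema import jschema_extractor, hidden_schemas

    validator = maybe_conf(CONF_ID, cv.use_id(SomeComponent))  # any @jschema_extractor-decorated validator
    hidden_schemas[str(validator)]        # -> "maybe", recorded at decoration time
    inner = validator(jschema_extractor)  # -> the underlying cv.All(...) validator, no validation performed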

esphome/components/canbus/__init__.py:

@@ -12,7 +12,6 @@ CONF_USE_EXTENDED_ID = "use_extended_id"
 CONF_CANBUS_ID = "canbus_id"
 CONF_BIT_RATE = "bit_rate"
 CONF_ON_FRAME = "on_frame"
-CONF_CANBUS_SEND = "canbus.send"


 def validate_id(id_value, id_ext):

@@ -59,7 +58,7 @@ CAN_SPEEDS = {
     "1000KBPS": CanSpeed.CAN_1000KBPS,
 }

-CONFIG_SCHEMA = cv.Schema(
+CANBUS_SCHEMA = cv.Schema(
     {
         cv.GenerateID(): cv.declare_id(CanbusComponent),
         cv.Required(CONF_CAN_ID): cv.int_range(min=0, max=0x1FFFFFFF),

@@ -70,6 +69,13 @@ CONFIG_SCHEMA = cv.Schema(
                 cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(CanbusTrigger),
                 cv.GenerateID(CONF_CAN_ID): cv.int_range(min=0, max=0x1FFFFFFF),
                 cv.Optional(CONF_USE_EXTENDED_ID, default=False): cv.boolean,
+                cv.Optional(CONF_ON_FRAME): automation.validate_automation(
+                    {
+                        cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(CanbusTrigger),
+                        cv.GenerateID(CONF_CAN_ID): cv.int_range(min=0, max=0x1FFFFFFF),
+                        cv.Optional(CONF_USE_EXTENDED_ID, default=False): cv.boolean,
+                    }
+                ),
             }
         ),
     }

@@ -104,7 +110,7 @@ def register_canbus(var, config):

 # Actions
 @automation.register_action(
-    CONF_CANBUS_SEND,
+    "canbus.send",
     canbus_ns.class_("CanbusSendAction", automation.Action),
     cv.maybe_simple_value(
         {
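
Since validate_automation() is now decorated with @jschema_extractor("automation"), the on_frame option above becomes discoverable the same way. A sketch of what the generator sees, assuming the collection flag is on and the names from this module are in scope:

    on_frame = automation.validate_automation(
        {cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(CanbusTrigger)}
    )
    ejs.hidden_schemas[str(on_frame)]     # -> "automation"
    schema = on_frame(jschema_extractor)  # -> AUTOMATION_SCHEMA extended with the trigger keys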

Inkplate 6 display schema (file inferred from the commit message's "Fix inkplate 6 registration"):

@@ -87,7 +87,9 @@ CONFIG_SCHEMA = cv.All(
                 CONF_DISPLAY_DATA_7_PIN, default=27
             ): pins.internal_gpio_output_pin_schema,
         }
-    ).extend(cv.polling_component_schema("5s").extend(i2c.i2c_device_schema(0x48))),
+    )
+    .extend(cv.polling_component_schema("5s"))
+    .extend(i2c.i2c_device_schema(0x48)),
     cv.has_at_most_one_key(CONF_PAGES, CONF_LAMBDA),
 )


esphome/components/mcp2515 (canbus platform):

@@ -26,7 +26,7 @@ MCP_MODE = {
     "LISTENONLY": McpMode.CANCTRL_REQOP_LISTENONLY,
 }

-CONFIG_SCHEMA = canbus.CONFIG_SCHEMA.extend(
+CONFIG_SCHEMA = canbus.CANBUS_SCHEMA.extend(
     {
         cv.GenerateID(): cv.declare_id(mcp2515),
         cv.Optional(CONF_CLOCK, default="8MHZ"): cv.enum(CAN_CLOCK, upper=True),

esphome/components/remote_base/__init__.py:

@@ -29,6 +29,7 @@ from esphome.const import (
     CONF_RC_CODE_2,
 )
 from esphome.core import coroutine
+from esphome.jsonschema import jschema_extractor
 from esphome.util import Registry, SimpleRegistry

 AUTO_LOAD = ["binary_sensor"]

@@ -123,13 +124,16 @@ def validate_repeat(value):
         return validate_repeat({CONF_TIMES: value})


+BASE_REMOTE_TRANSMITTER_SCHEMA = cv.Schema(
+    {
+        cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
+        cv.Optional(CONF_REPEAT): validate_repeat,
+    }
+)
+
+
 def register_action(name, type_, schema):
-    validator = templatize(schema).extend(
-        {
-            cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
-            cv.Optional(CONF_REPEAT): validate_repeat,
-        }
-    )
+    validator = templatize(schema).extend(BASE_REMOTE_TRANSMITTER_SCHEMA)
     registerer = automation.register_action(
         f"remote_transmitter.transmit_{name}", type_, validator
     )

@@ -190,11 +194,15 @@ def validate_dumpers(value):
 def validate_triggers(base_schema):
     assert isinstance(base_schema, cv.Schema)

+    @jschema_extractor("triggers")
     def validator(config):
         added_keys = {}
         for key, (_, valid) in TRIGGER_REGISTRY.items():
             added_keys[cv.Optional(key)] = valid
         new_schema = base_schema.extend(added_keys)
+        # pylint: disable=comparison-with-callable
+        if config == jschema_extractor:
+            return new_schema
         return new_schema(config)

     return validator

esphome/config_validation.py:

@@ -46,6 +46,7 @@ from esphome.core import (
     TimePeriodMinutes,
 )
 from esphome.helpers import list_starts_with, add_class_to_obj
+from esphome.jsonschema import jschema_composite, jschema_registry, jschema_typed
 from esphome.voluptuous_schema import _Schema
 from esphome.yaml_util import make_data_base


@@ -306,6 +307,7 @@ def boolean(value):
     )


+@jschema_composite
 def ensure_list(*validators):
     """Validate this configuration option to be a list.


@@ -1341,6 +1343,7 @@ def extract_keys(schema):
     return keys


+@jschema_typed
 def typed_schema(schemas, **kwargs):
     """Create a schema that has a key to distinguish between schemas"""
     key = kwargs.pop("key", CONF_TYPE)

@@ -1442,6 +1445,7 @@ def validate_registry_entry(name, registry):
     )
     ignore_keys = extract_keys(base_schema)

+    @jschema_registry(registry)
     def validator(value):
         if isinstance(value, str):
             value = {value: {}}

@@ -1488,6 +1492,7 @@ def validate_registry(name, registry):
     return ensure_list(validate_registry_entry(name, registry))


+@jschema_composite
 def maybe_simple_value(*validators, **kwargs):
     key = kwargs.pop("key", CONF_VALUE)
     validator = All(*validators)
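
@jschema_composite leaves the decorated helpers' behaviour untouched; with collection enabled it only remembers which validators a returned composite was built from, keyed by the string form of the result. A sketch:

    import esphome.config_validation as cv
    import esphome.jsonschema as ejs

    brightness_list = cv.ensure_list(cv.percentage)  # validates exactly as before
    ejs.list_schemas[str(brightness_list)]           # -> (cv.percentage,), the original validators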

esphome/const.py:

@@ -180,7 +180,6 @@ CONF_ENERGY = "energy"
 CONF_ENTITY_ID = "entity_id"
 CONF_ESP8266_RESTORE_FROM_FLASH = "esp8266_restore_from_flash"
 CONF_ESPHOME = "esphome"
-CONF_ESPHOME_CORE_VERSION = "esphome_core_version"
 CONF_EVENT = "event"
 CONF_EXPIRE_AFTER = "expire_after"
 CONF_EXTERNAL_VCC = "external_vcc"

esphome/jsonschema.py (new file, 90 lines):

# These are helper decorators to help get schemas from some
# components which use voluptuous in a way where validation
# is hidden in local functions

# These decorators should not modify at all what the functions
# originally do.
#
# However there is a property to further disable decorator
# impact.
#
# This is set to true by script/build_jsonschema.py
# only, so data is collected (again functionality is not modified)
EnableJsonSchemaCollect = False

extended_schemas = {}
list_schemas = {}
registry_schemas = {}
hidden_schemas = {}
typed_schemas = {}


def jschema_extractor(validator_name):
    if EnableJsonSchemaCollect:

        def decorator(func):
            hidden_schemas[str(func)] = validator_name
            return func

        return decorator

    def dummy(f):
        return f

    return dummy


def jschema_extended(func):
    if EnableJsonSchemaCollect:

        def decorate(*args, **kwargs):
            ret = func(*args, **kwargs)
            assert len(args) == 2
            extended_schemas[str(ret)] = args
            return ret

        return decorate

    return func


def jschema_composite(func):
    if EnableJsonSchemaCollect:

        def decorate(*args, **kwargs):
            ret = func(*args, **kwargs)
            # args length might be 2, but 2nd is always validator
            list_schemas[str(ret)] = args
            return ret

        return decorate

    return func


def jschema_registry(registry):
    if EnableJsonSchemaCollect:

        def decorator(func):
            registry_schemas[str(func)] = registry
            return func

        return decorator

    def dummy(f):
        return f

    return dummy


def jschema_typed(func):
    if EnableJsonSchemaCollect:

        def decorate(*args, **kwargs):
            ret = func(*args, **kwargs)
            typed_schemas[str(ret)] = (args, kwargs)
            return ret

        return decorate

    return func
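
The decorators are deliberately inert unless the generator enables collection. A small sketch with a hypothetical validator (my_validator is not from the codebase):

    from esphome.jsonschema import jschema_extractor, hidden_schemas

    @jschema_extractor("maybe")
    def my_validator(value):
        return value

    # With EnableJsonSchemaCollect left at False (normal ESPHome runs), the decorator
    # returns the function unchanged and hidden_schemas stays empty.
    # With the flag set to True before this definition executes, the decorator records
    # hidden_schemas[str(my_validator)] == "maybe" and still returns the function unchanged.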

esphome/voluptuous_schema.py:

@@ -2,6 +2,7 @@ import difflib
 import itertools

 import voluptuous as vol
+from esphome.jsonschema import jschema_extended


 class ExtraKeysInvalid(vol.Invalid):

@@ -202,6 +203,7 @@ class _Schema(vol.Schema):
         self._extra_schemas.append(validator)
         return self

+    @jschema_extended
     # pylint: disable=signature-differs
     def extend(self, *schemas, **kwargs):
         extra = kwargs.pop("extra", None)
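
With @jschema_extended on _Schema.extend, every schema produced through .extend() can be traced back to its two parents, which is what the remote_base change above (hoisting the transmitter keys into BASE_REMOTE_TRANSMITTER_SCHEMA) exposes to the generator. Sketch, with placeholder schema names and collection enabled:

    import esphome.jsonschema as ejs

    merged = base_schema.extend(extra_schema)  # placeholders; extend() behaves exactly as before
    parent, extension = ejs.extended_schemas[str(merged)]
    # parent is base_schema (the schema extend() was called on), extension is extra_schema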

script/build_jsonschema.py (new file, 708 lines):

#!/usr/bin/env python3

import json
import argparse
import os
import re
from pathlib import Path
import voluptuous as vol

# NOTE: Cannot import other esphome components globally as a modification in jsonschema
# is needed before modules are loaded
import esphome.jsonschema as ejs

ejs.EnableJsonSchemaCollect = True

DUMP_COMMENTS = False

JSC_ACTION = "automation.ACTION_REGISTRY"
JSC_ALLOF = "allOf"
JSC_ANYOF = "anyOf"
JSC_COMMENT = "$comment"
JSC_CONDITION = "automation.CONDITION_REGISTRY"
JSC_DESCRIPTION = "description"
JSC_ONEOF = "oneOf"
JSC_PROPERTIES = "properties"
JSC_REF = "$ref"
SIMPLE_AUTOMATION = "simple_automation"

schema_names = {}
schema_registry = {}
components = {}
modules = {}
registries = []
pending_refs = []

definitions = {}
base_props = {}


parser = argparse.ArgumentParser()
parser.add_argument(
    "--output", default="esphome.json", help="Output filename", type=os.path.abspath
)

args = parser.parse_args()


def get_ref(definition):
    return {JSC_REF: "#/definitions/" + definition}


def is_ref(jschema):
    return isinstance(jschema, dict) and JSC_REF in jschema


def unref(jschema):
    return definitions[jschema[JSC_REF][len("#/definitions/") :]]


def add_definition_array_or_single_object(ref):
    return {JSC_ANYOF: [{"type": "array", "items": ref}, ref]}


def add_core():
    from esphome.core_config import CONFIG_SCHEMA

    base_props["esphome"] = get_jschema("esphome", CONFIG_SCHEMA.schema)


def add_buses():
    # uart
    from esphome.components.uart import UART_DEVICE_SCHEMA

    get_jschema("uart_bus", UART_DEVICE_SCHEMA)

    # spi
    from esphome.components.spi import spi_device_schema

    get_jschema("spi_bus", spi_device_schema(False))

    # i2c
    from esphome.components.i2c import i2c_device_schema

    get_jschema("i2c_bus", i2c_device_schema(None))


def add_registries():
    for domain, module in modules.items():
        add_module_registries(domain, module)


def add_module_registries(domain, module):
    from esphome.util import Registry

    for c in dir(module):
        m = getattr(module, c)
        if isinstance(m, Registry):
            add_registry(domain + "." + c, m)


def add_registry(registry_name, registry):
    validators = []
    registries.append((registry, registry_name))
    for name in registry.keys():
        schema = get_jschema(str(name), registry[name].schema, create_return_ref=False)
        if not schema:
            schema = {"type": "string"}
        o_schema = {"type": "object", JSC_PROPERTIES: {name: schema}}
        validators.append(o_schema)
    definitions[registry_name] = {JSC_ANYOF: validators}


def get_registry_ref(registry):
    # we don't know yet
    ref = {JSC_REF: "pending"}
    pending_refs.append((ref, registry))
    return ref


def solve_pending_refs():
    for ref, registry in pending_refs:
        for registry_match, name in registries:
            if registry == registry_match:
                ref[JSC_REF] = "#/definitions/" + name


def add_module_schemas(name, module):
    import esphome.config_validation as cv

    for c in dir(module):
        v = getattr(module, c)
        if isinstance(v, cv.Schema):
            get_jschema(name + "." + c, v)


def get_dirs():
    from esphome.config import CORE_COMPONENTS_PATH

    dir_names = [
        d
        for d in os.listdir(CORE_COMPONENTS_PATH)
        if not d.startswith("__")
        and os.path.isdir(os.path.join(CORE_COMPONENTS_PATH, d))
    ]
    return dir_names


def get_logger_tags():
    from esphome.config import CORE_COMPONENTS_PATH
    import glob

    pattern = re.compile(r'^static const char(\*\s|\s\*)TAG = "(\w.*)";', re.MULTILINE)
    tags = [
        "app",
        "component",
        "esphal",
        "helpers",
        "preferences",
        "scheduler",
        "api.service",
    ]
    for x in os.walk(CORE_COMPONENTS_PATH):
        for y in glob.glob(os.path.join(x[0], "*.cpp")):
            with open(y, "r") as file:
                data = file.read()
                match = pattern.search(data)
                if match:
                    tags.append(match.group(2))
    return tags


def load_components():
    import esphome.config_validation as cv
    from esphome.config import get_component

    modules["cv"] = cv
    from esphome import automation

    modules["automation"] = automation

    for domain in get_dirs():
        components[domain] = get_component(domain)
        modules[domain] = components[domain].module


def add_components():
    from esphome.config import get_platform

    for domain, c in components.items():
        if c.is_platform_component:
            # this is a platform_component, e.g. binary_sensor
            platform_schema = [
                {
                    "type": "object",
                    "properties": {"platform": {"type": "string"}},
                }
            ]
            if domain != "output" and domain != "display":
                # output bases are either FLOAT or BINARY so don't add common base for this
                # display bases are either simple or FULL so don't add common base for this
                platform_schema = [
                    {"$ref": f"#/definitions/{domain}.{domain.upper()}_SCHEMA"}
                ] + platform_schema

            base_props[domain] = {"type": "array", "items": {"allOf": platform_schema}}

            add_module_registries(domain, c.module)
            add_module_schemas(domain, c.module)

    # need first to iterate all platforms then iterate components
    # a platform component can have other components as properties,
    # e.g. climate components usually have a temperature sensor

    for domain, c in components.items():
        if (c.config_schema is not None) or c.is_platform_component:
            if c.is_platform_component:
                platform_schema = base_props[domain]["items"]["allOf"]
                for platform in get_dirs():
                    p = get_platform(domain, platform)
                    if p is not None:
                        # this is a platform element, e.g.
                        #   - platform: gpio
                        schema = get_jschema(
                            domain + "-" + platform,
                            p.config_schema,
                            create_return_ref=False,
                        )
                        if (
                            schema
                        ):  # for invalid schemas, None is returned thus is deprecated
                            platform_schema.append(
                                {
                                    "if": {
                                        JSC_PROPERTIES: {
                                            "platform": {"const": platform}
                                        }
                                    },
                                    "then": schema,
                                }
                            )

            elif c.config_schema is not None:
                # adds root components which are not platforms, e.g. api: logger:
                if c.is_multi_conf:
                    schema = get_jschema(domain, c.config_schema)
                    schema = add_definition_array_or_single_object(schema)
                else:
                    schema = get_jschema(domain, c.config_schema, False)
                base_props[domain] = schema


def get_automation_schema(name, vschema):
    from esphome.automation import AUTOMATION_SCHEMA

    # ensure SIMPLE_AUTOMATION
    if SIMPLE_AUTOMATION not in definitions:
        simple_automation = add_definition_array_or_single_object(get_ref(JSC_ACTION))
        simple_automation[JSC_ANYOF].append(
            get_jschema(AUTOMATION_SCHEMA.__module__, AUTOMATION_SCHEMA)
        )

        definitions[schema_names[str(AUTOMATION_SCHEMA)]][JSC_PROPERTIES][
            "then"
        ] = add_definition_array_or_single_object(get_ref(JSC_ACTION))
        definitions[SIMPLE_AUTOMATION] = simple_automation

    extra_vschema = None
    if AUTOMATION_SCHEMA == ejs.extended_schemas[str(vschema)][0]:
        extra_vschema = ejs.extended_schemas[str(vschema)][1]

    if not extra_vschema:
        return get_ref(SIMPLE_AUTOMATION)

    # add then property
    extra_jschema = get_jschema(name, extra_vschema, False)

    if is_ref(extra_jschema):
        return extra_jschema

    if not JSC_PROPERTIES in extra_jschema:
        # these are interval: and exposure_notifications, components featuring automations
        extra_jschema[JSC_ALLOF][0][JSC_PROPERTIES][
            "then"
        ] = add_definition_array_or_single_object(get_ref(JSC_ACTION))
        ref = create_ref(name, extra_vschema, extra_jschema)
        return add_definition_array_or_single_object(ref)

    # automations can be either
    #   * a single action,
    #   * an array of action,
    #   * an object with automation's schema and a then key
    #     with again a single action or an array of actions

    extra_jschema[JSC_PROPERTIES]["then"] = add_definition_array_or_single_object(
        get_ref(JSC_ACTION)
    )
    jschema = add_definition_array_or_single_object(get_ref(JSC_ACTION))
    jschema[JSC_ANYOF].append(extra_jschema)

    return create_ref(name, extra_vschema, jschema)


def get_entry(parent_key, vschema):
    from esphome.voluptuous_schema import _Schema as schema_type

    entry = {}
    # annotate schema validator info
    if DUMP_COMMENTS:
        entry[JSC_COMMENT] = "entry: " + parent_key + "/" + str(vschema)

    if isinstance(vschema, list):
        ref = get_jschema(parent_key + "[]", vschema[0])
        entry = {"type": "array", "items": ref}
    elif isinstance(vschema, schema_type) and hasattr(vschema, "schema"):
        entry = get_jschema(parent_key, vschema, False)
    elif hasattr(vschema, "validators"):
        entry = get_jschema(parent_key, vschema, False)
    elif vschema in schema_registry:
        entry = schema_registry[vschema].copy()
    elif str(vschema) in ejs.registry_schemas:
        entry = get_registry_ref(ejs.registry_schemas[str(vschema)])
    elif str(vschema) in ejs.list_schemas:
        ref = get_jschema(parent_key, ejs.list_schemas[str(vschema)][0])
        entry = {JSC_ANYOF: [ref, {"type": "array", "items": ref}]}

    elif str(vschema) in ejs.typed_schemas:
        schema_types = [{"type": "object", "properties": {"type": {"type": "string"}}}]
        entry = {"allOf": schema_types}
        for schema_key, vschema_type in ejs.typed_schemas[str(vschema)][0][0].items():
            schema_types.append(
                {
                    "if": {"properties": {"type": {"const": schema_key}}},
                    "then": get_jschema(f"{parent_key}-{schema_key}", vschema_type),
                }
            )

    elif str(vschema) in ejs.hidden_schemas:
        # get the schema from the automation schema
        type = ejs.hidden_schemas[str(vschema)]
        inner_vschema = vschema(ejs.jschema_extractor)
        if type == "automation":
            entry = get_automation_schema(parent_key, inner_vschema)
        elif type == "maybe":
            entry = get_jschema(parent_key, inner_vschema)
        else:
            raise ValueError("Unknown extracted schema type")
    elif str(vschema).startswith("<function invalid."):
        # deprecated options, don't list as valid schema
        return None
    else:
        # everything else just accept string and let ESPHome validate
        try:
            from esphome.core import ID

            v = vschema(None)
            if isinstance(v, ID):
                entry = {"type": "string", "id_type": v.type.base}
            elif isinstance(v, str):
                entry = {"type": "string"}
            elif isinstance(v, list):
                entry = {"type": "array"}
            else:
                entry = default_schema()
        except:
            entry = default_schema()

    return entry


def default_schema():
    # Accept anything
    return {"type": ["null", "object", "string", "array", "number"]}


def is_default_schema(jschema):
    if is_ref(jschema):
        return is_default_schema(unref(jschema))
    return "type" in jschema and jschema["type"] == default_schema()["type"]


def get_jschema(path, vschema, create_return_ref=True):
    name = schema_names.get(get_schema_str(vschema))
    if name:
        return get_ref(name)

    jschema = convert_schema(path, vschema)

    if is_ref(jschema):
        # this can happen when returned extended
        # schemas where all properties found in previous extended schema
        return jschema

    if not create_return_ref:
        return jschema

    return create_ref(path, vschema, jschema)


def get_schema_str(vschema):
    # Hack on cv.use_id, in the future this can be improved by tracking which type is required by
    # the id, this information can be added somehow to schema (not supported by jsonschema) and
    # completion can be improved listing valid ids only. Meanwhile it's a problem because it makes
    # all partial schemas with cv.use_id different, e.g. i2c

    return re.sub(
        pattern="function use_id.<locals>.validator at 0[xX][0-9a-fA-F]+>",
        repl="function use_id.<locals>.validator<>",
        string=str(vschema),
    )


def create_ref(name, vschema, jschema):
    if name in schema_names:
        raise ValueError("Not supported")

    schema_str = get_schema_str(vschema)

    schema_names[schema_str] = name
    definitions[name] = jschema
    return get_ref(name)


def get_all_properties(jschema):
    if JSC_PROPERTIES in jschema:
        return list(jschema[JSC_PROPERTIES].keys())
    if is_ref(jschema):
        return get_all_properties(unref(jschema))
    arr = jschema.get(JSC_ALLOF, jschema.get(JSC_ANYOF))
    props = []
    for x in arr:
        props = props + get_all_properties(x)

    return props


def merge(arr, element):
    # arr is an array of dicts, dicts can have keys like, properties, $ref, required:[], etc
    # element is a single dict which might have several keys too
    # the result should be an array with only one element containing properties, required, etc
    # and other elements for needed $ref elements
    # NOTE: json schema supports allof with properties in different elements, but that makes
    # it complex for later adding docs to the schema
    for k, v in element.items():
        if k == JSC_PROPERTIES:
            props_found = False
            for a_dict in arr:
                if JSC_PROPERTIES in a_dict:
                    # found properties
                    arr_props = a_dict[JSC_PROPERTIES]
                    for v_k, v_v in v.items():
                        arr_props[v_k] = v_v  # add or overwrite
                    props_found = True
            if not props_found:
                arr.append(element)
        elif k == JSC_REF:
            ref_found = False
            for a_dict in arr:
                if k in a_dict and a_dict[k] == v:
                    ref_found = True
                    continue
            if not ref_found:
                arr.append(element)
        else:
            # TODO: Required might require special handling
            pass


def convert_schema(path, vschema, un_extend=True):
    import esphome.config_validation as cv

    # analyze input key, if it is not a Required or Optional, then it is an array
    output = {}

    if str(vschema) in ejs.hidden_schemas:
        # this can get another think twist. When adding this I've already figured out
        # interval and script in another way
        if path not in ["interval", "script"]:
            vschema = vschema(ejs.jschema_extractor)

    if un_extend:
        extended = ejs.extended_schemas.get(str(vschema))
        if extended:
            lhs = get_jschema(path, extended[0], False)
            rhs = get_jschema(path, extended[1], False)

            # check if we are not merging properties which are already in base component
            lprops = get_all_properties(lhs)
            rprops = get_all_properties(rhs)

            if all(item in lprops for item in rprops):
                return lhs
            if all(item in rprops for item in lprops):
                return rhs

            # merge

            if JSC_ALLOF in lhs and JSC_ALLOF in rhs:
                output = lhs[JSC_ALLOF]
                for k in rhs[JSC_ALLOF]:
                    merge(output[JSC_ALLOF], k)
            elif JSC_ALLOF in lhs:
                output = lhs
                merge(output[JSC_ALLOF], rhs)
            elif JSC_ALLOF in rhs:
                output = rhs
                merge(output[JSC_ALLOF], lhs)
            else:
                output = {JSC_ALLOF: [lhs]}
                merge(output[JSC_ALLOF], rhs)

            return output

    # When schema contains all, all also has a schema which points
    # back to the containing schema
    while hasattr(vschema, "schema") and not hasattr(vschema, "validators"):
        vschema = vschema.schema

    if hasattr(vschema, "validators"):
        output = default_schema()
        for v in vschema.validators:
            if v:
                # we should take the valid schema,
                # commonly all is used to validate a schema, and then a function which
                # is not a schema is also given, get_schema will then return a default_schema()
                val_schema = get_jschema(path, v, False)
                if is_default_schema(val_schema):
                    if not output:
                        output = val_schema
                else:
                    if is_default_schema(output):
                        output = val_schema
                    else:
                        output = {**output, **val_schema}

        return output

    if not vschema:
        return output

    if not hasattr(vschema, "keys"):
        return get_entry(path, vschema)

    key = list(vschema.keys())[0]

    # used for platformio_options in core_config
    # pylint: disable=comparison-with-callable
    if key == cv.string_strict:
        output["type"] = "object"
        return output

    props = output[JSC_PROPERTIES] = {}

    output["type"] = ["object", "null"]
    if DUMP_COMMENTS:
        output[JSC_COMMENT] = "converted: " + path + "/" + str(vschema)

    if path == "logger-logs":
        tags = get_logger_tags()
        for k in tags:
            props[k] = {
                "enum": [
                    "NONE",
                    "ERROR",
                    "WARN",
                    "INFO",
                    "DEBUG",
                    "VERBOSE",
                    "VERY_VERBOSE",
                ]
            }

    else:
        for k in vschema:
            if str(k).startswith("<function"):
                # generate all logger tags

                # TODO handle key functions

                continue

            v = vschema[k]
            prop = {}

            if isinstance(v, vol.Schema):
                prop = get_jschema(path + "-" + str(k), v.schema)
            elif hasattr(v, "validators"):
                prop = convert_schema(path + "-" + str(k), v, False)
            else:
                prop = get_entry(path + "-" + str(k), v)

            if prop:  # Deprecated (cv.Invalid) properties not added
                props[str(k)] = prop
            # TODO: see required, sometimes completions doesn't show up because of this...
            # if isinstance(k, cv.Required):
            #     required.append(str(k))
            try:
                if str(k.default) != "...":
                    prop["default"] = k.default()
            except:
                pass
    return output


def dump_schema():
    import esphome.config_validation as cv

    from esphome import automation
    from esphome.automation import validate_potentially_and_condition
    from esphome import pins
    from esphome.core import CORE
    from esphome.helpers import write_file_if_changed
    from esphome.components import remote_base

    # The root directory of the repo
    root = Path(__file__).parent.parent

    # Fake some directory so that get_component works
    CORE.config_path = str(root)

    file_path = args.output

    schema_registry[cv.boolean] = {"type": "boolean"}

    for v in [
        cv.int_,
        cv.int_range,
        cv.positive_int,
        cv.float_,
        cv.positive_float,
        cv.positive_float,
        cv.positive_not_null_int,
        cv.negative_one_to_one_float,
        cv.port,
    ]:
        schema_registry[v] = {"type": "number"}

    for v in [
        cv.string,
        cv.string_strict,
        cv.valid_name,
        cv.hex_int,
        cv.hex_int_range,
        pins.output_pin,
        pins.input_pin,
        pins.input_pullup_pin,
        cv.subscribe_topic,
        cv.publish_topic,
        cv.mqtt_payload,
        cv.ssid,
        cv.percentage_int,
        cv.percentage,
        cv.possibly_negative_percentage,
        cv.positive_time_period,
        cv.positive_time_period_microseconds,
        cv.positive_time_period_milliseconds,
        cv.positive_time_period_minutes,
        cv.positive_time_period_seconds,
    ]:
        schema_registry[v] = {"type": "string"}

    schema_registry[validate_potentially_and_condition] = get_ref("condition_list")

    for v in [pins.gpio_input_pin_schema, pins.gpio_input_pullup_pin_schema]:
        schema_registry[v] = get_ref("PIN.GPIO_FULL_INPUT_PIN_SCHEMA")

    for v in [pins.gpio_output_pin_schema, pins.internal_gpio_output_pin_schema]:
        schema_registry[v] = get_ref("PIN.GPIO_FULL_OUTPUT_PIN_SCHEMA")

    add_module_schemas("CONFIG", cv)
    get_jschema("POLLING_COMPONENT", cv.polling_component_schema("60s"))

    add_module_schemas("PIN", pins)
    # fix shorthand pin IO:
    definitions["PIN.GPIO_FULL_INPUT_PIN_SCHEMA"]["type"] = ["string", "object"]
    definitions["PIN.GPIO_FULL_OUTPUT_PIN_SCHEMA"]["type"] = ["string", "object"]

    add_module_schemas("REMOTE_BASE", remote_base)
    add_module_schemas("AUTOMATION", automation)

    load_components()
    add_registries()

    definitions["condition_list"] = {
        JSC_ONEOF: [
            {"type": "array", "items": get_ref(JSC_CONDITION)},
            get_ref(JSC_CONDITION),
        ]
    }

    output = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "definitions": definitions,
        JSC_PROPERTIES: base_props,
    }

    add_core()
    add_buses()
    add_components()

    add_registries()  # need second pass, e.g. climate.pid.autotune
    solve_pending_refs()

    write_file_if_changed(file_path, json.dumps(output))
    print(f"Wrote {file_path}")


dump_schema()
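
The script emits a single draft-07 JSON schema file. The overall shape of the output, abridged and with illustrative entries only, is roughly:

    # Mirrors the `output` dict assembled in dump_schema() above.
    {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "definitions": {
            "automation.ACTION_REGISTRY": {"anyOf": ["...one object per registered action..."]},
            "simple_automation": {"anyOf": ["...single action, list of actions, or full automation..."]},
            # plus one named definition per component/bus/pin schema encountered
        },
        "properties": {
            "esphome": {"$ref": "#/definitions/esphome"},
            "sensor": {"type": "array", "items": {"allOf": ["...per-platform schemas..."]}},
            # plus one property per root component (api:, logger:, wifi:, ...)
        },
    }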

setup.py (69 lines changed):

@@ -6,49 +6,50 @@ from setuptools import setup, find_packages

 from esphome import const

-PROJECT_NAME = 'esphome'
-PROJECT_PACKAGE_NAME = 'esphome'
-PROJECT_LICENSE = 'MIT'
-PROJECT_AUTHOR = 'ESPHome'
-PROJECT_COPYRIGHT = '2019, ESPHome'
-PROJECT_URL = 'https://esphome.io/'
-PROJECT_EMAIL = 'contact@esphome.io'
+PROJECT_NAME = "esphome"
+PROJECT_PACKAGE_NAME = "esphome"
+PROJECT_LICENSE = "MIT"
+PROJECT_AUTHOR = "ESPHome"
+PROJECT_COPYRIGHT = "2019, ESPHome"
+PROJECT_URL = "https://esphome.io/"
+PROJECT_EMAIL = "contact@esphome.io"

-PROJECT_GITHUB_USERNAME = 'esphome'
-PROJECT_GITHUB_REPOSITORY = 'esphome'
+PROJECT_GITHUB_USERNAME = "esphome"
+PROJECT_GITHUB_REPOSITORY = "esphome"

-PYPI_URL = 'https://pypi.python.org/pypi/{}'.format(PROJECT_PACKAGE_NAME)
-GITHUB_PATH = '{}/{}'.format(PROJECT_GITHUB_USERNAME, PROJECT_GITHUB_REPOSITORY)
-GITHUB_URL = 'https://github.com/{}'.format(GITHUB_PATH)
+PYPI_URL = "https://pypi.python.org/pypi/{}".format(PROJECT_PACKAGE_NAME)
+GITHUB_PATH = "{}/{}".format(PROJECT_GITHUB_USERNAME, PROJECT_GITHUB_REPOSITORY)
+GITHUB_URL = "https://github.com/{}".format(GITHUB_PATH)

-DOWNLOAD_URL = '{}/archive/v{}.zip'.format(GITHUB_URL, const.__version__)
+DOWNLOAD_URL = "{}/archive/v{}.zip".format(GITHUB_URL, const.__version__)

 here = os.path.abspath(os.path.dirname(__file__))

-with open(os.path.join(here, 'requirements.txt')) as requirements_txt:
+with open(os.path.join(here, "requirements.txt")) as requirements_txt:
     REQUIRES = requirements_txt.read().splitlines()

-with open(os.path.join(here, 'README.md')) as readme:
+with open(os.path.join(here, "README.md")) as readme:
     LONG_DESCRIPTION = readme.read()

 # If you have problems importing platformio and esptool as modules you can set
 # $ESPHOME_USE_SUBPROCESS to make ESPHome call their executables instead.
 # This means they have to be in your $PATH.
-if 'ESPHOME_USE_SUBPROCESS' in os.environ:
+if "ESPHOME_USE_SUBPROCESS" in os.environ:
     # Remove platformio and esptool from requirements
     REQUIRES = [
-        req for req in REQUIRES
-        if not any(req.startswith(prefix) for prefix in ['platformio', 'esptool'])
+        req
+        for req in REQUIRES
+        if not any(req.startswith(prefix) for prefix in ["platformio", "esptool"])
     ]

 CLASSIFIERS = [
-    'Environment :: Console',
-    'Intended Audience :: Developers',
-    'Intended Audience :: End Users/Desktop',
-    'License :: OSI Approved :: MIT License',
-    'Programming Language :: C++',
-    'Programming Language :: Python :: 3',
-    'Topic :: Home Automation',
+    "Environment :: Console",
+    "Intended Audience :: Developers",
+    "Intended Audience :: End Users/Desktop",
+    "License :: OSI Approved :: MIT License",
+    "Programming Language :: C++",
+    "Programming Language :: Python :: 3",
+    "Topic :: Home Automation",
 ]

 setup(

@@ -68,18 +69,14 @@ setup(
     author_email=PROJECT_EMAIL,
     description="Make creating custom firmwares for ESP32/ESP8266 super easy.",
     long_description=LONG_DESCRIPTION,
-    long_description_content_type='text/markdown',
+    long_description_content_type="text/markdown",
     include_package_data=True,
     zip_safe=False,
-    platforms='any',
-    test_suite='tests',
-    python_requires='>=3.6,<4.0',
+    platforms="any",
+    test_suite="tests",
+    python_requires=">=3.6,<4.0",
     install_requires=REQUIRES,
-    keywords=['home', 'automation'],
-    entry_points={
-        'console_scripts': [
-            'esphome = esphome.__main__:main'
-        ]
-    },
-    packages=find_packages(include="esphome.*")
+    keywords=["home", "automation"],
+    entry_points={"console_scripts": ["esphome = esphome.__main__:main"]},
+    packages=find_packages(include="esphome.*"),
 )

Component test fixtures (tests/component_tests/conftest.py):

@@ -1,4 +1,4 @@
-""" Fixtures for component tests """
+"""Fixtures for component tests."""

 import pytest


@@ -9,7 +9,7 @@ from esphome.__main__ import generate_cpp_contents

 @pytest.fixture
 def generate_main():
-    """ Generates the C++ main.cpp file and returns it in string form """
+    """Generates the C++ main.cpp file and returns it in string form."""

     def generator(path: str) -> str:
         CORE.config_path = path