Mirror of https://github.com/esphome/esphome.git (synced 2024-11-10 01:07:45 +01:00)

Bump pylint from 2.10.2 to 2.11.1 (#2334)

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Otto Winter <otto@otto-winter.com>

Commit dbb195691b (parent 50da630811): 40 changed files with 219 additions and 384 deletions
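Pylint 2.11 adds the consider-using-f-string check, which flags str.format() (and percent-style) calls that could be written as f-strings; the hunks below convert those call sites. A minimal sketch of the pattern, using a made-up placeholder value rather than the configuration objects from the real code:

broker = "192.168.1.10"  # placeholder value, for illustration only

# Old style, reported by pylint >= 2.11 as consider-using-f-string:
option = "MQTT ({})".format(broker)

# New style applied throughout this commit:
option = f"MQTT ({broker})"

assert option == "MQTT (192.168.1.10)"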
@@ -72,7 +72,7 @@ def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api
 if default == "OTA":
 return CORE.address
 if show_mqtt and "mqtt" in CORE.config:
- options.append(("MQTT ({})".format(CORE.config["mqtt"][CONF_BROKER]), "MQTT"))
+ options.append((f"MQTT ({CORE.config['mqtt'][CONF_BROKER]})", "MQTT"))
 if default == "OTA":
 return "MQTT"
 if default is not None:

@@ -415,30 +415,30 @@ def command_update_all(args):
 click.echo(f"{half_line}{middle_text}{half_line}")

 for f in files:
- print("Updating {}".format(color(Fore.CYAN, f)))
+ print(f"Updating {color(Fore.CYAN, f)}")
 print("-" * twidth)
 print()
 rc = run_external_process(
 "esphome", "--dashboard", "run", f, "--no-logs", "--device", "OTA"
 )
 if rc == 0:
- print_bar("[{}] {}".format(color(Fore.BOLD_GREEN, "SUCCESS"), f))
+ print_bar(f"[{color(Fore.BOLD_GREEN, 'SUCCESS')}] {f}")
 success[f] = True
 else:
- print_bar("[{}] {}".format(color(Fore.BOLD_RED, "ERROR"), f))
+ print_bar(f"[{color(Fore.BOLD_RED, 'ERROR')}] {f}")
 success[f] = False

 print()
 print()
 print()

- print_bar("[{}]".format(color(Fore.BOLD_WHITE, "SUMMARY")))
+ print_bar(f"[{color(Fore.BOLD_WHITE, 'SUMMARY')}]")
 failed = 0
 for f in files:
 if success[f]:
- print(" - {}: {}".format(f, color(Fore.GREEN, "SUCCESS")))
+ print(f" - {f}: {color(Fore.GREEN, 'SUCCESS')}")
 else:
- print(" - {}: {}".format(f, color(Fore.BOLD_RED, "FAILED")))
+ print(f" - {f}: {color(Fore.BOLD_RED, 'FAILED')}")
 failed += 1
 return failed

@@ -231,17 +231,16 @@ def parse_multi_click_timing_str(value):
 parts = value.lower().split(" ")
 if len(parts) != 5:
 raise cv.Invalid(
- "Multi click timing grammar consists of exactly 5 words, not {}"
- "".format(len(parts))
+ f"Multi click timing grammar consists of exactly 5 words, not {len(parts)}"
 )
 try:
 state = cv.boolean(parts[0])
 except cv.Invalid:
 # pylint: disable=raise-missing-from
- raise cv.Invalid("First word must either be ON or OFF, not {}".format(parts[0]))
+ raise cv.Invalid(f"First word must either be ON or OFF, not {parts[0]}")

 if parts[1] != "for":
- raise cv.Invalid("Second word must be 'for', got {}".format(parts[1]))
+ raise cv.Invalid(f"Second word must be 'for', got {parts[1]}")

 if parts[2] == "at":
 if parts[3] == "least":

@@ -250,8 +249,7 @@ def parse_multi_click_timing_str(value):
 key = CONF_MAX_LENGTH
 else:
 raise cv.Invalid(
- "Third word after at must either be 'least' or 'most', got {}"
- "".format(parts[3])
+ f"Third word after at must either be 'least' or 'most', got {parts[3]}"
 )
 try:
 length = cv.positive_time_period_milliseconds(parts[4])

@@ -296,13 +294,11 @@ def validate_multi_click_timing(value):
 new_state = v_.get(CONF_STATE, not state)
 if new_state == state:
 raise cv.Invalid(
- "Timings must have alternating state. Indices {} and {} have "
- "the same state {}".format(i, i + 1, state)
+ f"Timings must have alternating state. Indices {i} and {i + 1} have the same state {state}"
 )
 if max_length is not None and max_length < min_length:
 raise cv.Invalid(
- "Max length ({}) must be larger than min length ({})."
- "".format(max_length, min_length)
+ f"Max length ({max_length}) must be larger than min length ({min_length})."
 )

 state = new_state

@@ -16,8 +16,7 @@ def validate_pin_number(value):
 valid_pins = [0, 2, 4, 12, 13, 14, 15, 25, 26, 27, 32, 33, 34, 35, 36, 37, 38, 39]
 if value[CONF_NUMBER] not in valid_pins:
 raise cv.Invalid(
- "Only pins {} support wakeup"
- "".format(", ".join(str(x) for x in valid_pins))
+ f"Only pins {', '.join(str(x) for x in valid_pins)} support wakeup"
 )
 return value

@@ -24,9 +24,7 @@ CONFIG_SCHEMA = cv.Schema(

 async def to_code(config):
 uuid = config[CONF_UUID].hex
- uuid_arr = [
- cg.RawExpression("0x{}".format(uuid[i : i + 2])) for i in range(0, len(uuid), 2)
- ]
+ uuid_arr = [cg.RawExpression(f"0x{uuid[i:i + 2]}") for i in range(0, len(uuid), 2)]
 var = cg.new_Pvariable(config[CONF_ID], uuid_arr)
 await cg.register_component(var, config)
 cg.add(var.set_major(config[CONF_MAJOR]))

@@ -51,8 +51,7 @@ def validate_scan_parameters(config):

 if window > interval:
 raise cv.Invalid(
- "Scan window ({}) needs to be smaller than scan interval ({})"
- "".format(window, interval)
+ f"Scan window ({window}) needs to be smaller than scan interval ({interval})"
 )

 if interval.total_milliseconds * 3 > duration.total_milliseconds:

@@ -97,9 +96,7 @@ def bt_uuid(value):
 )
 return value
 raise cv.Invalid(
- "Service UUID must be in 16 bit '{}', 32 bit '{}', or 128 bit '{}' format".format(
- bt_uuid16_format, bt_uuid32_format, bt_uuid128_format
- )
+ f"Service UUID must be in 16 bit '{bt_uuid16_format}', 32 bit '{bt_uuid32_format}', or 128 bit '{bt_uuid128_format}' format"
 )

@@ -112,9 +109,7 @@ def as_hex_array(value):
 cpp_array = [
 f"0x{part}" for part in [value[i : i + 2] for i in range(0, len(value), 2)]
 ]
- return cg.RawExpression(
- "(uint8_t*)(const uint8_t[16]){{{}}}".format(",".join(cpp_array))
- )
+ return cg.RawExpression(f"(uint8_t*)(const uint8_t[16]){{{','.join(cpp_array)}}}")

 def as_reversed_hex_array(value):

@@ -123,7 +118,7 @@ def as_reversed_hex_array(value):
 f"0x{part}" for part in [value[i : i + 2] for i in range(0, len(value), 2)]
 ]
 return cg.RawExpression(
- "(uint8_t*)(const uint8_t[16]){{{}}}".format(",".join(reversed(cpp_array)))
+ f"(uint8_t*)(const uint8_t[16]){{{','.join(reversed(cpp_array))}}}"
 )

@@ -61,8 +61,7 @@ def validate_pillow_installed(value):
 def validate_truetype_file(value):
 if value.endswith(".zip"): # for Google Fonts downloads
 raise cv.Invalid(
- "Please unzip the font archive '{}' first and then use the .ttf files "
- "inside.".format(value)
+ f"Please unzip the font archive '{value}' first and then use the .ttf files inside."
 )
 if not value.endswith(".ttf"):
 raise cv.Invalid(

@@ -131,7 +130,7 @@ async def to_code(config):
 ("a_char", glyph),
 (
 "data",
- cg.RawExpression(str(prog_arr) + " + " + str(glyph_args[glyph][0])),
+ cg.RawExpression(f"{str(prog_arr)} + {str(glyph_args[glyph][0])}"),
 ),
 ("offset_x", glyph_args[glyph][1]),
 ("offset_y", glyph_args[glyph][2]),

@@ -20,8 +20,7 @@ GPIOLCDDisplay = lcd_gpio_ns.class_("GPIOLCDDisplay", lcd_base.LCDDisplay)
 def validate_pin_length(value):
 if len(value) != 4 and len(value) != 8:
 raise cv.Invalid(
- "LCD Displays can either operate in 4-pin or 8-pin mode,"
- "not {}-pin mode".format(len(value))
+ f"LCD Displays can either operate in 4-pin or 8-pin mode,not {len(value)}-pin mode"
 )
 return value

@@ -28,13 +28,11 @@ def validate_frequency(value):
 max_freq = calc_max_frequency(1)
 if value < min_freq:
 raise cv.Invalid(
- "This frequency setting is not possible, please choose a higher "
- "frequency (at least {}Hz)".format(int(min_freq))
+ f"This frequency setting is not possible, please choose a higher frequency (at least {int(min_freq)}Hz)"
 )
 if value > max_freq:
 raise cv.Invalid(
- "This frequency setting is not possible, please choose a lower "
- "frequency (at most {}Hz)".format(int(max_freq))
+ f"This frequency setting is not possible, please choose a lower frequency (at most {int(max_freq)}Hz)"
 )
 return value

@@ -490,8 +490,7 @@ def validate_effects(allowed_effects):
 if key not in allowed_effects:
 errors.append(
 cv.Invalid(
- "The effect '{}' is not allowed for this "
- "light type".format(key),
+ f"The effect '{key}' is not allowed for this light type",
 [i],
 )
 )

@@ -500,8 +499,7 @@ def validate_effects(allowed_effects):
 if name in names:
 errors.append(
 cv.Invalid(
- "Found the effect name '{}' twice. All effects must have "
- "unique names".format(name),
+ f"Found the effect name '{name}' twice. All effects must have unique names",
 [i],
 )
 )

@@ -86,8 +86,7 @@ def validate_local_no_higher_than_global(value):
 for tag, level in value.get(CONF_LOGS, {}).items():
 if LOG_LEVEL_SEVERITY.index(level) > LOG_LEVEL_SEVERITY.index(global_level):
 raise EsphomeError(
- "The local log level {} for {} must be less severe than the "
- "global log level {}.".format(level, tag, global_level)
+ f"The local log level {level} for {tag} must be less severe than the global log level {global_level}."
 )
 return value

@@ -145,7 +144,7 @@ async def to_code(config):
 level = config[CONF_LEVEL]
 cg.add_define("USE_LOGGER")
 this_severity = LOG_LEVEL_SEVERITY.index(level)
- cg.add_build_flag("-DESPHOME_LOG_LEVEL={}".format(LOG_LEVELS[level]))
+ cg.add_build_flag(f"-DESPHOME_LOG_LEVEL={LOG_LEVELS[level]}")

 verbose_severity = LOG_LEVEL_SEVERITY.index("VERBOSE")
 very_verbose_severity = LOG_LEVEL_SEVERITY.index("VERY_VERBOSE")

@@ -220,8 +219,7 @@ def validate_printf(value):
 matches = re.findall(cfmt, value[CONF_FORMAT], flags=re.X)
 if len(matches) != len(value[CONF_ARGS]):
 raise cv.Invalid(
- "Found {} printf-patterns ({}), but {} args were given!"
- "".format(len(matches), ", ".join(matches), len(value[CONF_ARGS]))
+ f"Found {len(matches)} printf-patterns ({', '.join(matches)}), but {len(value[CONF_ARGS])} args were given!"
 )
 return value

@@ -266,8 +266,7 @@ async def to_code(config):
 if CONF_SSL_FINGERPRINTS in config:
 for fingerprint in config[CONF_SSL_FINGERPRINTS]:
 arr = [
- cg.RawExpression("0x{}".format(fingerprint[i : i + 2]))
- for i in range(0, 40, 2)
+ cg.RawExpression(f"0x{fingerprint[i:i + 2]}") for i in range(0, 40, 2)
 ]
 cg.add(var.add_ssl_fingerprint(arr))
 cg.add_build_flag("-DASYNC_TCP_SSL_ENABLED=1")

@@ -353,9 +352,7 @@ def get_default_topic_for(data, component_type, name, suffix):
 sanitized_name = "".join(
 x for x in name.lower().replace(" ", "_") if x in allowlist
 )
- return "{}/{}/{}/{}".format(
- data.topic_prefix, component_type, sanitized_name, suffix
- )
+ return f"{data.topic_prefix}/{component_type}/{sanitized_name}/{suffix}"

 async def register_mqtt_component(var, config):

@@ -40,7 +40,7 @@ def validate_type(value):
 raise cv.Invalid("Must have B in type")
 rest = set(value) - set("RGBW")
 if rest:
- raise cv.Invalid("Type has invalid color: {}".format(", ".join(rest)))
+ raise cv.Invalid(f"Type has invalid color: {', '.join(rest)}")
 if len(set(value)) != len(value):
 raise cv.Invalid("Type has duplicate color!")
 return value

@@ -95,9 +95,7 @@ def validate_method_pin(value):
 for opt in (CONF_PIN, CONF_CLOCK_PIN, CONF_DATA_PIN):
 if opt in value and value[opt] not in pins_:
 raise cv.Invalid(
- "Method {} only supports pin(s) {}".format(
- method, ", ".join(f"GPIO{x}" for x in pins_)
- ),
+ f"Method {method} only supports pin(s) {', '.join(f'GPIO{x}' for x in pins_)}",
 path=[CONF_METHOD],
 )
 return value

@@ -139,7 +137,7 @@ def format_method(config):

 if config[CONF_INVERT]:
 if method == "ESP8266_DMA":
- variant = "Inverted" + variant
+ variant = f"Inverted{variant}"
 else:
 variant += "Inverted"

@@ -30,16 +30,14 @@ CONF_FONT_ID = "font_id"

 def NextionName(value):
- valid_chars = ascii_letters + digits + "."
+ valid_chars = f"{ascii_letters + digits}."
 if not isinstance(value, str) or len(value) > 29:
 raise cv.Invalid("Must be a string less than 29 characters")

 for char in value:
 if char not in valid_chars:
 raise cv.Invalid(
- "Must only consist of upper/lowercase characters, numbers and the period '.'. The character '{}' cannot be used.".format(
- char
- )
+ f"Must only consist of upper/lowercase characters, numbers and the period '.'. The character '{char}' cannot be used."
 )

 return value

@@ -105,9 +105,7 @@ def process_calibration(value):
 a, b, c = calc_steinhart_hart(value)
 else:
 raise cv.Invalid(
- "Calibration parameter accepts either a list for steinhart-hart "
- "calibration, or mapping for b-constant calibration, "
- "not {}".format(type(value))
+ f"Calibration parameter accepts either a list for steinhart-hart calibration, or mapping for b-constant calibration, not {type(value)}"
 )

 return {

@@ -151,8 +151,7 @@ def do_packages_pass(config: dict):
 packages = CONFIG_SCHEMA(packages)
 if not isinstance(packages, dict):
 raise cv.Invalid(
- "Packages must be a key to value mapping, got {} instead"
- "".format(type(packages))
+ f"Packages must be a key to value mapping, got {type(packages)} instead"
 )

 for package_name, package_config in packages.items():

@@ -20,8 +20,7 @@ PartitionLightOutput = partitions_ns.class_(
 def validate_from_to(value):
 if value[CONF_FROM] > value[CONF_TO]:
 raise cv.Invalid(
- "From ({}) must not be larger than to ({})"
- "".format(value[CONF_FROM], value[CONF_TO])
+ f"From ({value[CONF_FROM]}) must not be larger than to ({value[CONF_TO]})"
 )
 return value

@@ -63,9 +63,7 @@ SENSORS_TO_TYPE = {
 def validate_pmsx003_sensors(value):
 for key, types in SENSORS_TO_TYPE.items():
 if key in value and value[CONF_TYPE] not in types:
- raise cv.Invalid(
- "{} does not have {} sensor!".format(value[CONF_TYPE], key)
- )
+ raise cv.Invalid(f"{value[CONF_TYPE]} does not have {key} sensor!")
 return value

@@ -491,8 +491,7 @@ def validate_raw_alternating(value):
 if i != 0:
 if this_negative == last_negative:
 raise cv.Invalid(
- "Values must alternate between being positive and negative, "
- "please see index {} and {}".format(i, i + 1),
+ f"Values must alternate between being positive and negative, please see index {i} and {i + 1}",
 [i],
 )
 last_negative = this_negative

@@ -619,13 +618,11 @@ def validate_rc_switch_code(value):
 for c in value:
 if c not in ("0", "1"):
 raise cv.Invalid(
- "Invalid RCSwitch code character '{}'. Only '0' and '1' are allowed"
- "".format(c)
+ f"Invalid RCSwitch code character '{c}'. Only '0' and '1' are allowed"
 )
 if len(value) > 64:
 raise cv.Invalid(
- "Maximum length for RCSwitch codes is 64, code '{}' has length {}"
- "".format(value, len(value))
+ f"Maximum length for RCSwitch codes is 64, code '{value}' has length {len(value)}"
 )
 if not value:
 raise cv.Invalid("RCSwitch code must not be empty")

@@ -638,14 +635,11 @@ def validate_rc_switch_raw_code(value):
 for c in value:
 if c not in ("0", "1", "x"):
 raise cv.Invalid(
- "Invalid RCSwitch raw code character '{}'.Only '0', '1' and 'x' are allowed".format(
- c
- )
+ f"Invalid RCSwitch raw code character '{c}'.Only '0', '1' and 'x' are allowed"
 )
 if len(value) > 64:
 raise cv.Invalid(
- "Maximum length for RCSwitch raw codes is 64, code '{}' has length {}"
- "".format(value, len(value))
+ f"Maximum length for RCSwitch raw codes is 64, code '{value}' has length {len(value)}"
 )
 if not value:
 raise cv.Invalid("RCSwitch raw code must not be empty")

@@ -49,8 +49,7 @@ def validate_min_max_value(config):
 max_val = config[CONF_MAX_VALUE]
 if min_val >= max_val:
 raise cv.Invalid(
- "Max value {} must be smaller than min value {}"
- "".format(max_val, min_val)
+ f"Max value {max_val} must be smaller than min value {min_val}"
 )
 return config

@@ -109,8 +109,7 @@ def validate_send_first_at(value):
 send_every = value[CONF_SEND_EVERY]
 if send_first_at is not None and send_first_at > send_every:
 raise cv.Invalid(
- "send_first_at must be smaller than or equal to send_every! {} <= {}"
- "".format(send_first_at, send_every)
+ f"send_first_at must be smaller than or equal to send_every! {send_first_at} <= {send_every}"
 )
 return value

@@ -459,8 +458,7 @@ CONF_DEGREE = "degree"
 def validate_calibrate_polynomial(config):
 if config[CONF_DEGREE] >= len(config[CONF_DATAPOINTS]):
 raise cv.Invalid(
- "Degree is too high! Maximum possible degree with given datapoints is "
- "{}".format(len(config[CONF_DATAPOINTS]) - 1),
+ f"Degree is too high! Maximum possible degree with given datapoints is {len(config[CONF_DATAPOINTS]) - 1}",
 [CONF_DEGREE],
 )
 return config

@@ -25,8 +25,7 @@ def validate_substitution_key(value):
 for char in value:
 if char not in VALID_SUBSTITUTIONS_CHARACTERS:
 raise cv.Invalid(
- "Substitution must only consist of upper/lowercase characters, the underscore "
- "and numbers. The character '{}' cannot be used".format(char)
+ f"Substitution must only consist of upper/lowercase characters, the underscore and numbers. The character '{char}' cannot be used"
 )
 return value

@@ -42,6 +41,7 @@ async def to_code(config):
 pass

+ # pylint: disable=consider-using-f-string
 VARIABLE_PROG = re.compile(
 "\\$([{0}]+|\\{{[{0}]*\\}})".format(VALID_SUBSTITUTIONS_CHARACTERS)
 )

@@ -133,8 +133,7 @@ def do_substitution_pass(config, command_line_substitutions):
 with cv.prepend_path("substitutions"):
 if not isinstance(substitutions, dict):
 raise cv.Invalid(
- "Substitutions must be a key to value mapping, got {}"
- "".format(type(substitutions))
+ f"Substitutions must be a key to value mapping, got {type(substitutions)}"
 )

 replace_keys = []

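One hunk above adds a pylint disable comment and keeps str.format() instead of converting: the VARIABLE_PROG pattern needs literal braces (escaped as {{ and }}) and would have to repeat the same long constant twice inside an f-string, so the .format() form presumably stays for readability. A rough, self-contained sketch of that trade-off; VALID_CHARS below is a stand-in for the real VALID_SUBSTITUTIONS_CHARACTERS constant, assumed here for illustration only:

import re
from string import ascii_letters, digits

# Stand-in character set (assumption, not the real constant).
VALID_CHARS = ascii_letters + digits + "_"

# Kept as .format() in the commit, with a local disable of the new check:
# pylint: disable=consider-using-f-string
VARIABLE_PROG = re.compile("\\$([{0}]+|\\{{[{0}]*\\}})".format(VALID_CHARS))

# The f-string equivalent would repeat the constant and still need doubled braces:
# re.compile(f"\\$([{VALID_CHARS}]+|\\{{[{VALID_CHARS}]*\\}})")

assert VARIABLE_PROG.search("$wifi_ssid")
assert VARIABLE_PROG.search("${wifi_ssid}")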
@ -67,9 +67,7 @@ def _week_of_month(dt):
|
|||
|
||||
def _tz_dst_str(dt):
|
||||
td = datetime.timedelta(hours=dt.hour, minutes=dt.minute, seconds=dt.second)
|
||||
return "M{}.{}.{}/{}".format(
|
||||
dt.month, _week_of_month(dt), dt.isoweekday() % 7, _tz_timedelta(td)
|
||||
)
|
||||
return f"M{dt.month}.{_week_of_month(dt)}.{dt.isoweekday() % 7}/{_tz_timedelta(td)}"
|
||||
|
||||
|
||||
def _safe_tzname(tz, dt):
|
||||
|
@ -88,7 +86,7 @@ def _non_dst_tz(tz, dt):
|
|||
_LOGGER.info(
|
||||
"Detected timezone '%s' with UTC offset %s", tzname, _tz_timedelta(utcoffset)
|
||||
)
|
||||
tzbase = "{}{}".format(tzname, _tz_timedelta(-1 * utcoffset))
|
||||
tzbase = f"{tzname}{_tz_timedelta(-1 * utcoffset)}"
|
||||
return tzbase
|
||||
|
||||
|
||||
|
@ -129,14 +127,9 @@ def convert_tz(pytz_obj):
|
|||
dst_ends_utc = transition_times[idx2]
|
||||
dst_ends_local = dst_ends_utc + utcoffset_on
|
||||
|
||||
tzbase = "{}{}".format(tzname_off, _tz_timedelta(-1 * utcoffset_off))
|
||||
tzbase = f"{tzname_off}{_tz_timedelta(-1 * utcoffset_off)}"
|
||||
|
||||
tzext = "{}{},{},{}".format(
|
||||
tzname_on,
|
||||
_tz_timedelta(-1 * utcoffset_on),
|
||||
_tz_dst_str(dst_begins_local),
|
||||
_tz_dst_str(dst_ends_local),
|
||||
)
|
||||
tzext = f"{tzname_on}{_tz_timedelta(-1 * utcoffset_on)},{_tz_dst_str(dst_begins_local)},{_tz_dst_str(dst_ends_local)}"
|
||||
_LOGGER.info(
|
||||
"Detected timezone '%s' with UTC offset %s and daylight saving time from "
|
||||
"%s to %s",
|
||||
|
@ -176,9 +169,7 @@ def _parse_cron_part(part, min_value, max_value, special_mapping):
|
|||
data = part.split("/")
|
||||
if len(data) > 2:
|
||||
raise cv.Invalid(
|
||||
"Can't have more than two '/' in one time expression, got {}".format(
|
||||
part
|
||||
)
|
||||
f"Can't have more than two '/' in one time expression, got {part}"
|
||||
)
|
||||
offset, repeat = data
|
||||
offset_n = 0
|
||||
|
@ -194,18 +185,14 @@ def _parse_cron_part(part, min_value, max_value, special_mapping):
|
|||
except ValueError:
|
||||
# pylint: disable=raise-missing-from
|
||||
raise cv.Invalid(
|
||||
"Repeat for '/' time expression must be an integer, got {}".format(
|
||||
repeat
|
||||
)
|
||||
f"Repeat for '/' time expression must be an integer, got {repeat}"
|
||||
)
|
||||
return set(range(offset_n, max_value + 1, repeat_n))
|
||||
if "-" in part:
|
||||
data = part.split("-")
|
||||
if len(data) > 2:
|
||||
raise cv.Invalid(
|
||||
"Can't have more than two '-' in range time expression '{}'".format(
|
||||
part
|
||||
)
|
||||
f"Can't have more than two '-' in range time expression '{part}'"
|
||||
)
|
||||
begin, end = data
|
||||
begin_n = _parse_cron_int(
|
||||
|
@ -233,13 +220,11 @@ def cron_expression_validator(name, min_value, max_value, special_mapping=None):
|
|||
for v in value:
|
||||
if not isinstance(v, int):
|
||||
raise cv.Invalid(
|
||||
"Expected integer for {} '{}', got {}".format(v, name, type(v))
|
||||
f"Expected integer for {v} '{name}', got {type(v)}"
|
||||
)
|
||||
if v < min_value or v > max_value:
|
||||
raise cv.Invalid(
|
||||
"{} {} is out of range (min={} max={}).".format(
|
||||
name, v, min_value, max_value
|
||||
)
|
||||
f"{name} {v} is out of range (min={min_value} max={max_value})."
|
||||
)
|
||||
return list(sorted(value))
|
||||
value = cv.string(value)
|
||||
|
@ -295,8 +280,7 @@ def validate_cron_raw(value):
|
|||
value = value.split(" ")
|
||||
if len(value) != 6:
|
||||
raise cv.Invalid(
|
||||
"Cron expression must consist of exactly 6 space-separated parts, "
|
||||
"not {}".format(len(value))
|
||||
f"Cron expression must consist of exactly 6 space-separated parts, not {len(value)}"
|
||||
)
|
||||
seconds, minutes, hours, days_of_month, months, days_of_week = value
|
||||
return {
|
||||
|
|
|
@ -57,6 +57,7 @@ def wrapped_load_pem_private_key(value, password):
|
|||
|
||||
|
||||
def read_relative_config_path(value):
|
||||
# pylint: disable=unspecified-encoding
|
||||
return Path(CORE.relative_config_path(value)).read_text()
|
||||
|
||||
|
||||
|
|
|
@ -42,7 +42,7 @@ def iter_components(config):
|
|||
yield domain, component, conf
|
||||
if component.is_platform_component:
|
||||
for p_config in conf:
|
||||
p_name = "{}.{}".format(domain, p_config[CONF_PLATFORM])
|
||||
p_name = f"{domain}.{p_config[CONF_PLATFORM]}"
|
||||
platform = get_platform(domain, p_config[CONF_PLATFORM])
|
||||
yield p_name, platform, p_config
|
||||
|
||||
|
@ -238,10 +238,7 @@ def do_id_pass(result): # type: (Config) -> None
|
|||
# No declared ID with this name
|
||||
import difflib
|
||||
|
||||
error = (
|
||||
"Couldn't find ID '{}'. Please check you have defined "
|
||||
"an ID with that name in your configuration.".format(id.id)
|
||||
)
|
||||
error = f"Couldn't find ID '{id.id}'. Please check you have defined an ID with that name in your configuration."
|
||||
# Find candidates
|
||||
matches = difflib.get_close_matches(
|
||||
id.id, [v[0].id for v in result.declare_ids if v[0].is_manual]
|
||||
|
@ -257,9 +254,7 @@ def do_id_pass(result): # type: (Config) -> None
|
|||
continue
|
||||
if not match.type.inherits_from(id.type):
|
||||
result.add_str_error(
|
||||
"ID '{}' of type {} doesn't inherit from {}. Please "
|
||||
"double check your ID is pointing to the correct value"
|
||||
"".format(id.id, match.type, id.type),
|
||||
f"ID '{id.id}' of type {match.type} doesn't inherit from {id.type}. Please double check your ID is pointing to the correct value",
|
||||
path,
|
||||
)
|
||||
|
||||
|
@ -497,7 +492,7 @@ def validate_config(config, command_line_substitutions):
|
|||
for dependency in comp.dependencies:
|
||||
if dependency not in config:
|
||||
result.add_str_error(
|
||||
"Component {} requires component {}" "".format(domain, dependency),
|
||||
f"Component {domain} requires component {dependency}",
|
||||
path,
|
||||
)
|
||||
success = False
|
||||
|
@ -508,8 +503,7 @@ def validate_config(config, command_line_substitutions):
|
|||
for conflict in comp.conflicts_with:
|
||||
if conflict in config:
|
||||
result.add_str_error(
|
||||
"Component {} cannot be used together with component {}"
|
||||
"".format(domain, conflict),
|
||||
f"Component {domain} cannot be used together with component {conflict}",
|
||||
path,
|
||||
)
|
||||
success = False
|
||||
|
@ -518,7 +512,7 @@ def validate_config(config, command_line_substitutions):
|
|||
|
||||
if CORE.esp_platform not in comp.esp_platforms:
|
||||
result.add_str_error(
|
||||
"Component {} doesn't support {}.".format(domain, CORE.esp_platform),
|
||||
f"Component {domain} doesn't support {CORE.esp_platform}.",
|
||||
path,
|
||||
)
|
||||
continue
|
||||
|
@ -529,8 +523,7 @@ def validate_config(config, command_line_substitutions):
|
|||
and not isinstance(conf, core.AutoLoad)
|
||||
):
|
||||
result.add_str_error(
|
||||
"Component {} cannot be loaded via YAML "
|
||||
"(no CONFIG_SCHEMA).".format(domain),
|
||||
f"Component {domain} cannot be loaded via YAML (no CONFIG_SCHEMA).",
|
||||
path,
|
||||
)
|
||||
continue
|
||||
|
@ -540,8 +533,7 @@ def validate_config(config, command_line_substitutions):
|
|||
result[domain] = conf = [conf]
|
||||
if not isinstance(comp.multi_conf, bool) and len(conf) > comp.multi_conf:
|
||||
result.add_str_error(
|
||||
"Component {} supports a maximum of {} "
|
||||
"entries ({} found).".format(domain, comp.multi_conf, len(conf)),
|
||||
f"Component {domain} supports a maximum of {comp.multi_conf} entries ({len(conf)} found).",
|
||||
path,
|
||||
)
|
||||
continue
|
||||
|
@ -636,18 +628,14 @@ def _format_vol_invalid(ex, config):
|
|||
|
||||
if isinstance(ex, ExtraKeysInvalid):
|
||||
if ex.candidates:
|
||||
message += "[{}] is an invalid option for [{}]. Did you mean {}?".format(
|
||||
ex.path[-1], paren, ", ".join(f"[{x}]" for x in ex.candidates)
|
||||
)
|
||||
message += f"[{ex.path[-1]}] is an invalid option for [{paren}]. Did you mean {', '.join(f'[{x}]' for x in ex.candidates)}?"
|
||||
else:
|
||||
message += "[{}] is an invalid option for [{}]. Please check the indentation.".format(
|
||||
ex.path[-1], paren
|
||||
)
|
||||
message += f"[{ex.path[-1]}] is an invalid option for [{paren}]. Please check the indentation."
|
||||
elif "extra keys not allowed" in str(ex):
|
||||
message += "[{}] is an invalid option for [{}].".format(ex.path[-1], paren)
|
||||
message += f"[{ex.path[-1]}] is an invalid option for [{paren}]."
|
||||
elif isinstance(ex, vol.RequiredFieldInvalid):
|
||||
if ex.msg == "required key not provided":
|
||||
message += "'{}' is a required option for [{}].".format(ex.path[-1], paren)
|
||||
message += f"'{ex.path[-1]}' is a required option for [{paren}]."
|
||||
else:
|
||||
# Required has set a custom error message
|
||||
message += ex.msg
|
||||
|
@ -700,7 +688,7 @@ def line_info(config, path, highlight=True):
|
|||
obj = config.get_deepest_document_range_for_path(path)
|
||||
if obj:
|
||||
mark = obj.start_mark
|
||||
source = "[source {}:{}]".format(mark.document, mark.line + 1)
|
||||
source = f"[source {mark.document}:{mark.line + 1}]"
|
||||
return color(Fore.CYAN, source)
|
||||
return "None"
|
||||
|
||||
|
@ -724,9 +712,7 @@ def dump_dict(config, path, at_root=True):
|
|||
if at_root:
|
||||
error = config.get_error_for_path(path)
|
||||
if error is not None:
|
||||
ret += (
|
||||
"\n" + color(Fore.BOLD_RED, _format_vol_invalid(error, config)) + "\n"
|
||||
)
|
||||
ret += f"\n{color(Fore.BOLD_RED, _format_vol_invalid(error, config))}\n"
|
||||
|
||||
if isinstance(conf, (list, tuple)):
|
||||
multiline = True
|
||||
|
@ -738,11 +724,7 @@ def dump_dict(config, path, at_root=True):
|
|||
path_ = path + [i]
|
||||
error = config.get_error_for_path(path_)
|
||||
if error is not None:
|
||||
ret += (
|
||||
"\n"
|
||||
+ color(Fore.BOLD_RED, _format_vol_invalid(error, config))
|
||||
+ "\n"
|
||||
)
|
||||
ret += f"\n{color(Fore.BOLD_RED, _format_vol_invalid(error, config))}\n"
|
||||
|
||||
sep = "- "
|
||||
if config.is_in_error_path(path_):
|
||||
|
@ -751,10 +733,10 @@ def dump_dict(config, path, at_root=True):
|
|||
msg = indent(msg)
|
||||
inf = line_info(config, path_, highlight=config.is_in_error_path(path_))
|
||||
if inf is not None:
|
||||
msg = inf + "\n" + msg
|
||||
msg = f"{inf}\n{msg}"
|
||||
elif msg:
|
||||
msg = msg[2:]
|
||||
ret += sep + msg + "\n"
|
||||
ret += f"{sep + msg}\n"
|
||||
elif isinstance(conf, dict):
|
||||
multiline = True
|
||||
if not conf:
|
||||
|
@ -765,11 +747,7 @@ def dump_dict(config, path, at_root=True):
|
|||
path_ = path + [k]
|
||||
error = config.get_error_for_path(path_)
|
||||
if error is not None:
|
||||
ret += (
|
||||
"\n"
|
||||
+ color(Fore.BOLD_RED, _format_vol_invalid(error, config))
|
||||
+ "\n"
|
||||
)
|
||||
ret += f"\n{color(Fore.BOLD_RED, _format_vol_invalid(error, config))}\n"
|
||||
|
||||
st = f"{k}: "
|
||||
if config.is_in_error_path(path_):
|
||||
|
@ -778,30 +756,30 @@ def dump_dict(config, path, at_root=True):
|
|||
|
||||
inf = line_info(config, path_, highlight=config.is_in_error_path(path_))
|
||||
if m:
|
||||
msg = "\n" + indent(msg)
|
||||
msg = f"\n{indent(msg)}"
|
||||
|
||||
if inf is not None:
|
||||
if m:
|
||||
msg = " " + inf + msg
|
||||
msg = f" {inf}{msg}"
|
||||
else:
|
||||
msg = msg + " " + inf
|
||||
ret += st + msg + "\n"
|
||||
msg = f"{msg} {inf}"
|
||||
ret += f"{st + msg}\n"
|
||||
elif isinstance(conf, str):
|
||||
if is_secret(conf):
|
||||
conf = "!secret {}".format(is_secret(conf))
|
||||
conf = f"!secret {is_secret(conf)}"
|
||||
if not conf:
|
||||
conf += "''"
|
||||
|
||||
if len(conf) > 80:
|
||||
conf = "|-\n" + indent(conf)
|
||||
conf = f"|-\n{indent(conf)}"
|
||||
error = config.get_error_for_path(path)
|
||||
col = Fore.BOLD_RED if error else Fore.KEEP
|
||||
ret += color(col, str(conf))
|
||||
elif isinstance(conf, core.Lambda):
|
||||
if is_secret(conf):
|
||||
conf = "!secret {}".format(is_secret(conf))
|
||||
conf = f"!secret {is_secret(conf)}"
|
||||
|
||||
conf = "!lambda |-\n" + indent(str(conf.value))
|
||||
conf = f"!lambda |-\n{indent(str(conf.value))}"
|
||||
error = config.get_error_for_path(path)
|
||||
col = Fore.BOLD_RED if error else Fore.KEEP
|
||||
ret += color(col, conf)
|
||||
|
@ -860,7 +838,7 @@ def read_config(command_line_substitutions):
|
|||
errstr = color(Fore.BOLD_RED, f"{domain}:")
|
||||
errline = line_info(res, path)
|
||||
if errline:
|
||||
errstr += " " + errline
|
||||
errstr += f" {errline}"
|
||||
safe_print(errstr)
|
||||
safe_print(indent(dump_dict(res, path)[0]))
|
||||
return None
|
||||
|
|
|
@ -277,8 +277,7 @@ def string_strict(value):
|
|||
if isinstance(value, str):
|
||||
return value
|
||||
raise Invalid(
|
||||
"Must be string, got {}. did you forget putting quotes "
|
||||
"around the value?".format(type(value))
|
||||
f"Must be string, got {type(value)}. did you forget putting quotes around the value?"
|
||||
)
|
||||
|
||||
|
||||
|
@ -311,8 +310,7 @@ def boolean(value):
|
|||
if value in ("false", "no", "off", "disable"):
|
||||
return False
|
||||
raise Invalid(
|
||||
"Expected boolean value, but cannot convert {} to a boolean. "
|
||||
"Please use 'true' or 'false'".format(value)
|
||||
f"Expected boolean value, but cannot convert {value} to a boolean. Please use 'true' or 'false'"
|
||||
)
|
||||
|
||||
|
||||
|
@ -358,8 +356,7 @@ def int_(value):
|
|||
if int(value) == value:
|
||||
return int(value)
|
||||
raise Invalid(
|
||||
"This option only accepts integers with no fractional part. Please remove "
|
||||
"the fractional part from {}".format(value)
|
||||
f"This option only accepts integers with no fractional part. Please remove the fractional part from {value}"
|
||||
)
|
||||
value = string_strict(value).lower()
|
||||
base = 10
|
||||
|
@ -424,20 +421,17 @@ def validate_id_name(value):
|
|||
raise Invalid(
|
||||
"Dashes are not supported in IDs, please use underscores instead."
|
||||
)
|
||||
valid_chars = ascii_letters + digits + "_"
|
||||
valid_chars = f"{ascii_letters + digits}_"
|
||||
for char in value:
|
||||
if char not in valid_chars:
|
||||
raise Invalid(
|
||||
"IDs must only consist of upper/lowercase characters, the underscore"
|
||||
"character and numbers. The character '{}' cannot be used"
|
||||
"".format(char)
|
||||
f"IDs must only consist of upper/lowercase characters, the underscorecharacter and numbers. The character '{char}' cannot be used"
|
||||
)
|
||||
if value in RESERVED_IDS:
|
||||
raise Invalid(f"ID '{value}' is reserved internally and cannot be used")
|
||||
if value in CORE.loaded_integrations:
|
||||
raise Invalid(
|
||||
"ID '{}' conflicts with the name of an esphome integration, please use "
|
||||
"another ID name.".format(value)
|
||||
f"ID '{value}' conflicts with the name of an esphome integration, please use another ID name."
|
||||
)
|
||||
return value
|
||||
|
||||
|
@ -525,7 +519,7 @@ def has_at_least_one_key(*keys):
|
|||
raise Invalid("expected dictionary")
|
||||
|
||||
if not any(k in keys for k in obj):
|
||||
raise Invalid("Must contain at least one of {}.".format(", ".join(keys)))
|
||||
raise Invalid(f"Must contain at least one of {', '.join(keys)}.")
|
||||
return obj
|
||||
|
||||
return validate
|
||||
|
@ -540,9 +534,9 @@ def has_exactly_one_key(*keys):
|
|||
|
||||
number = sum(k in keys for k in obj)
|
||||
if number > 1:
|
||||
raise Invalid("Cannot specify more than one of {}.".format(", ".join(keys)))
|
||||
raise Invalid(f"Cannot specify more than one of {', '.join(keys)}.")
|
||||
if number < 1:
|
||||
raise Invalid("Must contain exactly one of {}.".format(", ".join(keys)))
|
||||
raise Invalid(f"Must contain exactly one of {', '.join(keys)}.")
|
||||
return obj
|
||||
|
||||
return validate
|
||||
|
@ -557,7 +551,7 @@ def has_at_most_one_key(*keys):
|
|||
|
||||
number = sum(k in keys for k in obj)
|
||||
if number > 1:
|
||||
raise Invalid("Cannot specify more than one of {}.".format(", ".join(keys)))
|
||||
raise Invalid(f"Cannot specify more than one of {', '.join(keys)}.")
|
||||
return obj
|
||||
|
||||
return validate
|
||||
|
@ -572,9 +566,7 @@ def has_none_or_all_keys(*keys):
|
|||
|
||||
number = sum(k in keys for k in obj)
|
||||
if number != 0 and number != len(keys):
|
||||
raise Invalid(
|
||||
"Must specify either none or all of {}.".format(", ".join(keys))
|
||||
)
|
||||
raise Invalid(f"Must specify either none or all of {', '.join(keys)}.")
|
||||
return obj
|
||||
|
||||
return validate
|
||||
|
@ -632,8 +624,7 @@ def time_period_str_unit(value):
|
|||
|
||||
if isinstance(value, int):
|
||||
raise Invalid(
|
||||
"Don't know what '{0}' means as it has no time *unit*! Did you mean "
|
||||
"'{0}s'?".format(value)
|
||||
f"Don't know what '{value}' means as it has no time *unit*! Did you mean '{value}s'?"
|
||||
)
|
||||
if isinstance(value, TimePeriod):
|
||||
value = str(value)
|
||||
|
@ -659,7 +650,7 @@ def time_period_str_unit(value):
|
|||
match = re.match(r"^([-+]?[0-9]*\.?[0-9]*)\s*(\w*)$", value)
|
||||
|
||||
if match is None:
|
||||
raise Invalid("Expected time period with unit, " "got {}".format(value))
|
||||
raise Invalid(f"Expected time period with unit, got {value}")
|
||||
kwarg = unit_to_kwarg[one_of(*unit_to_kwarg)(match.group(2))]
|
||||
|
||||
return TimePeriod(**{kwarg: float(match.group(1))})
|
||||
|
@ -796,7 +787,7 @@ METRIC_SUFFIXES = {
|
|||
|
||||
def float_with_unit(quantity, regex_suffix, optional_unit=False):
|
||||
pattern = re.compile(
|
||||
r"^([-+]?[0-9]*\.?[0-9]*)\s*(\w*?)" + regex_suffix + r"$", re.UNICODE
|
||||
f"^([-+]?[0-9]*\\.?[0-9]*)\\s*(\\w*?){regex_suffix}$", re.UNICODE
|
||||
)
|
||||
|
||||
def validator(value):
|
||||
|
@ -812,7 +803,7 @@ def float_with_unit(quantity, regex_suffix, optional_unit=False):
|
|||
|
||||
mantissa = float(match.group(1))
|
||||
if match.group(2) not in METRIC_SUFFIXES:
|
||||
raise Invalid("Invalid {} suffix {}".format(quantity, match.group(2)))
|
||||
raise Invalid(f"Invalid {quantity} suffix {match.group(2)}")
|
||||
|
||||
multiplier = METRIC_SUFFIXES[match.group(2)]
|
||||
return mantissa * multiplier
|
||||
|
@ -879,12 +870,11 @@ def validate_bytes(value):
|
|||
|
||||
mantissa = int(match.group(1))
|
||||
if match.group(2) not in METRIC_SUFFIXES:
|
||||
raise Invalid("Invalid metric suffix {}".format(match.group(2)))
|
||||
raise Invalid(f"Invalid metric suffix {match.group(2)}")
|
||||
multiplier = METRIC_SUFFIXES[match.group(2)]
|
||||
if multiplier < 1:
|
||||
raise Invalid(
|
||||
"Only suffixes with positive exponents are supported. "
|
||||
"Got {}".format(match.group(2))
|
||||
f"Only suffixes with positive exponents are supported. Got {match.group(2)}"
|
||||
)
|
||||
return int(mantissa * multiplier)
|
||||
|
||||
|
@ -1184,10 +1174,8 @@ def one_of(*values, **kwargs):
|
|||
option = str(value)
|
||||
matches = difflib.get_close_matches(option, options_)
|
||||
if matches:
|
||||
raise Invalid(
|
||||
"Unknown value '{}', did you mean {}?"
|
||||
"".format(value, ", ".join(f"'{x}'" for x in matches))
|
||||
)
|
||||
matches_str = ", ".join(f"'{x}'" for x in matches)
|
||||
raise Invalid(f"Unknown value '{value}', did you mean {matches_str}?")
|
||||
raise Invalid(f"Unknown value '{value}', valid options are {options}.")
|
||||
return value
|
||||
|
||||
|
@ -1229,13 +1217,10 @@ def lambda_(value):
|
|||
entity_id_parts = re.split(LAMBDA_ENTITY_ID_PROG, value.value)
|
||||
if len(entity_id_parts) != 1:
|
||||
entity_ids = " ".join(
|
||||
"'{}'".format(entity_id_parts[i]) for i in range(1, len(entity_id_parts), 2)
|
||||
f"'{entity_id_parts[i]}'" for i in range(1, len(entity_id_parts), 2)
|
||||
)
|
||||
raise Invalid(
|
||||
"Lambda contains reference to entity-id-style ID {}. "
|
||||
"The id() wrapper only works for ESPHome-internal types. For importing "
|
||||
"states from Home Assistant use the 'homeassistant' sensor platforms."
|
||||
"".format(entity_ids)
|
||||
f"Lambda contains reference to entity-id-style ID {entity_ids}. The id() wrapper only works for ESPHome-internal types. For importing states from Home Assistant use the 'homeassistant' sensor platforms."
|
||||
)
|
||||
return value
|
||||
|
||||
|
@ -1259,9 +1244,7 @@ def returning_lambda(value):
|
|||
def dimensions(value):
|
||||
if isinstance(value, list):
|
||||
if len(value) != 2:
|
||||
raise Invalid(
|
||||
"Dimensions must have a length of two, not {}".format(len(value))
|
||||
)
|
||||
raise Invalid(f"Dimensions must have a length of two, not {len(value)}")
|
||||
try:
|
||||
width, height = int(value[0]), int(value[1])
|
||||
except ValueError:
|
||||
|
@ -1301,19 +1284,16 @@ def directory(value):
|
|||
if data["content"]:
|
||||
return value
|
||||
raise Invalid(
|
||||
"Could not find directory '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path))
|
||||
f"Could not find directory '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})."
|
||||
)
|
||||
|
||||
if not os.path.exists(path):
|
||||
raise Invalid(
|
||||
"Could not find directory '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path))
|
||||
f"Could not find directory '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})."
|
||||
)
|
||||
if not os.path.isdir(path):
|
||||
raise Invalid(
|
||||
"Path '{}' is not a directory (full path: {})."
|
||||
"".format(path, os.path.abspath(path))
|
||||
f"Path '{path}' is not a directory (full path: {os.path.abspath(path)})."
|
||||
)
|
||||
return value
|
||||
|
||||
|
@ -1340,19 +1320,16 @@ def file_(value):
|
|||
if data["content"]:
|
||||
return value
|
||||
raise Invalid(
|
||||
"Could not find file '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path))
|
||||
f"Could not find file '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})."
|
||||
)
|
||||
|
||||
if not os.path.exists(path):
|
||||
raise Invalid(
|
||||
"Could not find file '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path))
|
||||
f"Could not find file '{path}'. Please make sure it exists (full path: {os.path.abspath(path)})."
|
||||
)
|
||||
if not os.path.isfile(path):
|
||||
raise Invalid(
|
||||
"Path '{}' is not a file (full path: {})."
|
||||
"".format(path, os.path.abspath(path))
|
||||
f"Path '{path}' is not a file (full path: {os.path.abspath(path)})."
|
||||
)
|
||||
return value
|
||||
|
||||
|
@ -1405,7 +1382,7 @@ def typed_schema(schemas, **kwargs):
|
|||
value = value.copy()
|
||||
schema_option = value.pop(key, default_schema_option)
|
||||
if schema_option is None:
|
||||
raise Invalid(key + " not specified!")
|
||||
raise Invalid(f"{key} not specified!")
|
||||
key_v = key_validator(schema_option)
|
||||
value = schemas[key_v](value)
|
||||
value[key] = key_v
|
||||
|
@ -1498,8 +1475,7 @@ def validate_registry_entry(name, registry):
|
|||
value = {value: {}}
|
||||
if not isinstance(value, dict):
|
||||
raise Invalid(
|
||||
"{} must consist of key-value mapping! Got {}"
|
||||
"".format(name.title(), value)
|
||||
f"{name.title()} must consist of key-value mapping! Got {value}"
|
||||
)
|
||||
key = next((x for x in value if x not in ignore_keys), None)
|
||||
if key is None:
|
||||
|
@ -1509,9 +1485,8 @@ def validate_registry_entry(name, registry):
|
|||
key2 = next((x for x in value if x != key and x not in ignore_keys), None)
|
||||
if key2 is not None:
|
||||
raise Invalid(
|
||||
"Cannot have two {0}s in one item. Key '{1}' overrides '{2}'! "
|
||||
"Did you forget to indent the block inside the {0}?"
|
||||
"".format(name, key, key2)
|
||||
f"Cannot have two {name}s in one item. Key '{key}' overrides '{key2}'! "
|
||||
f"Did you forget to indent the block inside the {key}?"
|
||||
)
|
||||
|
||||
if value[key] is None:
|
||||
|
|
|
@ -624,8 +624,7 @@ class EsphomeCore:
|
|||
expression = statement(expression)
|
||||
if not isinstance(expression, Statement):
|
||||
raise ValueError(
|
||||
"Add '{}' must be expression or statement, not {}"
|
||||
"".format(expression, type(expression))
|
||||
f"Add '{expression}' must be expression or statement, not {type(expression)}"
|
||||
)
|
||||
|
||||
self.main_statements.append(expression)
|
||||
|
@ -639,8 +638,7 @@ class EsphomeCore:
|
|||
expression = statement(expression)
|
||||
if not isinstance(expression, Statement):
|
||||
raise ValueError(
|
||||
"Add '{}' must be expression or statement, not {}"
|
||||
"".format(expression, type(expression))
|
||||
f"Add '{expression}' must be expression or statement, not {type(expression)}"
|
||||
)
|
||||
self.global_statements.append(expression)
|
||||
_LOGGER.debug("Adding global: %s", expression)
|
||||
|
@ -649,8 +647,7 @@ class EsphomeCore:
|
|||
def add_library(self, library):
|
||||
if not isinstance(library, Library):
|
||||
raise ValueError(
|
||||
"Library {} must be instance of Library, not {}"
|
||||
"".format(library, type(library))
|
||||
f"Library {library} must be instance of Library, not {type(library)}"
|
||||
)
|
||||
for other in self.libraries[:]:
|
||||
if other.name != library.name or other.name is None or library.name is None:
|
||||
|
@ -660,9 +657,8 @@ class EsphomeCore:
|
|||
# Other is using a/the same repository, takes precendence
|
||||
break
|
||||
raise ValueError(
|
||||
"Adding named Library with repository failed! Libraries {} and {} "
|
||||
f"Adding named Library with repository failed! Libraries {library} and {other} "
|
||||
"requested with conflicting repositories!"
|
||||
"".format(library, other)
|
||||
)
|
||||
|
||||
if library.repository is not None:
|
||||
|
@ -681,9 +677,8 @@ class EsphomeCore:
|
|||
break
|
||||
|
||||
raise ValueError(
|
||||
"Version pinning failed! Libraries {} and {} "
|
||||
f"Version pinning failed! Libraries {library} and {other} "
|
||||
"requested with conflicting versions!"
|
||||
"".format(library, other)
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug("Adding library: %s", library)
|
||||
|
@ -702,8 +697,7 @@ class EsphomeCore:
|
|||
pass
|
||||
else:
|
||||
raise ValueError(
|
||||
"Define {} must be string or Define, not {}"
|
||||
"".format(define, type(define))
|
||||
f"Define {define} must be string or Define, not {type(define)}"
|
||||
)
|
||||
self.defines.add(define)
|
||||
_LOGGER.debug("Adding define: %s", define)
|
||||
|
|
|
@ -61,9 +61,7 @@ def validate_board(value: str):
|
|||
|
||||
if value not in boardlist:
|
||||
raise cv.Invalid(
|
||||
"Could not find board '{}'. Valid boards are {}".format(
|
||||
value, ", ".join(sorted(boardlist))
|
||||
)
|
||||
f"Could not find board '{value}'. Valid boards are {', '.join(sorted(boardlist))}"
|
||||
)
|
||||
return value
|
||||
|
||||
|
@ -102,9 +100,7 @@ def validate_arduino_version(value):
|
|||
and value_ not in PLATFORMIO_ESP8266_LUT
|
||||
):
|
||||
raise cv.Invalid(
|
||||
"Unfortunately the arduino framework version '{}' is unsupported "
|
||||
"at this time. You can override this by manually using "
|
||||
"espressif8266@<platformio version>".format(value)
|
||||
f"Unfortunately the arduino framework version '{value}' is unsupported at this time. You can override this by manually using espressif8266@<platformio version>"
|
||||
)
|
||||
if value_ in PLATFORMIO_ESP8266_LUT:
|
||||
return PLATFORMIO_ESP8266_LUT[value_]
|
||||
|
@ -115,9 +111,7 @@ def validate_arduino_version(value):
|
|||
and value_ not in PLATFORMIO_ESP32_LUT
|
||||
):
|
||||
raise cv.Invalid(
|
||||
"Unfortunately the arduino framework version '{}' is unsupported "
|
||||
"at this time. You can override this by manually using "
|
||||
"espressif32@<platformio version>".format(value)
|
||||
f"Unfortunately the arduino framework version '{value}' is unsupported at this time. You can override this by manually using espressif32@<platformio version>"
|
||||
)
|
||||
if value_ in PLATFORMIO_ESP32_LUT:
|
||||
return PLATFORMIO_ESP32_LUT[value_]
|
||||
|
@ -141,8 +135,7 @@ def valid_include(value):
|
|||
_, ext = os.path.splitext(value)
|
||||
if ext not in VALID_INCLUDE_EXTS:
|
||||
raise cv.Invalid(
|
||||
"Include has invalid file extension {} - valid extensions are {}"
|
||||
"".format(ext, ", ".join(VALID_INCLUDE_EXTS))
|
||||
f"Include has invalid file extension {ext} - valid extensions are {', '.join(VALID_INCLUDE_EXTS)}"
|
||||
)
|
||||
return value
|
||||
|
||||
|
|
|
@ -182,7 +182,7 @@ class ArrayInitializer(Expression):
|
|||
cpp += f" {arg},\n"
|
||||
cpp += "}"
|
||||
else:
|
||||
cpp = "{" + ", ".join(str(arg) for arg in self.args) + "}"
|
||||
cpp = f"{{{', '.join(str(arg) for arg in self.args)}}}"
|
||||
return cpp
|
||||
|
||||
|
||||
|
@ -348,13 +348,11 @@ def safe_exp(obj: SafeExpType) -> Expression:
|
|||
return float_
|
||||
if isinstance(obj, ID):
|
||||
raise ValueError(
|
||||
"Object {} is an ID. Did you forget to register the variable?"
|
||||
"".format(obj)
|
||||
f"Object {obj} is an ID. Did you forget to register the variable?"
|
||||
)
|
||||
if inspect.isgenerator(obj):
|
||||
raise ValueError(
|
||||
"Object {} is a coroutine. Did you forget to await the expression with "
|
||||
"'await'?".format(obj)
|
||||
f"Object {obj} is a coroutine. Did you forget to await the expression with 'await'?"
|
||||
)
|
||||
raise ValueError("Object is not an expression", obj)
|
||||
|
||||
|
@ -703,7 +701,7 @@ class MockObj(Expression):
|
|||
return str(self.base)
|
||||
|
||||
def __repr__(self):
|
||||
return "MockObj<{}>".format(str(self.base))
|
||||
return f"MockObj<{str(self.base)}>"
|
||||
|
||||
@property
|
||||
def _(self) -> "MockObj":
|
||||
|
@ -761,7 +759,7 @@ class MockObjEnum(MockObj):
|
|||
self._is_class = kwargs.pop("is_class")
|
||||
base = kwargs.pop("base")
|
||||
if self._is_class:
|
||||
base = base + "::" + self._enum
|
||||
base = f"{base}::{self._enum}"
|
||||
kwargs["op"] = "::"
|
||||
kwargs["base"] = base
|
||||
MockObj.__init__(self, *args, **kwargs)
|
||||
|
|
|
@ -52,9 +52,7 @@ async def register_component(var, config):
|
|||
id_ = str(var.base)
|
||||
if id_ not in CORE.component_ids:
|
||||
raise ValueError(
|
||||
"Component ID {} was not declared to inherit from Component, "
|
||||
"or was registered twice. Please create a bug report with your "
|
||||
"configuration.".format(id_)
|
||||
f"Component ID {id_} was not declared to inherit from Component, or was registered twice. Please create a bug report with your configuration."
|
||||
)
|
||||
CORE.component_ids.remove(id_)
|
||||
if CONF_SETUP_PRIORITY in config:
|
||||
|
|
|
@ -368,7 +368,7 @@ class WizardRequestHandler(BaseHandler):
|
|||
if k in ("name", "platform", "board", "ssid", "psk", "password")
|
||||
}
|
||||
kwargs["ota_password"] = secrets.token_hex(16)
|
||||
destination = settings.rel_path(kwargs["name"] + ".yaml")
|
||||
destination = settings.rel_path(f"{kwargs['name']}.yaml")
|
||||
wizard.wizard_write(path=destination, **kwargs)
|
||||
self.set_status(200)
|
||||
self.finish()
|
||||
|
@ -512,7 +512,7 @@ class MDNSStatusThread(threading.Thread):
|
|||
while not STOP_EVENT.is_set():
|
||||
entries = _list_dashboard_entries()
|
||||
stat.request_query(
|
||||
{entry.filename: entry.name + ".local." for entry in entries}
|
||||
{entry.filename: f"{entry.name}.local." for entry in entries}
|
||||
)
|
||||
|
||||
PING_REQUEST.wait()
|
||||
|
@ -795,27 +795,27 @@ def make_app(debug=get_bool_env(ENV_DEV)):
|
|||
rel = settings.relative_url
|
||||
app = tornado.web.Application(
|
||||
[
|
||||
(rel + "", MainRequestHandler),
|
||||
(rel + "login", LoginHandler),
|
||||
(rel + "logout", LogoutHandler),
|
||||
(rel + "logs", EsphomeLogsHandler),
|
||||
(rel + "upload", EsphomeUploadHandler),
|
||||
(rel + "compile", EsphomeCompileHandler),
|
||||
(rel + "validate", EsphomeValidateHandler),
|
||||
(rel + "clean-mqtt", EsphomeCleanMqttHandler),
|
||||
(rel + "clean", EsphomeCleanHandler),
|
||||
(rel + "vscode", EsphomeVscodeHandler),
|
||||
(rel + "ace", EsphomeAceEditorHandler),
|
||||
(rel + "update-all", EsphomeUpdateAllHandler),
|
||||
(rel + "info", InfoRequestHandler),
|
||||
(rel + "edit", EditRequestHandler),
|
||||
(rel + "download.bin", DownloadBinaryRequestHandler),
|
||||
(rel + "serial-ports", SerialPortRequestHandler),
|
||||
(rel + "ping", PingRequestHandler),
|
||||
(rel + "delete", DeleteRequestHandler),
|
||||
(rel + "undo-delete", UndoDeleteRequestHandler),
|
||||
(rel + "wizard.html", WizardRequestHandler),
|
||||
(rel + r"static/(.*)", StaticFileHandler, {"path": get_static_path()}),
|
||||
(f"{rel}", MainRequestHandler),
|
||||
(f"{rel}login", LoginHandler),
|
||||
(f"{rel}logout", LogoutHandler),
|
||||
(f"{rel}logs", EsphomeLogsHandler),
|
||||
(f"{rel}upload", EsphomeUploadHandler),
|
||||
(f"{rel}compile", EsphomeCompileHandler),
|
||||
(f"{rel}validate", EsphomeValidateHandler),
|
||||
(f"{rel}clean-mqtt", EsphomeCleanMqttHandler),
|
||||
(f"{rel}clean", EsphomeCleanHandler),
|
||||
(f"{rel}vscode", EsphomeVscodeHandler),
|
||||
(f"{rel}ace", EsphomeAceEditorHandler),
|
||||
(f"{rel}update-all", EsphomeUpdateAllHandler),
|
||||
(f"{rel}info", InfoRequestHandler),
|
||||
(f"{rel}edit", EditRequestHandler),
|
||||
(f"{rel}download.bin", DownloadBinaryRequestHandler),
|
||||
(f"{rel}serial-ports", SerialPortRequestHandler),
|
||||
(f"{rel}ping", PingRequestHandler),
|
||||
(f"{rel}delete", DeleteRequestHandler),
|
||||
(f"{rel}undo-delete", UndoDeleteRequestHandler),
|
||||
(f"{rel}wizard.html", WizardRequestHandler),
|
||||
(f"{rel}static/(.*)", StaticFileHandler, {"path": get_static_path()}),
|
||||
],
|
||||
**app_settings,
|
||||
)
|
||||
|
|
|
@ -52,9 +52,7 @@ class ProgressBar:
|
|||
return
|
||||
self.last_progress = new_progress
|
||||
block = int(round(bar_length * progress))
|
||||
text = "\rUploading: [{0}] {1}% {2}".format(
|
||||
"=" * block + " " * (bar_length - block), new_progress, status
|
||||
)
|
||||
text = f"\rUploading: [{'=' * block + ' ' * (bar_length - block)}] {new_progress}% {status}"
|
||||
sys.stderr.write(text)
|
||||
sys.stderr.flush()
|
||||
|
||||
|
@ -154,7 +152,7 @@ def check_error(data, expect):
|
|||
if not isinstance(expect, (list, tuple)):
|
||||
expect = [expect]
|
||||
if dat not in expect:
|
||||
raise OTAError("Unexpected response from ESP: 0x{:02X}".format(data[0]))
|
||||
raise OTAError(f"Unexpected response from ESP: 0x{data[0]:02X}")
|
||||
|
||||
|
||||
def send_check(sock, data, msg):
|
||||
|
|
|
@ -54,7 +54,7 @@ def cpp_string_escape(string, encoding="utf-8"):
|
|||
result += f"\\{character:03o}"
|
||||
else:
|
||||
result += chr(character)
|
||||
return '"' + result + '"'
|
||||
return f'"{result}"'
|
||||
|
||||
|
||||
def run_system_command(*args):
|
||||
|
@ -107,7 +107,7 @@ def _resolve_with_zeroconf(host):
|
|||
"host network mode?"
|
||||
) from err
|
||||
try:
|
||||
info = zc.resolve_host(host + ".")
|
||||
info = zc.resolve_host(f"{host}.")
|
||||
except Exception as err:
|
||||
raise EsphomeError(f"Error resolving mDNS hostname: {err}") from err
|
||||
finally:
|
||||
|
@ -136,9 +136,7 @@ def resolve_ip_address(host):
|
|||
return socket.gethostbyname(host)
|
||||
except OSError as err:
|
||||
errs.append(str(err))
|
||||
raise EsphomeError(
|
||||
"Error resolving IP address: {}" "".format(", ".join(errs))
|
||||
) from err
|
||||
raise EsphomeError(f"Error resolving IP address: {', '.join(errs)}") from err
|
||||
|
||||
|
||||
def get_bool_env(var, default=False):
|
||||
|
|
|
@ -104,9 +104,9 @@ def show_logs(config, topic=None, username=None, password=None, client_id=None):
|
|||
if CONF_LOG_TOPIC in conf:
|
||||
topic = config[CONF_MQTT][CONF_LOG_TOPIC][CONF_TOPIC]
|
||||
elif CONF_TOPIC_PREFIX in config[CONF_MQTT]:
|
||||
topic = config[CONF_MQTT][CONF_TOPIC_PREFIX] + "/debug"
|
||||
topic = f"{config[CONF_MQTT][CONF_TOPIC_PREFIX]}/debug"
|
||||
else:
|
||||
topic = config[CONF_ESPHOME][CONF_NAME] + "/debug"
|
||||
topic = f"{config[CONF_ESPHOME][CONF_NAME]}/debug"
|
||||
else:
|
||||
_LOGGER.error("MQTT isn't setup, can't start MQTT logs")
|
||||
return 1
|
||||
|
@ -158,9 +158,8 @@ def get_fingerprint(config):
|
|||
|
||||
sha1 = hashlib.sha1(cert_der).hexdigest()
|
||||
|
||||
safe_print("SHA1 Fingerprint: " + color(Fore.CYAN, sha1))
|
||||
safe_print(f"SHA1 Fingerprint: {color(Fore.CYAN, sha1)}")
|
||||
safe_print(
|
||||
"Copy the string above into mqtt.ssl_fingerprints section of {}"
|
||||
"".format(CORE.config_path)
|
||||
f"Copy the string above into mqtt.ssl_fingerprints section of {CORE.config_path}"
|
||||
)
|
||||
return 0
|
||||
|
|
|
@ -77,8 +77,7 @@ def validate_gpio_pin(value):
|
|||
raise cv.Invalid(f"ESP32-C3: Invalid pin number: {value}")
|
||||
if value in _ESP32C3_SDIO_PINS:
|
||||
raise cv.Invalid(
|
||||
"This pin cannot be used on ESP32-C3s and is already used by "
|
||||
"the flash interface (function: {})".format(_ESP_SDIO_PINS[value])
|
||||
f"This pin cannot be used on ESP32-C3s and is already used by the flash interface (function: {_ESP_SDIO_PINS[value]})"
|
||||
)
|
||||
return value
|
||||
if CORE.is_esp32:
|
||||
|
@@ -86,8 +85,7 @@ def validate_gpio_pin(value):
             raise cv.Invalid(f"ESP32: Invalid pin number: {value}")
         if value in _ESP_SDIO_PINS:
             raise cv.Invalid(
-                "This pin cannot be used on ESP32s and is already used by "
-                "the flash interface (function: {})".format(_ESP_SDIO_PINS[value])
+                f"This pin cannot be used on ESP32s and is already used by the flash interface (function: {_ESP_SDIO_PINS[value]})"
             )
         if 9 <= value <= 10:
             _LOGGER.warning(

@@ -105,8 +103,7 @@ def validate_gpio_pin(value):
             raise cv.Invalid(f"ESP8266: Invalid pin number: {value}")
         if value in _ESP_SDIO_PINS:
             raise cv.Invalid(
-                "This pin cannot be used on ESP8266s and is already used by "
-                "the flash interface (function: {})".format(_ESP_SDIO_PINS[value])
+                f"This pin cannot be used on ESP8266s and is already used by the flash interface (function: {_ESP_SDIO_PINS[value]})"
             )
         if 9 <= value <= 10:
             _LOGGER.warning(

@@ -144,8 +141,7 @@ def output_pin(value):
     if CORE.is_esp32:
         if 34 <= value <= 39:
             raise cv.Invalid(
-                "ESP32: GPIO{} (34-39) can only be used as an "
-                "input pin.".format(value)
+                f"ESP32: GPIO{value} (34-39) can only be used as an input pin."
             )
         return value
     if CORE.is_esp8266:

@@ -278,8 +274,7 @@ def validate_has_interrupt(value):
     if CORE.is_esp8266:
         if value[CONF_NUMBER] >= 16:
             raise cv.Invalid(
-                "Pins GPIO16 and GPIO17 do not support interrupts and cannot be used "
-                "here, got {}".format(value[CONF_NUMBER])
+                f"Pins GPIO16 and GPIO17 do not support interrupts and cannot be used here, got {value[CONF_NUMBER]}"
             )
     return value

@@ -39,7 +39,7 @@ def patch_structhash():
     command.clean_build_dir = patched_clean_build_dir


-IGNORE_LIB_WARNINGS = r"(?:" + "|".join(["Hash", "Update"]) + r")"
+IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
 FILTER_PLATFORMIO_LINES = [
     r"Verbose mode can be enabled via `-v, --verbose` option.*",
     r"CONFIGURATION: https://docs.platformio.org/.*",

@@ -48,13 +48,9 @@ FILTER_PLATFORMIO_LINES = [
     r"PACKAGES: .*",
     r"LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf.*",
     r"LDF Modes: Finder ~ chain, Compatibility ~ soft.*",
-    r"Looking for " + IGNORE_LIB_WARNINGS + r" library in registry",
-    r"Warning! Library `.*'"
-    + IGNORE_LIB_WARNINGS
-    + r".*` has not been found in PlatformIO Registry.",
-    r"You can ignore this message, if `.*"
-    + IGNORE_LIB_WARNINGS
-    + r".*` is a built-in library.*",
+    f"Looking for {IGNORE_LIB_WARNINGS} library in registry",
+    f"Warning! Library `.*'{IGNORE_LIB_WARNINGS}.*` has not been found in PlatformIO Registry.",
+    f"You can ignore this message, if `.*{IGNORE_LIB_WARNINGS}.*` is a built-in library.*",
     r"Scanning dependencies...",
     r"Found \d+ compatible libraries",
     r"Memory Usage -> http://bit.ly/pio-memory-usage",

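A quick sketch (not from the commit): the f-string interpolates at import time, so the filter entries compile to the same regex text as the old concatenation did.

import re

IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
assert IGNORE_LIB_WARNINGS == r"(?:" + "|".join(["Hash", "Update"]) + r")"
pattern = re.compile(f"Looking for {IGNORE_LIB_WARNINGS} library in registry")
assert pattern.search("Looking for Hash library in registry")
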
@@ -296,6 +292,6 @@ class IDEData:

         # Windows
         if cc_path.endswith(".exe"):
-            return cc_path[:-7] + "addr2line.exe"
+            return f"{cc_path[:-7]}addr2line.exe"

-        return cc_path[:-3] + "addr2line"
+        return f"{cc_path[:-3]}addr2line"

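A sketch with illustrative toolchain paths (not taken from the commit): the slice widths strip "gcc.exe" (7 characters) or "gcc" (3 characters) before the f-string appends the addr2line name.

cc_path = "C:\\toolchain\\xtensa-esp32-elf-gcc.exe"
assert f"{cc_path[:-7]}addr2line.exe" == "C:\\toolchain\\xtensa-esp32-elf-addr2line.exe"
cc_path = "/usr/bin/xtensa-lx106-elf-gcc"
assert f"{cc_path[:-3]}addr2line" == "/usr/bin/xtensa-lx106-elf-addr2line"
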
@@ -94,7 +94,7 @@ class StorageJSON:
         }

     def to_json(self):
-        return json.dumps(self.as_dict(), indent=2) + "\n"
+        return f"{json.dumps(self.as_dict(), indent=2)}\n"

     def save(self, path):
         write_file_if_changed(path, self.to_json())

@@ -214,7 +214,7 @@ class EsphomeStorageJSON:
         self.last_update_check_str = new.strftime("%Y-%m-%dT%H:%M:%S")

     def to_json(self): # type: () -> dict
-        return json.dumps(self.as_dict(), indent=2) + "\n"
+        return f"{json.dumps(self.as_dict(), indent=2)}\n"

     def save(self, path): # type: (str) -> None
         write_file_if_changed(path, self.to_json())

@@ -74,19 +74,20 @@ def wizard_file(**kwargs):

     # Configure API
     if "password" in kwargs:
-        config += ' password: "{0}"\n'.format(kwargs["password"])
+        config += f" password: \"{kwargs['password']}\"\n"

     # Configure OTA
     config += "\nota:\n"
     if "ota_password" in kwargs:
-        config += ' password: "{0}"'.format(kwargs["ota_password"])
+        config += f" password: \"{kwargs['ota_password']}\""
     elif "password" in kwargs:
-        config += ' password: "{0}"'.format(kwargs["password"])
+        config += f" password: \"{kwargs['password']}\""

     # Configuring wifi
     config += "\n\nwifi:\n"

     if "ssid" in kwargs:
+        # pylint: disable=consider-using-f-string
         config += """ ssid: "{ssid}"
 password: "{psk}"
 """.format(

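A minimal sketch (illustrative password value): inside an f-string the emitted double quotes are backslash-escaped, and the kwargs subscript uses single quotes so it does not terminate the literal.

kwargs = {"password": "hunter2"}
line = f" password: \"{kwargs['password']}\"\n"
assert line == ' password: "hunter2"\n'
assert line == ' password: "{0}"\n'.format(kwargs["password"])
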
@@ -99,6 +100,7 @@ def wizard_file(**kwargs):
 networks:
 """

+    # pylint: disable=consider-using-f-string
     config += """
 # Enable fallback hotspot (captive portal) in case wifi connection fails
 ap:

@@ -126,7 +128,7 @@ def wizard_write(path, **kwargs):
     platform = kwargs["platform"]

     write_file(path, wizard_file(**kwargs))
-    storage = StorageJSON.from_wizard(name, name + ".local", platform, board)
+    storage = StorageJSON.from_wizard(name, f"{name}.local", platform, board)
     storage_path = ext_storage_path(os.path.dirname(path), os.path.basename(path))
     storage.save(storage_path)

@@ -168,14 +170,12 @@ def strip_accents(value):
 def wizard(path):
     if not path.endswith(".yaml") and not path.endswith(".yml"):
         safe_print(
-            "Please make your configuration file {} have the extension .yaml or .yml"
-            "".format(color(Fore.CYAN, path))
+            f"Please make your configuration file {color(Fore.CYAN, path)} have the extension .yaml or .yml"
         )
         return 1
     if os.path.exists(path):
         safe_print(
-            "Uh oh, it seems like {} already exists, please delete that file first "
-            "or chose another configuration file.".format(color(Fore.CYAN, path))
+            f"Uh oh, it seems like {color(Fore.CYAN, path)} already exists, please delete that file first or chose another configuration file."
         )
         return 2
     safe_print("Hi there!")

@@ -191,17 +191,13 @@ def wizard(path):
     sleep(3.0)
     safe_print()
     safe_print_step(1, CORE_BIG)
-    safe_print(
-        "First up, please choose a " + color(Fore.GREEN, "name") + " for your node."
-    )
+    safe_print(f"First up, please choose a {color(Fore.GREEN, 'name')} for your node.")
     safe_print(
         "It should be a unique name that can be used to identify the device later."
     )
     sleep(1)
     safe_print(
-        "For example, I like calling the node in my living room {}.".format(
-            color(Fore.BOLD_WHITE, "livingroom")
-        )
+        f"For example, I like calling the node in my living room {color(Fore.BOLD_WHITE, 'livingroom')}."
     )
     safe_print()
     sleep(1)

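A small sketch with a stand-in color() helper (the real one is the project's own): any expression, including a function call, may appear inside the braces, and single quotes are used for its arguments so they nest inside the double-quoted f-string.

def color(_code, text):  # stand-in for the project's color() helper
    return text

old = "First up, please choose a " + color("green", "name") + " for your node."
new = f"First up, please choose a {color('green', 'name')} for your node."
assert old == new
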
@@ -222,13 +218,11 @@ def wizard(path):
             name = strip_accents(name).lower().replace(" ", "-")
             name = strip_accents(name).lower().replace("_", "-")
             name = "".join(c for c in name if c in ALLOWED_NAME_CHARS)
-            safe_print(
-                'Shall I use "{}" as the name instead?'.format(color(Fore.CYAN, name))
-            )
+            safe_print(f'Shall I use "{color(Fore.CYAN, name)}" as the name instead?')
             sleep(0.5)
             name = default_input("(name [{}]): ", name)

-    safe_print('Great! Your node is now called "{}".'.format(color(Fore.CYAN, name)))
+    safe_print(f'Great! Your node is now called "{color(Fore.CYAN, name)}".')
     sleep(1)
     safe_print_step(2, ESP_BIG)
     safe_print(

@@ -236,11 +230,7 @@ def wizard(path):
         "firmwares for it."
     )
     safe_print(
-        "Are you using an "
-        + color(Fore.GREEN, "ESP32")
-        + " or "
-        + color(Fore.GREEN, "ESP8266")
-        + " platform? (Choose ESP8266 for Sonoff devices)"
+        f"Are you using an {color(Fore.GREEN, 'ESP32')} or {color(Fore.GREEN, 'ESP8266')} platform? (Choose ESP8266 for Sonoff devices)"
     )
     while True:
         sleep(0.5)

@@ -252,12 +242,9 @@ def wizard(path):
             break
         except vol.Invalid:
             safe_print(
-                "Unfortunately, I can't find an espressif microcontroller called "
-                '"{}". Please try again.'.format(platform)
-            )
-    safe_print(
-        "Thanks! You've chosen {} as your platform.".format(color(Fore.CYAN, platform))
+                f'Unfortunately, I can\'t find an espressif microcontroller called "{platform}". Please try again.'
             )
+    safe_print(f"Thanks! You've chosen {color(Fore.CYAN, platform)} as your platform.")
     safe_print()
     sleep(1)

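A short sketch (illustrative input string): apostrophes in the fixed text of a single-quoted f-string are backslash-escaped exactly as in a plain single-quoted literal, so only the {platform} field is interpolated.

platform = "ESP31"
msg = f'Unfortunately, I can\'t find an espressif microcontroller called "{platform}". Please try again.'
assert "can't find" in msg and '"ESP31"' in msg
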
@@ -270,24 +257,20 @@ def wizard(path):
             "http://docs.platformio.org/en/latest/platforms/espressif8266.html#boards"
         )

-    safe_print(
-        "Next, I need to know what " + color(Fore.GREEN, "board") + " you're using."
-    )
+    safe_print(f"Next, I need to know what {color(Fore.GREEN, 'board')} you're using.")
     sleep(0.5)
-    safe_print(
-        "Please go to {} and choose a board.".format(color(Fore.GREEN, board_link))
-    )
+    safe_print(f"Please go to {color(Fore.GREEN, board_link)} and choose a board.")
     if platform == "ESP32":
-        safe_print("(Type " + color(Fore.GREEN, "esp01_1m") + " for Sonoff devices)")
+        safe_print(f"(Type {color(Fore.GREEN, 'esp01_1m')} for Sonoff devices)")
     safe_print()
     # Don't sleep because user needs to copy link
     if platform == "ESP32":
-        safe_print('For example "{}".'.format(color(Fore.BOLD_WHITE, "nodemcu-32s")))
+        safe_print(f"For example \"{color(Fore.BOLD_WHITE, 'nodemcu-32s')}\".")
         boards = list(ESP32_BOARD_PINS.keys())
     else:
-        safe_print('For example "{}".'.format(color(Fore.BOLD_WHITE, "nodemcuv2")))
+        safe_print(f"For example \"{color(Fore.BOLD_WHITE, 'nodemcuv2')}\".")
         boards = list(ESP8266_BOARD_PINS.keys())
-    safe_print("Options: {}".format(", ".join(sorted(boards))))
+    safe_print(f"Options: {', '.join(sorted(boards))}")

     while True:
         board = input(color(Fore.BOLD_WHITE, "(board): "))

@@ -302,9 +285,7 @@ def wizard(path):
             sleep(0.25)
             safe_print()

-    safe_print(
-        "Way to go! You've chosen {} as your board.".format(color(Fore.CYAN, board))
-    )
+    safe_print(f"Way to go! You've chosen {color(Fore.CYAN, board)} as your board.")
     safe_print()
     sleep(1)

@@ -313,12 +294,10 @@ def wizard(path):
     safe_print()
     sleep(1)
     safe_print(
-        "First, what's the "
-        + color(Fore.GREEN, "SSID")
-        + f" (the name) of the WiFi network {name} should connect to?"
+        f"First, what's the {color(Fore.GREEN, 'SSID')} (the name) of the WiFi network {name} should connect to?"
     )
     sleep(1.5)
-    safe_print('For example "{}".'.format(color(Fore.BOLD_WHITE, "Abraham Linksys")))
+    safe_print(f"For example \"{color(Fore.BOLD_WHITE, 'Abraham Linksys')}\".")
     while True:
         ssid = input(color(Fore.BOLD_WHITE, "(ssid): "))
         try:

@@ -328,27 +307,23 @@ def wizard(path):
             safe_print(
                 color(
                     Fore.RED,
-                    'Unfortunately, "{}" doesn\'t seem to be a valid SSID. '
-                    "Please try again.".format(ssid),
+                    f'Unfortunately, "{ssid}" doesn\'t seem to be a valid SSID. Please try again.',
                 )
             )
             safe_print()
             sleep(1)

     safe_print(
-        'Thank you very much! You\'ve just chosen "{}" as your SSID.'
-        "".format(color(Fore.CYAN, ssid))
+        f'Thank you very much! You\'ve just chosen "{color(Fore.CYAN, ssid)}" as your SSID.'
     )
     safe_print()
     sleep(0.75)

     safe_print(
-        "Now please state the "
-        + color(Fore.GREEN, "password")
-        + " of the WiFi network so that I can connect to it (Leave empty for no password)"
+        f"Now please state the {color(Fore.GREEN, 'password')} of the WiFi network so that I can connect to it (Leave empty for no password)"
     )
     safe_print()
-    safe_print('For example "{}"'.format(color(Fore.BOLD_WHITE, "PASSWORD42")))
+    safe_print(f"For example \"{color(Fore.BOLD_WHITE, 'PASSWORD42')}\"")
     sleep(0.5)
     psk = input(color(Fore.BOLD_WHITE, "(PSK): "))
     safe_print(

@@ -362,8 +337,7 @@ def wizard(path):
         "(over the air) and integrates into Home Assistant with a native API."
     )
     safe_print(
-        "This can be insecure if you do not trust the WiFi network. Do you want to set "
-        "a " + color(Fore.GREEN, "password") + " for connecting to this ESP?"
+        f"This can be insecure if you do not trust the WiFi network. Do you want to set a {color(Fore.GREEN, 'password')} for connecting to this ESP?"
    )
     safe_print()
     sleep(0.25)

@@ -96,7 +96,7 @@ def get_include_text():
         includes = "\n".join(includes)
         if not includes:
             continue
-        include_text += includes + "\n"
+        include_text += f"{includes}\n"
     return include_text


@@ -134,7 +134,7 @@ def migrate_src_version_0_to_1():
         content, count = replace_file_content(
             content,
             r'#include "esphomelib/application.h"',
-            CPP_INCLUDE_BEGIN + "\n" + CPP_INCLUDE_END,
+            f"{CPP_INCLUDE_BEGIN}\n{CPP_INCLUDE_END}",
         )
         if count == 0:
             _LOGGER.error(

@@ -322,7 +322,7 @@ def write_platformio_ini(content):
         )
     else:
         content_format = INI_BASE_FORMAT
-    full_file = content_format[0] + INI_AUTO_GENERATE_BEGIN + "\n" + content
+    full_file = f"{content_format[0] + INI_AUTO_GENERATE_BEGIN}\n{content}"
     full_file += INI_AUTO_GENERATE_END + content_format[1]
     write_file_if_changed(path, full_file)

@@ -444,9 +444,9 @@ def write_cpp(code_s):
     global_s = '#include "esphome.h"\n'
     global_s += CORE.cpp_global_section

-    full_file = code_format[0] + CPP_INCLUDE_BEGIN + "\n" + global_s + CPP_INCLUDE_END
+    full_file = f"{code_format[0] + CPP_INCLUDE_BEGIN}\n{global_s}{CPP_INCLUDE_END}"
     full_file += (
-        code_format[1] + CPP_AUTO_GENERATE_BEGIN + "\n" + code_s + CPP_AUTO_GENERATE_END
+        f"{code_format[1] + CPP_AUTO_GENERATE_BEGIN}\n{code_s}{CPP_AUTO_GENERATE_END}"
     )
     full_file += code_format[2]
     write_file_if_changed(path, full_file)

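A sketch with placeholder marker strings (the real constants live elsewhere in the module): concatenation inside one replacement field plus a literal \n between fields reproduces the previous string byte for byte.

code_format = ("// header ", "// body ", "// footer")
CPP_INCLUDE_BEGIN, CPP_INCLUDE_END = "// INCLUDE BEGIN", "// INCLUDE END"
global_s = '#include "esphome.h"\n'
old = code_format[0] + CPP_INCLUDE_BEGIN + "\n" + global_s + CPP_INCLUDE_END
new = f"{code_format[0] + CPP_INCLUDE_BEGIN}\n{global_s}{CPP_INCLUDE_END}"
assert old == new
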
@@ -183,9 +183,7 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
                         raise yaml.constructor.ConstructorError(
                             "While constructing a mapping",
                             node.start_mark,
-                            "Expected a mapping for merging, but found {}".format(
-                                type(item)
-                            ),
+                            f"Expected a mapping for merging, but found {type(item)}",
                             value_node.start_mark,
                         )
                     merge_pairs.extend(item.items())

@@ -193,8 +191,7 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
                 raise yaml.constructor.ConstructorError(
                     "While constructing a mapping",
                     node.start_mark,
-                    "Expected a mapping or list of mappings for merging, "
-                    "but found {}".format(type(value)),
+                    f"Expected a mapping or list of mappings for merging, but found {type(value)}",
                     value_node.start_mark,
                 )

@@ -1,4 +1,4 @@
-pylint==2.10.2
+pylint==2.11.1
 flake8==3.9.2
 black==21.9b0
 pexpect==4.8.0