Mirror of https://github.com/esphome/esphome.git (synced 2024-11-10 01:07:45 +01:00)

Commit 40e06c9819 (parent ad6c5ff11d): Raise minimum python version to 3.8 (#3176)

18 changed files with 253 additions and 180 deletions

.github/workflows/ci.yml (vendored, 2 changes)

@@ -80,7 +80,7 @@ jobs:
       uses: actions/setup-python@v2
       id: python
       with:
-        python-version: '3.7'
+        python-version: '3.8'

     - name: Cache virtualenv
       uses: actions/cache@v2

.gitignore (vendored, 1 change)

@@ -77,6 +77,7 @@ venv/
 ENV/
 env.bak/
 venv.bak/
+venv-*/

 # mypy
 .mypy_cache/

@@ -25,3 +25,8 @@ repos:
       - --branch=dev
       - --branch=release
       - --branch=beta
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.31.0
+    hooks:
+      - id: pyupgrade
+        args: [--py38-plus]

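The pyupgrade hook added above is what drives most of the mechanical rewrites in the rest of this commit. A rough, runnable sketch of the kinds of rewrites its --py38-plus fixer performs (illustrative snippets, not lines taken from the ESPHome sources):

    # .format() calls become f-strings
    user, repo = "esphome", "esphome"
    old_style = "{}/{}".format(user, repo)
    new_style = f"{user}/{repo}"
    assert old_style == new_style

    # a generator expression passed to set() becomes a set comprehension
    nets = ["10.0.0.1", "10.0.0.2"]
    old_set = set(str(n) for n in nets)
    new_set = {str(n) for n in nets}
    assert old_set == new_set

The commit also drops redundant default open() modes (open(path, mode="r") becomes open(path)) and the obsolete "from __future__ import print_function" import, as the hunks further down show.
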
@@ -778,10 +778,10 @@ def run_esphome(argv):
         _LOGGER.warning("Please instead use:")
         _LOGGER.warning("  esphome %s", " ".join(args.deprecated_argv_suggestion))

-    if sys.version_info < (3, 7, 0):
+    if sys.version_info < (3, 8, 0):
         _LOGGER.error(
-            "You're running ESPHome with Python <3.7. ESPHome is no longer compatible "
-            "with this Python version. Please reinstall ESPHome with Python 3.7+"
+            "You're running ESPHome with Python <3.8. ESPHome is no longer compatible "
+            "with this Python version. Please reinstall ESPHome with Python 3.8+"
         )
         return 1

@@ -43,7 +43,7 @@ async def to_code(config):
     await lcd_base.setup_lcd_display(var, config)
     pins_ = []
     for conf in config[CONF_DATA_PINS]:
-        pins_.append((await cg.gpio_pin_expression(conf)))
+        pins_.append(await cg.gpio_pin_expression(conf))
     cg.add(var.set_data_pins(*pins_))
     enable = await cg.gpio_pin_expression(config[CONF_ENABLE_PIN])
     cg.add(var.set_enable_pin(enable))

@@ -847,7 +847,7 @@ async def rc_switch_raw_action(var, config, args):
         config[CONF_PROTOCOL], args, RCSwitchBase, to_exp=build_rc_switch_protocol
     )
     cg.add(var.set_protocol(proto))
-    cg.add(var.set_code((await cg.templatable(config[CONF_CODE], args, cg.std_string))))
+    cg.add(var.set_code(await cg.templatable(config[CONF_CODE], args, cg.std_string)))


 @register_binary_sensor(
@@ -868,13 +868,11 @@ async def rc_switch_type_a_action(var, config, args):
         config[CONF_PROTOCOL], args, RCSwitchBase, to_exp=build_rc_switch_protocol
     )
     cg.add(var.set_protocol(proto))
+    cg.add(var.set_group(await cg.templatable(config[CONF_GROUP], args, cg.std_string)))
     cg.add(
-        var.set_group((await cg.templatable(config[CONF_GROUP], args, cg.std_string)))
+        var.set_device(await cg.templatable(config[CONF_DEVICE], args, cg.std_string))
     )
-    cg.add(
-        var.set_device((await cg.templatable(config[CONF_DEVICE], args, cg.std_string)))
-    )
-    cg.add(var.set_state((await cg.templatable(config[CONF_STATE], args, bool))))
+    cg.add(var.set_state(await cg.templatable(config[CONF_STATE], args, bool)))


 @register_binary_sensor(
@@ -897,13 +895,9 @@ async def rc_switch_type_b_action(var, config, args):
         config[CONF_PROTOCOL], args, RCSwitchBase, to_exp=build_rc_switch_protocol
     )
     cg.add(var.set_protocol(proto))
-    cg.add(
-        var.set_address((await cg.templatable(config[CONF_ADDRESS], args, cg.uint8)))
-    )
-    cg.add(
-        var.set_channel((await cg.templatable(config[CONF_CHANNEL], args, cg.uint8)))
-    )
-    cg.add(var.set_state((await cg.templatable(config[CONF_STATE], args, bool))))
+    cg.add(var.set_address(await cg.templatable(config[CONF_ADDRESS], args, cg.uint8)))
+    cg.add(var.set_channel(await cg.templatable(config[CONF_CHANNEL], args, cg.uint8)))
+    cg.add(var.set_state(await cg.templatable(config[CONF_STATE], args, bool)))


 @register_binary_sensor(
@@ -932,11 +926,11 @@ async def rc_switch_type_c_action(var, config, args):
     )
     cg.add(var.set_protocol(proto))
     cg.add(
-        var.set_family((await cg.templatable(config[CONF_FAMILY], args, cg.std_string)))
+        var.set_family(await cg.templatable(config[CONF_FAMILY], args, cg.std_string))
     )
-    cg.add(var.set_group((await cg.templatable(config[CONF_GROUP], args, cg.uint8))))
-    cg.add(var.set_device((await cg.templatable(config[CONF_DEVICE], args, cg.uint8))))
-    cg.add(var.set_state((await cg.templatable(config[CONF_STATE], args, bool))))
+    cg.add(var.set_group(await cg.templatable(config[CONF_GROUP], args, cg.uint8)))
+    cg.add(var.set_device(await cg.templatable(config[CONF_DEVICE], args, cg.uint8)))
+    cg.add(var.set_state(await cg.templatable(config[CONF_STATE], args, bool)))


 @register_binary_sensor(
@@ -959,11 +953,9 @@ async def rc_switch_type_d_action(var, config, args):
         config[CONF_PROTOCOL], args, RCSwitchBase, to_exp=build_rc_switch_protocol
     )
     cg.add(var.set_protocol(proto))
-    cg.add(
-        var.set_group((await cg.templatable(config[CONF_GROUP], args, cg.std_string)))
-    )
-    cg.add(var.set_device((await cg.templatable(config[CONF_DEVICE], args, cg.uint8))))
-    cg.add(var.set_state((await cg.templatable(config[CONF_STATE], args, bool))))
+    cg.add(var.set_group(await cg.templatable(config[CONF_GROUP], args, cg.std_string)))
+    cg.add(var.set_device(await cg.templatable(config[CONF_DEVICE], args, cg.uint8)))
+    cg.add(var.set_state(await cg.templatable(config[CONF_STATE], args, bool)))


 @register_trigger("rc_switch", RCSwitchTrigger, RCSwitchData)
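A recurring cleanup in the four rc_switch hunks above is dropping the extra parentheses around an awaited expression that is already a call argument. A minimal runnable sketch (the coroutine name here is hypothetical, standing in for cg.templatable):

    import asyncio

    async def templatable() -> int:
        # stand-in for the awaited cg.templatable(...) calls in the hunks above
        return 42

    async def main() -> None:
        with_parens = (await templatable())   # the old, redundant form
        without_parens = await templatable()  # the form the commit settles on
        assert with_parens == without_parens

    asyncio.run(main())
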
@@ -36,32 +36,26 @@ def validate_temperature_multipliers(value):
         or CONF_TARGET_TEMPERATURE_MULTIPLIER in value
     ):
         raise cv.Invalid(
-            (
             f"Cannot have {CONF_TEMPERATURE_MULTIPLIER} at the same time as "
             f"{CONF_CURRENT_TEMPERATURE_MULTIPLIER} and "
             f"{CONF_TARGET_TEMPERATURE_MULTIPLIER}"
-            )
         )
     if (
         CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
         and CONF_TARGET_TEMPERATURE_MULTIPLIER not in value
     ):
         raise cv.Invalid(
-            (
             f"{CONF_TARGET_TEMPERATURE_MULTIPLIER} required if using "
             f"{CONF_CURRENT_TEMPERATURE_MULTIPLIER}"
-            )
         )
     if (
         CONF_TARGET_TEMPERATURE_MULTIPLIER in value
         and CONF_CURRENT_TEMPERATURE_MULTIPLIER not in value
     ):
         raise cv.Invalid(
-            (
             f"{CONF_CURRENT_TEMPERATURE_MULTIPLIER} required if using "
             f"{CONF_TARGET_TEMPERATURE_MULTIPLIER}"
-            )
         )
     keys = (
         CONF_TEMPERATURE_MULTIPLIER,
         CONF_CURRENT_TEMPERATURE_MULTIPLIER,

@@ -76,19 +70,15 @@ def validate_active_state_values(value):
     if CONF_ACTIVE_STATE_DATAPOINT not in value:
         if CONF_ACTIVE_STATE_COOLING_VALUE in value:
             raise cv.Invalid(
-                (
                 f"{CONF_ACTIVE_STATE_DATAPOINT} required if using "
                 f"{CONF_ACTIVE_STATE_COOLING_VALUE}"
-                )
             )
     else:
         if value[CONF_SUPPORTS_COOL] and CONF_ACTIVE_STATE_COOLING_VALUE not in value:
             raise cv.Invalid(
-                (
                 f"{CONF_ACTIVE_STATE_COOLING_VALUE} required if using "
                 f"{CONF_ACTIVE_STATE_DATAPOINT} and device supports cooling"
-                )
             )
     return value

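The climate hunks above make the matching change for error messages: adjacent string literals are concatenated at compile time, so the inner parentheses around the f-strings passed to cv.Invalid added nothing. An illustrative sketch with plain strings:

    # Both calls build the identical message; only the redundant parentheses differ.
    err_old = ValueError(("target multiplier required if using " "current multiplier"))
    err_new = ValueError("target multiplier required if using " "current multiplier")
    assert str(err_old) == str(err_new)
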
@@ -77,11 +77,11 @@ async def to_code(config):
     if CONF_CSS_INCLUDE in config:
         cg.add_define("WEBSERVER_CSS_INCLUDE")
         path = CORE.relative_config_path(config[CONF_CSS_INCLUDE])
-        with open(file=path, mode="r", encoding="utf-8") as myfile:
+        with open(file=path, encoding="utf-8") as myfile:
             cg.add(var.set_css_include(myfile.read()))
     if CONF_JS_INCLUDE in config:
         cg.add_define("WEBSERVER_JS_INCLUDE")
         path = CORE.relative_config_path(config[CONF_JS_INCLUDE])
-        with open(file=path, mode="r", encoding="utf-8") as myfile:
+        with open(file=path, encoding="utf-8") as myfile:
             cg.add(var.set_js_include(myfile.read()))
     cg.add(var.set_include_internal(config[CONF_INCLUDE_INTERNAL]))

@@ -225,11 +225,11 @@ def _validate(config):
         if CONF_MANUAL_IP in config:
             use_address = str(config[CONF_MANUAL_IP][CONF_STATIC_IP])
         elif CONF_NETWORKS in config:
-            ips = set(
+            ips = {
                 str(net[CONF_MANUAL_IP][CONF_STATIC_IP])
                 for net in config[CONF_NETWORKS]
                 if CONF_MANUAL_IP in net
-            )
+            }
             if len(ips) > 1:
                 raise cv.Invalid(
                     "Must specify use_address when using multiple static IP addresses."

@@ -733,7 +733,7 @@ class EditRequestHandler(BaseHandler):
         content = ""
         if os.path.isfile(filename):
             # pylint: disable=no-value-for-parameter
-            with open(file=filename, mode="r", encoding="utf-8") as f:
+            with open(file=filename, encoding="utf-8") as f:
                 content = f.read()
         self.write(content)

@@ -1,6 +1,7 @@
 pylint==2.12.2
 flake8==4.0.1
 black==22.1.0
+pyupgrade==2.31.0
 pre-commit

 # Unit tests

@@ -170,7 +170,7 @@ def get_logger_tags():
     ]
     for x in os.walk(CORE_COMPONENTS_PATH):
         for y in glob.glob(os.path.join(x[0], "*.cpp")):
-            with open(y, "r") as file:
+            with open(y) as file:
                 data = file.read()
             match = pattern.search(data)
             if match:

@@ -281,9 +281,7 @@ def highlight(s):
     ],
 )
 def lint_no_defines(fname, match):
-    s = highlight(
-        "static const uint8_t {} = {};".format(match.group(1), match.group(2))
-    )
+    s = highlight(f"static const uint8_t {match.group(1)} = {match.group(2)};")
     return (
         "#define macros for integer constants are not allowed, please use "
         "{} style instead (replace uint8_t with the appropriate "

@@ -17,14 +17,14 @@ def run_format(args, queue, lock, failed_files):
     """Takes filenames out of queue and runs clang-format on them."""
     while True:
         path = queue.get()
-        invocation = ['clang-format-11']
+        invocation = ["clang-format-11"]
         if args.inplace:
-            invocation.append('-i')
+            invocation.append("-i")
         else:
-            invocation.extend(['--dry-run', '-Werror'])
+            invocation.extend(["--dry-run", "-Werror"])
         invocation.append(path)

-        proc = subprocess.run(invocation, capture_output=True, encoding='utf-8')
+        proc = subprocess.run(invocation, capture_output=True, encoding="utf-8")
         if proc.returncode != 0:
             with lock:
                 print_error_for_file(path, proc.stderr)

@@ -33,28 +33,36 @@ def run_format(args, queue, lock, failed_files):


 def progress_bar_show(value):
-    return value if value is not None else ''
+    return value if value is not None else ""


 def main():
     colorama.init()

     parser = argparse.ArgumentParser()
-    parser.add_argument('-j', '--jobs', type=int,
-                        default=multiprocessing.cpu_count(),
-                        help='number of format instances to be run in parallel.')
-    parser.add_argument('files', nargs='*', default=[],
-                        help='files to be processed (regex on path)')
-    parser.add_argument('-i', '--inplace', action='store_true',
-                        help='reformat files in-place')
-    parser.add_argument('-c', '--changed', action='store_true',
-                        help='only run on changed files')
+    parser.add_argument(
+        "-j",
+        "--jobs",
+        type=int,
+        default=multiprocessing.cpu_count(),
+        help="number of format instances to be run in parallel.",
+    )
+    parser.add_argument(
+        "files", nargs="*", default=[], help="files to be processed (regex on path)"
+    )
+    parser.add_argument(
+        "-i", "--inplace", action="store_true", help="reformat files in-place"
+    )
+    parser.add_argument(
+        "-c", "--changed", action="store_true", help="only run on changed files"
+    )
     args = parser.parse_args()

     try:
-        get_output('clang-format-11', '-version')
+        get_output("clang-format-11", "-version")
     except:
-        print("""
+        print(
+            """
 Oops. It looks like clang-format is not installed.

 Please check you can run "clang-format-11 -version" in your terminal and install

@@ -62,16 +70,17 @@ def main():

 Note you can also upload your code as a pull request on GitHub and see the CI check
 output to apply clang-format.
-""")
+"""
+        )
         return 1

     files = []
-    for path in git_ls_files(['*.cpp', '*.h', '*.tcc']):
+    for path in git_ls_files(["*.cpp", "*.h", "*.tcc"]):
         files.append(os.path.relpath(path, os.getcwd()))

     if args.files:
         # Match against files specified on command-line
-        file_name_re = re.compile('|'.join(args.files))
+        file_name_re = re.compile("|".join(args.files))
         files = [p for p in files if file_name_re.search(p)]

     if args.changed:

@@ -84,14 +93,16 @@ def main():
     task_queue = queue.Queue(args.jobs)
     lock = threading.Lock()
     for _ in range(args.jobs):
-        t = threading.Thread(target=run_format,
-                             args=(args, task_queue, lock, failed_files))
+        t = threading.Thread(
+            target=run_format, args=(args, task_queue, lock, failed_files)
+        )
         t.daemon = True
         t.start()

     # Fill the queue with files.
-    with click.progressbar(files, width=30, file=sys.stderr,
-                           item_show_func=progress_bar_show) as bar:
+    with click.progressbar(
+        files, width=30, file=sys.stderr, item_show_func=progress_bar_show
+    ) as bar:
         for name in bar:
             task_queue.put(name)

@@ -100,11 +111,11 @@ def main():

     except KeyboardInterrupt:
         print()
-        print('Ctrl-C detected, goodbye.')
+        print("Ctrl-C detected, goodbye.")
         os.kill(0, 9)

     sys.exit(len(failed_files))


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

@@ -1,7 +1,17 @@
 #!/usr/bin/env python3

-from helpers import print_error_for_file, get_output, filter_grep, \
-    build_all_include, temp_header_file, git_ls_files, filter_changed, load_idedata, root_path, basepath
+from helpers import (
+    print_error_for_file,
+    get_output,
+    filter_grep,
+    build_all_include,
+    temp_header_file,
+    git_ls_files,
+    filter_changed,
+    load_idedata,
+    root_path,
+    basepath,
+)
 import argparse
 import click
 import colorama

@@ -20,67 +30,81 @@ def clang_options(idedata):
     cmd = []

     # extract target architecture from triplet in g++ filename
-    triplet = os.path.basename(idedata['cxx_path'])[:-4]
+    triplet = os.path.basename(idedata["cxx_path"])[:-4]
     if triplet.startswith("xtensa-"):
         # clang doesn't support Xtensa (yet?), so compile in 32-bit mode and pretend we're the Xtensa compiler
-        cmd.append('-m32')
-        cmd.append('-D__XTENSA__')
+        cmd.append("-m32")
+        cmd.append("-D__XTENSA__")
     else:
-        cmd.append(f'--target={triplet}')
+        cmd.append(f"--target={triplet}")

     # set flags
-    cmd.extend([
-        # disable built-in include directories from the host
-        '-nostdinc',
-        '-nostdinc++',
-        # replace pgmspace.h, as it uses GNU extensions clang doesn't support
-        # https://github.com/earlephilhower/newlib-xtensa/pull/18
-        '-D_PGMSPACE_H_',
-        '-Dpgm_read_byte(s)=(*(const uint8_t *)(s))',
-        '-Dpgm_read_byte_near(s)=(*(const uint8_t *)(s))',
-        '-Dpgm_read_word(s)=(*(const uint16_t *)(s))',
-        '-Dpgm_read_dword(s)=(*(const uint32_t *)(s))',
-        '-DPROGMEM=',
-        '-DPGM_P=const char *',
-        '-DPSTR(s)=(s)',
-        # this next one is also needed with upstream pgmspace.h
-        # suppress warning about identifier naming in expansion of this macro
-        '-DPSTRN(s, n)=(s)',
-        # suppress warning about attribute cannot be applied to type
-        # https://github.com/esp8266/Arduino/pull/8258
-        '-Ddeprecated(x)=',
-        # allow to condition code on the presence of clang-tidy
-        '-DCLANG_TIDY',
-        # (esp-idf) Disable this header because they use asm with registers clang-tidy doesn't know
-        '-D__XTENSA_API_H__',
-        # (esp-idf) Fix __once_callable in some libstdc++ headers
-        '-D_GLIBCXX_HAVE_TLS',
-    ])
+    cmd.extend(
+        [
+            # disable built-in include directories from the host
+            "-nostdinc",
+            "-nostdinc++",
+            # replace pgmspace.h, as it uses GNU extensions clang doesn't support
+            # https://github.com/earlephilhower/newlib-xtensa/pull/18
+            "-D_PGMSPACE_H_",
+            "-Dpgm_read_byte(s)=(*(const uint8_t *)(s))",
+            "-Dpgm_read_byte_near(s)=(*(const uint8_t *)(s))",
+            "-Dpgm_read_word(s)=(*(const uint16_t *)(s))",
+            "-Dpgm_read_dword(s)=(*(const uint32_t *)(s))",
+            "-DPROGMEM=",
+            "-DPGM_P=const char *",
+            "-DPSTR(s)=(s)",
+            # this next one is also needed with upstream pgmspace.h
+            # suppress warning about identifier naming in expansion of this macro
+            "-DPSTRN(s, n)=(s)",
+            # suppress warning about attribute cannot be applied to type
+            # https://github.com/esp8266/Arduino/pull/8258
+            "-Ddeprecated(x)=",
+            # allow to condition code on the presence of clang-tidy
+            "-DCLANG_TIDY",
+            # (esp-idf) Disable this header because they use asm with registers clang-tidy doesn't know
+            "-D__XTENSA_API_H__",
+            # (esp-idf) Fix __once_callable in some libstdc++ headers
+            "-D_GLIBCXX_HAVE_TLS",
+        ]
+    )

     # copy compiler flags, except those clang doesn't understand.
-    cmd.extend(flag for flag in idedata['cxx_flags'].split(' ')
-               if flag not in ('-free', '-fipa-pta', '-fstrict-volatile-bitfields',
-                               '-mlongcalls', '-mtext-section-literals',
-                               '-mfix-esp32-psram-cache-issue', '-mfix-esp32-psram-cache-strategy=memw'))
+    cmd.extend(
+        flag
+        for flag in idedata["cxx_flags"].split(" ")
+        if flag
+        not in (
+            "-free",
+            "-fipa-pta",
+            "-fstrict-volatile-bitfields",
+            "-mlongcalls",
+            "-mtext-section-literals",
+            "-mfix-esp32-psram-cache-issue",
+            "-mfix-esp32-psram-cache-strategy=memw",
+        )
+    )

     # defines
-    cmd.extend(f'-D{define}' for define in idedata['defines'])
+    cmd.extend(f"-D{define}" for define in idedata["defines"])

     # add toolchain include directories using -isystem to suppress their errors
     # idedata contains include directories for all toolchains of this platform, only use those from the one in use
     toolchain_dir = os.path.normpath(f"{idedata['cxx_path']}/../../")
-    for directory in idedata['includes']['toolchain']:
+    for directory in idedata["includes"]["toolchain"]:
         if directory.startswith(toolchain_dir):
-            cmd.extend(['-isystem', directory])
+            cmd.extend(["-isystem", directory])

     # add library include directories using -isystem to suppress their errors
-    for directory in sorted(set(idedata['includes']['build'])):
+    for directory in sorted(set(idedata["includes"]["build"])):
         # skip our own directories, we add those later
-        if not directory.startswith(f"{root_path}/") or directory.startswith(f"{root_path}/.pio/"):
-            cmd.extend(['-isystem', directory])
+        if not directory.startswith(f"{root_path}/") or directory.startswith(
+            f"{root_path}/.pio/"
+        ):
+            cmd.extend(["-isystem", directory])

     # add the esphome include directory using -I
-    cmd.extend(['-I', root_path])
+    cmd.extend(["-I", root_path])

     return cmd

@@ -88,28 +112,28 @@ def clang_options(idedata):
 def run_tidy(args, options, tmpdir, queue, lock, failed_files):
     while True:
         path = queue.get()
-        invocation = ['clang-tidy-11']
+        invocation = ["clang-tidy-11"]

         if tmpdir is not None:
-            invocation.append('--export-fixes')
+            invocation.append("--export-fixes")
             # Get a temporary file. We immediately close the handle so clang-tidy can
             # overwrite it.
-            (handle, name) = tempfile.mkstemp(suffix='.yaml', dir=tmpdir)
+            (handle, name) = tempfile.mkstemp(suffix=".yaml", dir=tmpdir)
             os.close(handle)
             invocation.append(name)

         if args.quiet:
-            invocation.append('--quiet')
+            invocation.append("--quiet")

         if sys.stdout.isatty():
-            invocation.append('--use-color')
+            invocation.append("--use-color")

         invocation.append(f"--header-filter={os.path.abspath(basepath)}/.*")
         invocation.append(os.path.abspath(path))
-        invocation.append('--')
+        invocation.append("--")
         invocation.extend(options)

-        proc = subprocess.run(invocation, capture_output=True, encoding='utf-8')
+        proc = subprocess.run(invocation, capture_output=True, encoding="utf-8")
         if proc.returncode != 0:
             with lock:
                 print_error_for_file(path, proc.stdout)

@@ -119,7 +143,7 @@ def run_tidy(args, options, tmpdir, queue, lock, failed_files):

 def progress_bar_show(value):
     if value is None:
-        return ''
+        return ""


 def split_list(a, n):

@@ -131,31 +155,48 @@ def main():
     colorama.init()

     parser = argparse.ArgumentParser()
-    parser.add_argument('-j', '--jobs', type=int,
-                        default=multiprocessing.cpu_count(),
-                        help='number of tidy instances to be run in parallel.')
-    parser.add_argument('-e', '--environment', default='esp32-arduino-tidy',
-                        help='the PlatformIO environment to use (as defined in platformio.ini)')
-    parser.add_argument('files', nargs='*', default=[],
-                        help='files to be processed (regex on path)')
-    parser.add_argument('--fix', action='store_true', help='apply fix-its')
-    parser.add_argument('-q', '--quiet', action='store_false',
-                        help='run clang-tidy in quiet mode')
-    parser.add_argument('-c', '--changed', action='store_true',
-                        help='only run on changed files')
-    parser.add_argument('-g', '--grep', help='only run on files containing value')
-    parser.add_argument('--split-num', type=int, help='split the files into X jobs.',
-                        default=None)
-    parser.add_argument('--split-at', type=int, help='which split is this? starts at 1',
-                        default=None)
-    parser.add_argument('--all-headers', action='store_true',
-                        help='create a dummy file that checks all headers')
+    parser.add_argument(
+        "-j",
+        "--jobs",
+        type=int,
+        default=multiprocessing.cpu_count(),
+        help="number of tidy instances to be run in parallel.",
+    )
+    parser.add_argument(
+        "-e",
+        "--environment",
+        default="esp32-arduino-tidy",
+        help="the PlatformIO environment to use (as defined in platformio.ini)",
+    )
+    parser.add_argument(
+        "files", nargs="*", default=[], help="files to be processed (regex on path)"
+    )
+    parser.add_argument("--fix", action="store_true", help="apply fix-its")
+    parser.add_argument(
+        "-q", "--quiet", action="store_false", help="run clang-tidy in quiet mode"
+    )
+    parser.add_argument(
+        "-c", "--changed", action="store_true", help="only run on changed files"
+    )
+    parser.add_argument("-g", "--grep", help="only run on files containing value")
+    parser.add_argument(
+        "--split-num", type=int, help="split the files into X jobs.", default=None
+    )
+    parser.add_argument(
+        "--split-at", type=int, help="which split is this? starts at 1", default=None
+    )
+    parser.add_argument(
+        "--all-headers",
+        action="store_true",
+        help="create a dummy file that checks all headers",
+    )
     args = parser.parse_args()

     try:
-        get_output('clang-tidy-11', '-version')
+        get_output("clang-tidy-11", "-version")
     except:
-        print("""
+        print(
+            """
 Oops. It looks like clang-tidy-11 is not installed.

 Please check you can run "clang-tidy-11 -version" in your terminal and install

@@ -163,19 +204,20 @@ def main():

 Note you can also upload your code as a pull request on GitHub and see the CI check
 output to apply clang-tidy.
-""")
+"""
+        )
         return 1

     idedata = load_idedata(args.environment)
     options = clang_options(idedata)

     files = []
-    for path in git_ls_files(['*.cpp']):
+    for path in git_ls_files(["*.cpp"]):
         files.append(os.path.relpath(path, os.getcwd()))

     if args.files:
         # Match against files specified on command-line
-        file_name_re = re.compile('|'.join(args.files))
+        file_name_re = re.compile("|".join(args.files))
         files = [p for p in files if file_name_re.search(p)]

     if args.changed:

@@ -202,14 +244,17 @@ def main():
     task_queue = queue.Queue(args.jobs)
     lock = threading.Lock()
     for _ in range(args.jobs):
-        t = threading.Thread(target=run_tidy,
-                             args=(args, options, tmpdir, task_queue, lock, failed_files))
+        t = threading.Thread(
+            target=run_tidy,
+            args=(args, options, tmpdir, task_queue, lock, failed_files),
+        )
         t.daemon = True
         t.start()

     # Fill the queue with files.
-    with click.progressbar(files, width=30, file=sys.stderr,
-                           item_show_func=progress_bar_show) as bar:
+    with click.progressbar(
+        files, width=30, file=sys.stderr, item_show_func=progress_bar_show
+    ) as bar:
         for name in bar:
             task_queue.put(name)

@@ -218,21 +263,21 @@ def main():

     except KeyboardInterrupt:
         print()
-        print('Ctrl-C detected, goodbye.')
+        print("Ctrl-C detected, goodbye.")
         if tmpdir:
             shutil.rmtree(tmpdir)
         os.kill(0, 9)

     if args.fix and failed_files:
-        print('Applying fixes ...')
+        print("Applying fixes ...")
         try:
-            subprocess.call(['clang-apply-replacements-11', tmpdir])
+            subprocess.call(["clang-apply-replacements-11", tmpdir])
         except:
-            print('Error applying fixes.\n', file=sys.stderr)
+            print("Error applying fixes.\n", file=sys.stderr)
             raise

     sys.exit(len(failed_files))


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

@@ -12,13 +12,16 @@ temp_header_file = os.path.join(temp_folder, "all-include.cpp")


 def styled(color, msg, reset=True):
-    prefix = ''.join(color) if isinstance(color, tuple) else color
-    suffix = colorama.Style.RESET_ALL if reset else ''
+    prefix = "".join(color) if isinstance(color, tuple) else color
+    suffix = colorama.Style.RESET_ALL if reset else ""
     return prefix + msg + suffix


 def print_error_for_file(file, body):
-    print(styled(colorama.Fore.GREEN, "### File ") + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), file))
+    print(
+        styled(colorama.Fore.GREEN, "### File ")
+        + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), file)
+    )
     print()
     if body is not None:
         print(body)

@@ -100,7 +103,7 @@ def filter_changed(files):
 def filter_grep(files, value):
     matched = []
     for file in files:
-        with open(file, "r") as handle:
+        with open(file) as handle:
             contents = handle.read()
             if value in contents:
                 matched.append(file)

@@ -1,7 +1,13 @@
 #!/usr/bin/env python3

-from __future__ import print_function
-from helpers import styled, print_error_for_file, get_output, get_err, git_ls_files, filter_changed
+from helpers import (
+    styled,
+    print_error_for_file,
+    get_output,
+    get_err,
+    git_ls_files,
+    filter_changed,
+)
 import argparse
 import colorama
 import os

@@ -34,6 +40,12 @@ def main():
     parser.add_argument(
         "-c", "--changed", action="store_true", help="Only run on changed files"
     )
+    parser.add_argument(
+        "-a",
+        "--apply",
+        action="store_true",
+        help="Apply changes to files where possible",
+    )
     args = parser.parse_args()

     files = []

@@ -56,7 +68,7 @@ def main():

     errors = 0

-    cmd = ["black", "--verbose", "--check"] + files
+    cmd = ["black", "--verbose"] + ([] if args.apply else ["--check"]) + files
     print("Running black...")
     print()
     log = get_err(*cmd)

@@ -97,6 +109,21 @@ def main():
             print_error(file_, linno, msg)
             errors += 1

+    PYUPGRADE_TARGET = "--py38-plus"
+    cmd = ["pyupgrade", PYUPGRADE_TARGET] + files
+    print()
+    print("Running pyupgrade...")
+    print()
+    log = get_err(*cmd)
+    for line in log.splitlines():
+        REWRITING = "Rewriting"
+        if line.startswith(REWRITING):
+            file_ = line[len(REWRITING) + 1 :]
+            print_error(
+                file_, None, f"Please run pyupgrade {PYUPGRADE_TARGET} on this file"
+            )
+            errors += 1
+
     sys.exit(errors)


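The new lint step above shells out to pyupgrade over the repository's Python files and turns every file it rewrites into a lint error. A self-contained sketch of the same idea (assuming pyupgrade is installed and reports each changed file on stderr as "Rewriting <path>", which is what the get_err parsing above relies on):

    import subprocess

    def check_pyupgrade(files, target="--py38-plus"):
        """Run pyupgrade (which rewrites files in place) and count the rewritten files."""
        proc = subprocess.run(
            ["pyupgrade", target, *files], capture_output=True, encoding="utf-8"
        )
        errors = 0
        for line in proc.stderr.splitlines():
            if line.startswith("Rewriting "):
                path = line[len("Rewriting "):]
                print(f"{path}: please run pyupgrade {target} on this file")
                errors += 1
        return errors
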
setup.py (10 changes)

@@ -17,11 +17,11 @@ PROJECT_EMAIL = "esphome@nabucasa.com"
 PROJECT_GITHUB_USERNAME = "esphome"
 PROJECT_GITHUB_REPOSITORY = "esphome"

-PYPI_URL = "https://pypi.python.org/pypi/{}".format(PROJECT_PACKAGE_NAME)
-GITHUB_PATH = "{}/{}".format(PROJECT_GITHUB_USERNAME, PROJECT_GITHUB_REPOSITORY)
-GITHUB_URL = "https://github.com/{}".format(GITHUB_PATH)
+PYPI_URL = f"https://pypi.python.org/pypi/{PROJECT_PACKAGE_NAME}"
+GITHUB_PATH = f"{PROJECT_GITHUB_USERNAME}/{PROJECT_GITHUB_REPOSITORY}"
+GITHUB_URL = f"https://github.com/{GITHUB_PATH}"

-DOWNLOAD_URL = "{}/archive/{}.zip".format(GITHUB_URL, const.__version__)
+DOWNLOAD_URL = f"{GITHUB_URL}/archive/{const.__version__}.zip"

 here = os.path.abspath(os.path.dirname(__file__))

@@ -74,7 +74,7 @@ setup(
     zip_safe=False,
     platforms="any",
     test_suite="tests",
-    python_requires=">=3.7,<4.0",
+    python_requires=">=3.8,<4.0",
     install_requires=REQUIRES,
     keywords=["home", "automation"],
     entry_points={"console_scripts": ["esphome = esphome.__main__:main"]},

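The python_requires bump is what makes pip refuse to install newer ESPHome releases on Python 3.7. A quick illustration using the packaging library (packaging is an assumption here, not something this diff touches):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=3.8,<4.0")  # the new python_requires value
    print("3.7.12" in spec)  # False: pip will not select this release on Python 3.7
    print("3.8.12" in spec)  # True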