2023-12-22 08:03:47 +01:00
|
|
|
import json
|
2019-05-12 23:04:36 +02:00
|
|
|
import os.path
|
2024-07-24 13:35:07 +02:00
|
|
|
from pathlib import Path
|
2019-05-12 23:04:36 +02:00
|
|
|
import re
|
|
|
|
import subprocess
|
|
|
|
|
2023-12-22 08:03:47 +01:00
|
|
|
import colorama
|
|
|
|
|
2021-03-07 20:03:16 +01:00
|
|
|
# Repository root: two directory levels above this script file.
root_path = os.path.abspath(os.path.normpath(os.path.join(__file__, "..", "..")))
# Source tree of the esphome package itself.
basepath = os.path.join(root_path, "esphome")
# Scratch directory for generated/cached build artifacts.
temp_folder = os.path.join(root_path, ".temp")
# Generated cpp file that #includes every header in the repo
# (so header-only code is still seen by clang-tidy).
temp_header_file = os.path.join(temp_folder, "all-include.cpp")
|
2019-05-12 23:04:36 +02:00
|
|
|
|
|
|
|
|
2021-11-25 21:54:11 +01:00
|
|
|
def styled(color, msg, reset=True):
    """Wrap *msg* in ANSI color codes.

    ``color`` may be a single colorama code or a tuple of codes that are
    concatenated; when ``reset`` is true the style-reset sequence is
    appended after the message.
    """
    if isinstance(color, tuple):
        prefix = "".join(color)
    else:
        prefix = color
    if reset:
        return prefix + msg + colorama.Style.RESET_ALL
    return prefix + msg
|
2019-05-12 23:04:36 +02:00
|
|
|
|
2021-11-25 21:54:11 +01:00
|
|
|
|
|
|
|
def print_error_for_file(file, body):
    """Print a green header naming *file*, then the error *body* if given."""
    header = styled(colorama.Fore.GREEN, "### File ") + styled(
        (colorama.Fore.GREEN, colorama.Style.BRIGHT), file
    )
    print(header)
    print()
    if body is not None:
        print(body)
        print()
|
2019-05-12 23:04:36 +02:00
|
|
|
|
|
|
|
|
2024-08-09 20:00:41 +02:00
|
|
|
def build_all_include(exclude_components):
    """Generate a single cpp file that includes every header in the repo.

    Without this file, header-only integrations would never be compiled
    and therefore not checked by clang-tidy.  Headers whose component
    name appears in *exclude_components* are skipped.
    """
    includes = []
    for header_path in walk_files(basepath):
        # Only .h files are interesting here.
        if os.path.splitext(header_path)[1] != ".h":
            continue
        # Skip headers belonging to excluded components.
        parts = header_path.split("/components/")
        if len(parts) > 1 and parts[1].split("/")[0] in exclude_components:
            continue
        rel = os.path.relpath(header_path, root_path)
        include_p = rel.replace(os.path.sep, "/")
        includes.append(f'#include "{include_p}"')
    includes.sort()
    # Trailing empty entry yields a final newline after join().
    includes.append("")
    target = Path(temp_header_file)
    target.parent.mkdir(exist_ok=True)
    target.write_text("\n".join(includes), encoding="utf-8")
|
2019-05-12 23:04:36 +02:00
|
|
|
|
|
|
|
|
|
|
|
def walk_files(path):
    """Yield the full path of every file beneath *path*, recursively."""
    for dirpath, _dirnames, filenames in os.walk(path):
        for filename in filenames:
            yield os.path.join(dirpath, filename)
|
|
|
|
|
|
|
|
|
|
|
|
def get_output(*args):
    """Run *args* as a subprocess and return its stdout decoded as UTF-8.

    stderr is captured (and discarded); the exit status is not checked.
    """
    with subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ) as proc:
        stdout_bytes, _stderr = proc.communicate()
    return stdout_bytes.decode("utf-8")
|
|
|
|
|
|
|
|
|
|
|
|
def get_err(*args):
    """Run *args* as a subprocess and return its stderr decoded as UTF-8.

    stdout is captured (and discarded); the exit status is not checked.
    """
    with subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ) as proc:
        _stdout, stderr_bytes = proc.communicate()
    return stderr_bytes.decode("utf-8")
|
2019-05-12 23:04:36 +02:00
|
|
|
|
|
|
|
|
|
|
|
def splitlines_no_ends(string):
    """Split *string* into lines, stripping whitespace from each line."""
    return list(map(str.strip, string.splitlines()))
|
|
|
|
|
|
|
|
|
2024-03-16 02:22:34 +01:00
|
|
|
def changed_files(branch="dev"):
    """Return a sorted list of files changed relative to *branch*.

    The merge base is looked up against the first remote (upstream,
    origin, then any other configured remote) for which
    ``git merge-base`` succeeds.

    Raises:
        ValueError: if no remote yields a usable merge base.
    """
    check_remotes = ["upstream", "origin"]
    check_remotes.extend(splitlines_no_ends(get_output("git", "remote")))
    for remote in check_remotes:
        command = ["git", "merge-base", f"refs/remotes/{remote}/{branch}", "HEAD"]
        try:
            merge_base = splitlines_no_ends(get_output(*command))[0]
            break
        # When the remote/branch does not exist, `git merge-base` prints
        # nothing and indexing [0] raises — try the next remote.  Use
        # `except Exception` (not a bare except) so KeyboardInterrupt
        # and SystemExit still abort the script.
        except Exception:  # pylint: disable=broad-except
            pass
    else:
        raise ValueError("Git not configured")
    command = ["git", "diff", merge_base, "--name-only"]
    changed = splitlines_no_ends(get_output(*command))
    # git prints paths relative to the repo root; convert to cwd-relative.
    changed = [os.path.relpath(f, os.getcwd()) for f in changed]
    changed.sort()
    return changed
|
|
|
|
|
|
|
|
|
|
|
|
def filter_changed(files):
    """Keep only the entries of *files* that changed relative to dev.

    Prints the retained files for operator feedback and returns them.
    """
    changed = changed_files()
    files = [name for name in files if name in changed]
    print("Changed files:")
    if not files:
        print(" No changed files!")
    for name in files:
        print(f" {name}")
    return files
|
2019-05-24 17:20:06 +02:00
|
|
|
|
|
|
|
|
2021-09-13 18:11:27 +02:00
|
|
|
def filter_grep(files, value):
    """Return the subset of *files* whose text content contains *value*."""

    def _contains(path):
        # Read the whole file; these are source files, so this is cheap.
        with open(path, encoding="utf-8") as handle:
            return value in handle.read()

    return [path for path in files if _contains(path)]
|
|
|
|
|
|
|
|
|
2021-07-25 23:54:32 +02:00
|
|
|
def git_ls_files(patterns=None):
    """Map each git-tracked file path to the integer first field of
    ``git ls-files -s`` output (optionally restricted to *patterns*).
    """
    command = ["git", "ls-files", "-s"]
    if patterns is not None:
        command.extend(patterns)
    with subprocess.Popen(command, stdout=subprocess.PIPE) as proc:
        stdout_bytes, _ = proc.communicate()
    result = {}
    for line in stdout_bytes.decode("utf-8").splitlines():
        fields = line.split()
        # fields[0] is the mode, fields[3] the path (per `ls-files -s`).
        result[fields[3].strip()] = int(fields[0])
    return result
|
2021-08-09 22:43:18 +02:00
|
|
|
|
|
|
|
|
|
|
|
def load_idedata(environment):
    """Return idedata (include paths, defines, compiler info) for *environment*.

    The result is cached as JSON in the .temp folder and regenerated only
    when platformio.ini (or, for idf environments, sdkconfig.defaults) is
    newer than the cache.  For nrf environments a minimal Zephyr project
    is built to obtain a compile_commands.json, which is then transformed
    into the same idedata shape that `pio run -t idedata` produces.
    """
    platformio_ini = Path(root_path) / "platformio.ini"
    temp_idedata = Path(temp_folder) / f"idedata-{environment}.json"

    changed = False
    if not platformio_ini.is_file() or not temp_idedata.is_file():
        changed = True
    elif platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime:
        changed = True

    if "idf" in environment:
        # remove full sdkconfig when the defaults have changed so that it is regenerated
        default_sdkconfig = Path(root_path) / "sdkconfig.defaults"
        temp_sdkconfig = Path(temp_folder) / f"sdkconfig-{environment}"

        if not temp_sdkconfig.is_file():
            changed = True
        elif default_sdkconfig.stat().st_mtime >= temp_sdkconfig.stat().st_mtime:
            temp_sdkconfig.unlink()
            changed = True

    if not changed:
        return json.loads(temp_idedata.read_text())

    # ensure temp directory exists before running pio, as it writes sdkconfig to it
    Path(temp_folder).mkdir(exist_ok=True)

    if "nrf" in environment:
        build_environment = environment.replace("-tidy", "")
        build_dir = Path(temp_folder) / f"build-{build_environment}"
        Path(build_dir).mkdir(exist_ok=True)
        Path(build_dir / "platformio.ini").write_text(
            Path(platformio_ini).read_text(encoding="utf-8"), encoding="utf-8"
        )
        esphome_dir = Path(build_dir / "esphome")
        esphome_dir.mkdir(exist_ok=True)
        # Minimal translation unit so the build produces compile commands.
        Path(esphome_dir / "main.cpp").write_text(
            """
            #include <zephyr/kernel.h>
            int main() { return 0;}
            """,
            encoding="utf-8",
        )
        zephyr_dir = Path(build_dir / "zephyr")
        zephyr_dir.mkdir(exist_ok=True)
        Path(zephyr_dir / "prj.conf").write_text(
            """
            CONFIG_NEWLIB_LIBC=y
            """,
            encoding="utf-8",
        )
        subprocess.run(
            ["pio", "run", "-e", build_environment, "-d", build_dir], check=True
        )

        def extract_include_paths(command):
            # Collect every -I / -isystem path from a compile command string.
            include_paths = []
            include_pattern = re.compile(r"(-I|-isystem)\s*([^\s]+)")
            for match in include_pattern.findall(command):
                include_paths.append(match[1])
            return include_paths

        def extract_defines(command):
            # Collect every -D define from a compile command string.
            defines = []
            define_pattern = re.compile(r"-D\s*([^\s]+)")
            for match in define_pattern.findall(command):
                # Bug fix: this must be a one-element tuple.  With the former
                # ("_ASMLANGUAGE") — a plain string — `not in` performed a
                # substring test, silently dropping any define that happens
                # to be a substring of "_ASMLANGUAGE" (e.g. "ASM").
                if match not in ("_ASMLANGUAGE",):
                    defines.append(match)
            return defines

        def find_cxx_path(commands):
            # First compiler executable whose name ends in "++" wins.
            for entry in commands:
                command = entry["command"]
                cxx_path = command.split()[0]
                if not cxx_path.endswith("++"):
                    continue
                return cxx_path

        def get_builtin_include_paths(compiler):
            # Parse the compiler's built-in include search list from the
            # stderr of `<compiler> -E -x c++ - -v`.
            result = subprocess.run(
                [compiler, "-E", "-x", "c++", "-", "-v"],
                input="",
                text=True,
                stderr=subprocess.PIPE,
                stdout=subprocess.DEVNULL,
                check=True,
            )
            include_paths = []
            start_collecting = False
            for line in result.stderr.splitlines():
                if start_collecting:
                    if line.startswith(" "):
                        include_paths.append(line.strip())
                    else:
                        break
                if "#include <...> search starts here:" in line:
                    start_collecting = True
            return include_paths

        def extract_cxx_flags(command):
            flags = []
            # Extracts CXXFLAGS from the command string, excluding includes and defines.
            flag_pattern = re.compile(
                r"(-O[0-3s]|-g|-std=[^\s]+|-Wall|-Wextra|-Werror|--[^\s]+|-f[^\s]+|-m[^\s]+|-imacros\s*[^\s]+)"
            )
            for match in flag_pattern.findall(command):
                flags.append(match.replace("-imacros ", "-imacros"))
            return flags

        def transform_to_idedata_format(compile_commands):
            # Reshape compile_commands.json entries into the idedata dict
            # that the non-nrf `pio run -t idedata` path produces.
            cxx_path = find_cxx_path(compile_commands)
            idedata = {
                "includes": {
                    "toolchain": get_builtin_include_paths(cxx_path),
                    "build": set(),
                },
                "defines": set(),
                "cxx_path": cxx_path,
                "cxx_flags": set(),
            }

            for entry in compile_commands:
                command = entry["command"]
                # Renamed from `exec`, which shadowed the builtin.
                compiler = command.split()[0]
                if compiler != cxx_path:
                    continue

                idedata["includes"]["build"].update(extract_include_paths(command))
                idedata["defines"].update(extract_defines(command))
                idedata["cxx_flags"].update(extract_cxx_flags(command))

            idedata["defines"].update(
                [
                    "pthread_attr_t=pthread_attr",
                    "pthread_mutexattr_t=pthread_mutexattr",
                    "pthread_condattr_t=pthread_condattr",
                ]
            )

            # Convert sets to lists for JSON serialization
            idedata["includes"]["build"] = list(idedata["includes"]["build"])
            idedata["defines"] = list(idedata["defines"])
            idedata["cxx_flags"] = list(idedata["cxx_flags"])

            return idedata

        compile_commands = json.loads(
            Path(
                build_dir
                / ".pio"
                / "build"
                / build_environment
                / "compile_commands.json"
            ).read_text(encoding="utf-8")
        )
        data = transform_to_idedata_format(compile_commands)
    else:
        stdout = subprocess.check_output(
            ["pio", "run", "-t", "idedata", "-e", environment]
        )
        match = re.search(r'{\s*".*}', stdout.decode("utf-8"))
        data = json.loads(match.group())

    temp_idedata.write_text(json.dumps(data, indent=2) + "\n")
    return data
|
2024-01-03 06:00:52 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_binary(name: str, version: str) -> str:
    """Locate a tool binary, preferring the version-suffixed name.

    Tries "<name>-<version>" first, then plain "<name>" (verifying the
    version it reports).  On failure prints installation help and
    re-raises FileNotFoundError.
    """
    candidate = f"{name}-{version}"
    try:
        # If no exception was raised, the command was successful
        subprocess.check_output([candidate, "-version"], stderr=subprocess.STDOUT)
        return candidate
    except FileNotFoundError:
        pass

    candidate = name
    try:
        probe = subprocess.run(
            [candidate, "-version"], text=True, capture_output=True, check=False
        )
        if probe.returncode == 0 and f"version {version}" in probe.stdout:
            return candidate
        raise FileNotFoundError(f"{name} not found")

    except FileNotFoundError:
        print(
            f"""
            Oops. It looks like {name} is not installed. It should be available under venv/bin
            and in PATH after running in turn:
                script/setup
                source venv/bin/activate.

            Please confirm you can run "{name} -version" or "{name}-{version} -version"
            in your terminal and install
            {name} (v{version}) if necessary.

            Note you can also upload your code as a pull request on GitHub and see the CI check
            output to apply {name}
            """
        )
        raise
|