2018-02-01 12:42:07 +00:00
|
|
|
#! /usr/bin/env nix-shell
|
2020-02-26 16:07:57 +00:00
|
|
|
#! nix-shell -i python3 -p "python3.withPackages (ps: with ps; [ mypy attrs ])"
|
2018-02-01 12:42:07 +00:00
|
|
|
#
|
2018-05-26 15:37:37 +01:00
|
|
|
# This script downloads Home Assistant's source tarball.
|
2019-04-25 11:50:33 +01:00
|
|
|
# Inside the homeassistant/components directory, each integration has an associated manifest.json,
|
|
|
|
# specifying required packages and other integrations it depends on:
|
2018-02-01 12:42:07 +00:00
|
|
|
#
|
2019-04-25 11:50:33 +01:00
|
|
|
# {
|
|
|
|
# "requirements": [ "package==1.2.3" ],
|
|
|
|
# "dependencies": [ "component" ]
|
|
|
|
# }
|
2018-02-01 12:42:07 +00:00
|
|
|
#
|
2019-04-25 11:50:33 +01:00
|
|
|
# By parsing the files, a dictionary mapping integrations to requirements and dependencies is created.
|
2018-05-26 15:37:37 +01:00
|
|
|
# For all of these requirements and the dependencies' requirements,
|
2019-04-25 11:50:33 +01:00
|
|
|
# nixpkgs' python3Packages are searched for appropriate names.
|
|
|
|
# Then, a Nix attribute set mapping integration name to dependencies is created.
|
2018-02-01 12:42:07 +00:00
|
|
|
|
|
|
|
import json
|
2019-04-25 11:50:33 +01:00
|
|
|
import os
|
2020-02-26 14:53:41 +00:00
|
|
|
import pathlib
|
2018-02-01 12:42:07 +00:00
|
|
|
import re
|
2019-04-25 11:50:33 +01:00
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import tarfile
|
2020-02-26 14:53:41 +00:00
|
|
|
import tempfile
|
|
|
|
from io import BytesIO
|
2020-06-21 16:07:30 +01:00
|
|
|
from typing import Dict, Optional, Set, Any
|
2019-04-25 11:50:33 +01:00
|
|
|
from urllib.request import urlopen
|
2018-02-01 12:42:07 +00:00
|
|
|
|
2020-02-26 14:53:41 +00:00
|
|
|
# Module-path prefix of Home Assistant integrations (e.g. "homeassistant.components.hue").
COMPONENT_PREFIX = "homeassistant.components"
# Nixpkgs attribute set that is searched for matching Python packages.
PKG_SET = "python3Packages"

# If some requirements are matched by multiple python packages,
# the following can be used to choose one of them
PKG_PREFERENCES = {
    # Use python3Packages.youtube-dl-light instead of python3Packages.youtube-dl
    "youtube-dl": "youtube-dl-light",
    "tensorflow-bin": "tensorflow",
    "tensorflow-bin_2": "tensorflow",
    "tensorflowWithoutCuda": "tensorflow",
    "tensorflow-build_2": "tensorflow",
}
|
|
|
|
|
2020-02-26 14:53:41 +00:00
|
|
|
|
2020-02-26 16:07:57 +00:00
|
|
|
def run_mypy() -> None:
    """Type-check this script with mypy; raises CalledProcessError on failure."""
    command = ["mypy", "--ignore-missing-imports", __file__]
    print(f"$ {' '.join(command)}")
    subprocess.run(command, check=True)
|
|
|
|
|
|
|
|
|
2018-02-01 12:42:07 +00:00
|
|
|
def get_version() -> str:
    """Return the packaged Home Assistant version from ./default.nix.

    Reads the default.nix next to this script (located via sys.argv[0]) and
    extracts the value of its hassVersion binding.

    Raises:
        RuntimeError: if no hassVersion assignment is found (the original
            code crashed with an opaque AttributeError on ``None.group``).
    """
    with open(os.path.dirname(sys.argv[0]) + "/default.nix") as f:
        # A version consists of digits, dots, and possibly a "b" (for beta)
        m = re.search('hassVersion = "([\\d\\.b]+)";', f.read())
    if m is None:
        raise RuntimeError("hassVersion not found in default.nix")
    return m.group(1)
|
|
|
|
|
2020-02-26 14:53:41 +00:00
|
|
|
|
2020-03-11 10:49:33 +00:00
|
|
|
def parse_components(version: str = "master"):
    """Download the Home Assistant source tarball for *version* and return a
    dict mapping each integration (component) name to its parsed manifest.

    NOTE(review): relies on GitHub's archive naming the extracted root
    directory ``core-{version}`` — verify if the repository is renamed.
    """
    components = {}
    with tempfile.TemporaryDirectory() as tmp:
        with urlopen(
            f"https://github.com/home-assistant/home-assistant/archive/{version}.tar.gz"
        ) as response:
            tarfile.open(fileobj=BytesIO(response.read())).extractall(tmp)
        # Use part of a script from the Home Assistant codebase
        core_path = os.path.join(tmp, f"core-{version}")
        # The import below only resolves after the tarball has been extracted
        # and its root directory appended to sys.path — order matters here.
        sys.path.append(core_path)
        from script.hassfest.model import Integration
        integrations = Integration.load_dir(
            pathlib.Path(
                os.path.join(core_path, "homeassistant/components")
            )
        )
        for domain in sorted(integrations):
            integration = integrations[domain]
            components[domain] = integration.manifest
    return components
|
|
|
|
|
2020-02-26 14:53:41 +00:00
|
|
|
|
2018-05-25 16:14:30 +01:00
|
|
|
# Recursively get the requirements of a component and its dependencies
def get_reqs(components: Dict[str, Dict[str, Any]], component: str, processed: Set[str]) -> Set[str]:
    """Return the transitive set of requirement strings for *component*.

    Walks "dependencies" and "after_dependencies" recursively, using
    *processed* to break dependency cycles.

    Args:
        components: mapping of component name to its parsed manifest.
        component: name of the component to resolve.
        processed: accumulator of already-visited component names (mutated).

    Returns:
        The union of the component's own requirements and those of all of
        its (transitive) dependencies.
    """
    requirements = set(components[component].get("requirements", []))
    # Copy into a fresh list: the previous code called .extend() on the list
    # stored in the manifest itself, silently corrupting the shared
    # components dict with after_dependencies entries.
    deps = list(components[component].get("dependencies", []))
    deps.extend(components[component].get("after_dependencies", []))
    processed.add(component)
    for dependency in deps:
        if dependency not in processed:
            requirements.update(get_reqs(components, dependency, processed))
    return requirements
|
|
|
|
|
2020-02-26 14:53:41 +00:00
|
|
|
|
2020-02-26 15:22:48 +00:00
|
|
|
def dump_packages() -> Dict[str, Dict[str, str]]:
    """Return a JSON dump of Nixpkgs' python3Packages, keyed by attribute path."""
    nixpkgs_root = os.path.dirname(sys.argv[0]) + "/../../.."
    # Query the local nixpkgs checkout for every package in PKG_SET.
    command = ["nix-env", "-f", nixpkgs_root, "-qa", "-A", PKG_SET, "--json"]
    return json.loads(subprocess.check_output(command))
|
2018-02-01 12:42:07 +00:00
|
|
|
|
2020-02-26 14:53:41 +00:00
|
|
|
|
2020-02-26 15:22:48 +00:00
|
|
|
def name_to_attr_path(req: str, packages: Dict[str, Dict[str, str]]) -> Optional[str]:
    """Find the nixpkgs attribute path providing requirement *req*.

    Args:
        req: a PyPI package name (no version part).
        packages: nix-env JSON dump mapping attribute path -> package info.

    Returns:
        The unique matching attribute path (e.g. "python3Packages.foo"),
        or None when nothing matches.

    Raises:
        AssertionError: if more than one derivation matches after applying
            PKG_PREFERENCES.
    """
    attr_paths = set()
    names = [req]
    # E.g. python-mpd2 is actually called python3.6-mpd2
    # instead of python-3.6-python-mpd2 inside Nixpkgs
    if req.startswith("python-") or req.startswith("python_"):
        names.append(req[len("python-") :])
    for name in names:
        # treat "-" and "_" equally
        name = re.sub("[-_]", "[-_]", name)
        # \d+\.\d+ instead of \d\.\d so multi-digit interpreter versions
        # (e.g. python3.10-foo) are matched as well.
        pattern = re.compile("^python\\d+\\.\\d+-{}-\\d".format(name), re.I)
        for attr_path, package in packages.items():
            if pattern.match(package["name"]):
                attr_paths.add(attr_path)
    if len(attr_paths) > 1:
        # Disambiguate by replacing known duplicates with the preferred package.
        for to_replace, replacement in PKG_PREFERENCES.items():
            try:
                attr_paths.remove(PKG_SET + "." + to_replace)
                attr_paths.add(PKG_SET + "." + replacement)
            except KeyError:
                pass
    # Let's hope there's only one derivation with a matching name
    assert len(attr_paths) <= 1, "{} matches more than one derivation: {}".format(
        req, attr_paths
    )
    if len(attr_paths) == 1:
        return attr_paths.pop()
    else:
        return None
|
|
|
|
|
2020-02-26 14:53:41 +00:00
|
|
|
|
2020-02-26 15:22:48 +00:00
|
|
|
def main() -> None:
    """Generate component-packages.nix mapping each Home Assistant
    integration to the nixpkgs python packages it requires.

    Side effects: prints progress/missing-dependency reports and writes
    component-packages.nix next to this script.
    """
    packages = dump_packages()
    version = get_version()
    print("Generating component-packages.nix for version {}".format(version))
    components = parse_components(version=version)
    build_inputs = {}
    for component in sorted(components.keys()):
        attr_paths = []
        missing_reqs = []
        reqs = sorted(get_reqs(components, component, set()))
        for req in reqs:
            # Some requirements are specified by url, e.g. https://example.org/foobar#xyz==1.0.0
            # Therefore, if there's a "#" in the line, only take the part after it
            req = req[req.find("#") + 1 :]
            name = req.split("==")[0]
            attr_path = name_to_attr_path(name, packages)
            if attr_path is not None:
                # Add attribute path without "python3Packages." prefix
                attr_paths.append(attr_path[len(PKG_SET + ".") :])
            else:
                missing_reqs.append(name)
        build_inputs[component] = (attr_paths, missing_reqs)
        # Fixed: the old code computed len(reqs) > len(build_inputs[component]),
        # i.e. a bool comparison against the (attr_paths, missing_reqs) tuple's
        # length (always 2) — the report printed wrong counts.
        n_diff = len(reqs) - len(attr_paths)
        if n_diff > 0:
            print("Component {} is missing {} dependencies".format(component, n_diff))
            print("missing requirements: {}".format(missing_reqs))

    with open(os.path.dirname(sys.argv[0]) + "/component-packages.nix", "w") as f:
        f.write("# Generated by parse-requirements.py\n")
        f.write("# Do not edit!\n\n")
        f.write("{\n")
        f.write(f'  version = "{version}";\n')
        f.write("  components = {\n")
        for component, deps in build_inputs.items():
            available, missing = deps
            f.write(f'    "{component}" = ps: with ps; [ ')
            f.write(" ".join(available))
            f.write("];")
            if len(missing) > 0:
                # Leave a trail of what could not be mapped to nixpkgs.
                f.write(f" # missing inputs: {' '.join(missing)}")
            f.write("\n")
        f.write("  };\n")
        f.write("}\n")
|
|
|
|
|
2020-02-26 16:07:57 +00:00
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Fail fast on type errors before doing any network or nix work.
    run_mypy()
    main()
|