# Used by pkgs/misc/vim-plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py

# format:
# $ nix run nixpkgs.python3Packages.black -c black update.py
# type-check:
# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
# linted:
# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py

import argparse
import functools
import http
import json
import logging
import os
import subprocess
import sys
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET
from dataclasses import dataclass
from datetime import datetime
from functools import wraps
from multiprocessing.dummy import Pool
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from urllib.parse import urljoin, urlparse

import git

ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

LOG_LEVELS = {
    logging.getLevelName(level): level
    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}

log = logging.getLogger()

def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)

    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier, e.g. a value of 2 will double the delay
        each retry
    """

    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            # final attempt: any exception now propagates to the caller
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
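
# Illustrative use of `retry` (a sketch, not part of the module's API): with
# the defaults tries=4, delay=3, backoff=2, a flaky call is attempted four
# times in total, sleeping 3, 6 and 12 seconds between attempts:
#
#     @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
#     def fetch(url: str) -> bytes:
#         return urllib.request.urlopen(url, timeout=10).read()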

def make_request(url: str) -> urllib.request.Request:
    token = os.getenv("GITHUB_API_TOKEN")
    headers = {}
    if token is not None:
        headers["Authorization"] = f"token {token}"
    return urllib.request.Request(url, headers=headers)
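
# A sketch of the intended effect (assuming GITHUB_API_TOKEN is exported):
#
#     req = make_request("https://github.com/NixOS/nixpkgs/commits/master.atom")
#     # req now carries an "Authorization: token <GITHUB_API_TOKEN>" header
#
# Authenticated requests get a much higher GitHub rate limit than anonymous
# ones, which matters when prefetching many plugins in parallel.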

class Repo:
    def __init__(
        self, uri: str, branch: str, alias: Optional[str]
    ) -> None:
        self.uri = uri
        '''Url to the repo'''
        self.branch = branch
        self.alias = alias
        self.redirect: Dict[str, str] = {}

    @property
    def name(self):
        return self.uri.split('/')[-1]

    def __repr__(self) -> str:
        return f"Repo({self.name}, {self.uri})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        # conservative default: assume submodules, since _prefetch always
        # passes --fetch-submodules anyway
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        loaded = self._prefetch(None)
        updated = datetime.strptime(loaded['date'], "%Y-%m-%dT%H:%M:%S%z")
        return loaded['rev'], updated

    def _prefetch(self, ref: Optional[str]):
        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            cmd.append(ref)
        log.debug(cmd)
        data = subprocess.check_output(cmd)
        loaded = json.loads(data)
        return loaded

    def prefetch(self, ref: Optional[str]) -> str:
        loaded = self._prefetch(ref)
        return loaded["sha256"]

    def as_nix(self, plugin: "Plugin") -> str:
        return f'''fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}'''
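
# Illustrative output of Repo.as_nix (a sketch; url/rev/sha256 are
# placeholders):
#
#     fetchgit {
#       url = "https://example.com/some/repo";
#       rev = "<commit sha>";
#       sha256 = "<nix sha256>";
#     }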

class RepoGitHub(Repo):
    def __init__(
        self, owner: str, repo: str, branch: str, alias: Optional[str]
    ) -> None:
        self.owner = owner
        self.repo = repo
        super().__init__(self.url(""), branch, alias)
        log.debug("Instantiating github repo %s/%s", self.owner, self.repo)

    @property
    def name(self):
        return self.repo

    def url(self, path: str) -> str:
        return urljoin(f"https://github.com/{self.owner}/{self.name}/", path)

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"))
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        commit_url = self.url(f"commits/{self.branch}.atom")
        commit_req = make_request(commit_url)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            self._check_for_redirect(commit_url, req)
            xml = req.read()
            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found in feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found in feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            return Path(str(url.path)).name, updated

    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )
            end_line = "\n" if self.alias is None else f" as {self.alias}\n"
            plugin_line = "{owner}/{name}" + end_line

            old_plugin = plugin_line.format(owner=self.owner, name=self.name)
            new_plugin = plugin_line.format(owner=new_owner, name=new_name)
            self.redirect[old_plugin] = new_plugin

    def prefetch(self, commit: str) -> str:
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256

    def prefetch_github(self, ref: str) -> str:
        data = subprocess.check_output(
            ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        )
        return data.strip().decode("utf-8")

    def as_nix(self, plugin: "Plugin") -> str:
        if plugin.has_submodules:
            submodule_attr = "\n      fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f'''fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}'''
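
# Illustrative output of RepoGitHub.as_nix for a plugin with submodules
# (a sketch; owner/repo/rev/sha256 are placeholders):
#
#     fetchFromGitHub {
#       owner = "someowner";
#       repo = "somerepo";
#       rev = "<commit sha>";
#       sha256 = "<nix sha256>";
#       fetchSubmodules = true;
#     }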

@dataclass
class PluginDesc:
    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        if self.alias is None:
            return self.repo.name
        else:
            return self.alias
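
# For example (a sketch, not executed): a spec line "owner/repo as foo" yields
# PluginDesc(repo=RepoGitHub("owner", "repo", "HEAD", "foo"), branch="HEAD",
# alias="foo"), whose .name is "foo"; without the alias, .name falls back to
# the repository name "repo".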

class Plugin:
    def __init__(
        self,
        name: str,
        commit: str,
        has_submodules: bool,
        sha256: str,
        date: Optional[datetime] = None,
    ) -> None:
        self.name = name
        self.commit = commit
        self.has_submodules = has_submodules
        self.sha256 = sha256
        self.date = date

    @property
    def normalized_name(self) -> str:
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        copy = self.__dict__.copy()
        del copy["date"]
        return copy
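
# as_json drops the volatile `date` field so cache entries stay stable across
# runs; a sketch of the serialized form (values are placeholders):
#
#     {"name": "some-plugin", "commit": "<sha>",
#      "has_submodules": false, "sha256": "<nix sha256>"}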

class Editor:
    """The configuration of the update script."""

    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"

    def get_current_plugins(self):
        """To fill the cache"""
        return get_current_plugins(self)

    def load_plugin_spec(self, plugin_file) -> List[PluginDesc]:
        return load_plugin_spec(plugin_file)

    def generate_nix(self, plugins, outfile: str):
        '''Returns nothing for now, writes directly to outfile'''
        raise NotImplementedError()

    def get_update(self, input_file: str, outfile: str, proc: int):
        return get_update(input_file, outfile, proc, editor=self)

    @property
    def attr_path(self):
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        return rewrite_input(*args, **kwargs)

    def create_parser(self):
        parser = argparse.ArgumentParser(
            description=(
                f"Updates nix derivations for {self.name} plugins.\n"
                f"By default from {self.default_in} to {self.default_out}"
            )
        )
        parser.add_argument(
            "--add",
            dest="add_plugins",
            default=[],
            action="append",
            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
        )
        parser.add_argument(
            "--input-names",
            "-i",
            dest="input_file",
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        parser.add_argument(
            "--out",
            "-o",
            dest="outfile",
            default=self.default_out,
            help="Filename to save generated nix code",
        )
        parser.add_argument(
            "--proc",
            "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Exporting GITHUB_API_TOKEN allows higher values.",
        )
        parser.add_argument(
            "--no-commit", "-n", action="store_true", default=False,
            help="Whether to autocommit changes"
        )
        parser.add_argument(
            "--debug", "-d", choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level"
        )
        return parser
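
# Typical invocation from a concrete updater (a sketch; `MyEditor` is a
# hypothetical subclass that overrides generate_nix, as the vim and kakoune
# update scripts do):
#
#     editor = MyEditor("vim", Path(__file__).parent, GET_PLUGINS_NIX_EXPR)
#     args = editor.create_parser().parse_args()
#     update_plugins(editor, args)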

class CleanEnvironment(object):
    def __enter__(self) -> None:
        self.old_environ = os.environ.copy()
        local_pkgs = str(Path(__file__).parent.parent.parent)
        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        os.environ.update(self.old_environ)
        self.empty_config.close()
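
# Usage sketch: run a nix command against the local nixpkgs checkout with an
# empty user configuration, then restore the caller's environment:
#
#     with CleanEnvironment():
#         out = subprocess.check_output(["nix", "eval", "..."])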

def get_current_plugins(editor: Editor) -> List[Plugin]:
    with CleanEnvironment():
        cmd = ["nix", "eval", "--extra-experimental-features", "nix-command", "--impure", "--json", "--expr", editor.get_plugins]
        log.debug("Running command %s", cmd)
        out = subprocess.check_output(cmd)
    data = json.loads(out)
    plugins = []
    for name, attr in data.items():
        p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
        plugins.append(p)
    return plugins
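
# The nix expression in editor.get_plugins is expected to evaluate to JSON of
# this shape (a sketch; names and values are placeholders):
#
#     {"some-plugin": {"rev": "<sha>", "submodules": false,
#                      "sha256": "<hash>"}, ...}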

def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Dict[str, str]]:
    repo, branch, alias = p.repo, p.branch, p.alias
    name = alias or p.repo.name
    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug("Cache hit!")
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    has_submodules = repo.has_submodules()
    print(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return (
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )

def fetch_plugin_from_pluginline(plugin_line: str) -> Plugin:
    plugin, _ = prefetch_plugin(parse_plugin_line(plugin_line))
    return plugin

def print_download_error(plugin: str, ex: Exception):
    print(f"{plugin}: {ex}", file=sys.stderr)
    ex_traceback = ex.__traceback__
    tb_lines = [
        line.rstrip("\n")
        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
    ]
    print("\n".join(tb_lines))

def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Dict[str, str]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Dict[str, str]]:
    '''Split prefetch results into successes and failures, merging the
    redirect maps; abort if any plugin could not be downloaded.'''
    failures: List[Tuple[str, Exception]] = []
    plugins = []
    redirects: Dict[str, str] = {}
    for (pdesc, result, redirect) in results:
        if isinstance(result, Exception):
            failures.append((pdesc.name, result))
        else:
            plugins.append((pdesc, result))
            redirects.update(redirect)

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        print()
        return plugins, redirects
    else:
        print(f", {len(failures)} plugin(s) could not be downloaded:\n")

        for (plugin, exception) in failures:
            print_download_error(plugin, exception)

        sys.exit(1)

def make_repo(uri: str, branch: str, alias: Optional[str]) -> Repo:
    '''Instantiate a Repo with the correct specialization depending on the
    server (currently only GitHub gets a specialization).'''
    # dumb check to see if it's of the form owner/repo (=> github) or https://...
    res = uri.split('/')
    if len(res) <= 2:
        repo = RepoGitHub(res[0], res[1], branch, alias)
    else:
        repo = Repo(uri.strip(), branch, alias)
    return repo
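
# For example (a sketch): make_repo("someowner/somerepo", "HEAD", None) yields
# a RepoGitHub, while make_repo("https://git.example.com/owner/repo", "HEAD",
# None) falls back to the generic Repo driven by nix-prefetch-git.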

def parse_plugin_line(line: str) -> PluginDesc:
    branch = "HEAD"
    alias = None
    uri = line
    if " as " in uri:
        uri, alias = uri.split(" as ")
        alias = alias.strip()
    if "@" in uri:
        uri, branch = uri.split("@")

    repo = make_repo(uri.strip(), branch.strip(), alias)

    return PluginDesc(repo, branch.strip(), alias)
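
# Accepted line formats (a sketch of the grammar; branch defaults to HEAD and
# the alias is optional):
#
#     owner/repo
#     owner/repo@branch
#     owner/repo as alias
#     https://example.com/owner/repo@branch as alias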

def load_plugin_spec(plugin_file: str) -> List[PluginDesc]:
    plugins = []
    with open(plugin_file) as f:
        for line in f:
            if line.startswith("#"):
                continue
            plugin = parse_plugin_line(line)
            plugins.append(plugin)
    return plugins

def get_cache_path(cache_file_name: str) -> Optional[Path]:
    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
    if xdg_cache is None:
        home = os.environ.get("HOME", None)
        if home is None:
            return None
        xdg_cache = str(Path(home, ".cache"))

    return Path(xdg_cache, cache_file_name)
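
# e.g. with HOME=/home/alice and XDG_CACHE_HOME unset, a cache file name of
# "vim-plugin-cache.json" resolves to /home/alice/.cache/vim-plugin-cache.json
# (an illustrative path).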

class Cache:
    '''Cache of prefetched plugins, keyed by commit hash.'''

    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        self.cache_file = get_cache_path(cache_file_name)

        downloads = {}
        for plugin in initial_plugins:
            downloads[plugin.commit] = plugin
        downloads.update(self.load())
        self.downloads = downloads

    def load(self) -> Dict[str, Plugin]:
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        downloads: Dict[str, Plugin] = {}
        with open(self.cache_file) as f:
            data = json.load(f)
            for attr in data.values():
                p = Plugin(
                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
                )
                downloads[attr["commit"]] = p
        return downloads

    def store(self) -> None:
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        with open(self.cache_file, "w+") as f:
            data = {}
            for name, attr in self.downloads.items():
                data[name] = attr.as_json()
            json.dump(data, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key, None)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value
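
# Because the cache is keyed by commit hash, a plugin whose latest commit is
# unchanged since the previous run is not prefetched again; a usage sketch:
#
#     cache = Cache(editor.get_current_plugins(), editor.cache_file)
#     cached = cache["<commit sha>"]  # -> Plugin, or None on a cache miss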

def prefetch(
    pluginDesc: PluginDesc, cache: Cache
) -> Tuple[PluginDesc, Union[Exception, Plugin], dict]:
    try:
        plugin, redirect = prefetch_plugin(pluginDesc, cache)
        cache[plugin.commit] = plugin
        return (pluginDesc, plugin, redirect)
    except Exception as e:
        return (pluginDesc, e, {})

def rewrite_input(
    input_file: Path,
    deprecated: Path,
    redirects: Optional[Dict[str, str]] = None,
    append: Tuple = (),
):
    with open(input_file, "r") as f:
        lines = f.readlines()

    lines.extend(append)

    if redirects:
        lines = [redirects.get(line, line) for line in lines]

        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        for old, new in redirects.items():
            old_plugin = fetch_plugin_from_pluginline(old)
            new_plugin = fetch_plugin_from_pluginline(new)
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")

    lines = sorted(lines, key=str.casefold)

    with open(input_file, "w") as f:
        f.writelines(lines)
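
# deprecated.json accumulates renames so downstream aliases can warn users;
# a sketch of one entry (names and date are placeholders):
#
#     {"old-name": {"new": "new-name", "date": "2022-01-10"}}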

def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    repo.index.add([str(f.resolve()) for f in files])

    if repo.index.diff("HEAD"):
        print(f'committing to nixpkgs "{message}"')
        repo.index.commit(message)
    else:
        print("no changes in working tree to commit")

def get_update(input_file: str, outfile: str, proc: int, editor: Editor):
    cache: Cache = Cache(editor.get_current_plugins(), editor.cache_file)
    _prefetch = functools.partial(prefetch, cache=cache)

    def update() -> dict:
        plugin_names = editor.load_plugin_spec(input_file)

        try:
            pool = Pool(processes=proc)
            results = pool.map(_prefetch, plugin_names)
        finally:
            cache.store()

        plugins, redirects = check_results(results)

        editor.generate_nix(plugins, outfile)

        return redirects

    return update

def update_plugins(editor: Editor, args):
    """The main entry function of this module. All input arguments are grouped in the `Editor`."""

    log.setLevel(LOG_LEVELS[args.debug])
    log.info("Start updating plugins")
    update = editor.get_update(args.input_file, args.outfile, args.proc)

    redirects = update()
    editor.rewrite_input(args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit

    if autocommit:
        nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
        commit(nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])

    if redirects:
        update()
        if autocommit:
            commit(
                nixpkgs_repo,
                f"{editor.attr_path}: resolve github repository redirects",
                [args.outfile, args.input_file, editor.deprecated],
            )

    for plugin_line in args.add_plugins:
        editor.rewrite_input(args.input_file, editor.deprecated, append=(plugin_line + "\n",))
        update()
        plugin = fetch_plugin_from_pluginline(plugin_line)
        if autocommit:
            commit(
                nixpkgs_repo,
                "{drv_name}: init at {version}".format(
                    drv_name=editor.get_drv_name(plugin.normalized_name),
                    version=plugin.version
                ),
                [args.outfile, args.input_file],
            )