pluginupdate.py: fix bugs and add improvements; vimPlugins: sort properly #353786

Merged 3 commits on Nov 9, 2024
Changes from 1 commit
76 changes: 40 additions & 36 deletions maintainers/scripts/pluginupdate-py/pluginupdate.py
@@ -142,7 +142,7 @@ def _prefetch(self, ref: Optional[str]):
return loaded

def prefetch(self, ref: Optional[str]) -> str:
print("Prefetching %s", self.uri)
log.info("Prefetching %s", self.uri)
loaded = self._prefetch(ref)
return loaded["sha256"]

@@ -256,21 +256,20 @@ class PluginDesc:

@property
def name(self):
if self.alias is None:
return self.repo.name
else:
return self.alias

def __lt__(self, other):
return self.repo.name < other.repo.name
return self.alias or self.repo.name

@staticmethod
def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
log.debug("Loading row %s", row)
branch = row["branch"]
repo = make_repo(row["repo"], branch.strip())
repo.token = config.github_token
return PluginDesc(repo, branch.strip(), row["alias"])
return PluginDesc(
repo,
branch.strip(),
# alias is usually an empty string
row["alias"] if row["alias"] else None,
)

@staticmethod
def load_from_string(config: FetchConfig, line: str) -> "PluginDesc":
@@ -381,16 +380,14 @@ def add(self, args):
fetch_config = FetchConfig(args.proc, args.github_token)
editor = self
for plugin_line in args.add_plugins:
log.debug("using plugin_line", plugin_line)
log.debug("using plugin_line %s", plugin_line)
pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
log.debug("loaded as pdesc", pdesc)
log.debug("loaded as pdesc %s", pdesc)
append = [pdesc]
editor.rewrite_input(
fetch_config, args.input_file, editor.deprecated, append=append
)
plugin, _ = prefetch_plugin(
pdesc,
)
plugin, _ = prefetch_plugin(pdesc)
autocommit = not args.no_commit
if autocommit:
commit(
@@ -405,9 +402,9 @@ def add(self, args):
# Expects arguments generated by 'update' subparser
def update(self, args):
"""CSV spec"""
print("the update member function should be overriden in subclasses")
print("the update member function should be overridden in subclasses")

def get_current_plugins(self, nixpkgs) -> List[Plugin]:
def get_current_plugins(self, nixpkgs: str) -> List[Plugin]:
"""To fill the cache"""
data = run_nix_expr(self.get_plugins, nixpkgs)
plugins = []
@@ -439,6 +436,7 @@ def update() -> dict:

plugins, redirects = check_results(results)

plugins = sorted(plugins, key=lambda v: v[1].normalized_name)
self.generate_nix(plugins, outfile)

return redirects
@@ -558,6 +556,7 @@ def run(
parser = self.create_parser()
args = parser.parse_args()
command = args.command or "update"
logging.basicConfig()
log.setLevel(LOG_LEVELS[args.debug])
log.info("Chose to run command: %s", command)
self.nixpkgs = args.nixpkgs
@@ -590,25 +589,24 @@ def prefetch_plugin(
p: PluginDesc,
cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
repo, branch, alias = p.repo, p.branch, p.alias
name = alias or p.repo.name
commit = None
log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
commit, date = repo.latest_commit()
log.info(f"Fetching last commit for plugin {p.name} from {p.repo.uri}@{p.branch}")
commit, date = p.repo.latest_commit()

cached_plugin = cache[commit] if cache else None
if cached_plugin is not None:
log.debug("Cache hit !")
cached_plugin.name = name
log.debug(f"Cache hit for {p.name}!")
cached_plugin.name = p.name
cached_plugin.date = date
return cached_plugin, repo.redirect
return cached_plugin, p.repo.redirect

has_submodules = repo.has_submodules()
log.debug(f"prefetch {name}")
sha256 = repo.prefetch(commit)
has_submodules = p.repo.has_submodules()
log.debug(f"prefetch {p.name}")
sha256 = p.repo.prefetch(commit)

return (
Plugin(name, commit, has_submodules, sha256, date=date),
repo.redirect,
Plugin(p.name, commit, has_submodules, sha256, date=date),
p.repo.redirect,
)


@@ -641,10 +639,9 @@ def check_results(

print(f"{len(results) - len(failures)} plugins were checked", end="")
if len(failures) == 0:
print()
return plugins, redirects
else:
print(f", {len(failures)} plugin(s) could not be downloaded:\n")
log.error(f", {len(failures)} plugin(s) could not be downloaded:\n")

for plugin, exception in failures:
print_download_error(plugin, exception)
@@ -737,10 +734,7 @@ def rewrite_input(
append: List[PluginDesc] = [],
):
log.info("Rewriting input file %s", input_file)
plugins = load_plugins_from_csv(
config,
input_file,
)
plugins = load_plugins_from_csv(config, input_file)

plugins.extend(append)

@@ -752,15 +746,25 @@ def rewrite_input(
deprecations = json.load(f)
# TODO parallelize this step
for pdesc, new_repo in redirects.items():
log.info("Rewriting input file %s", input_file)
log.info("Resolving deprecated plugin %s -> %s", pdesc.name, new_repo.name)
new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)

old_plugin, _ = prefetch_plugin(pdesc)
new_plugin, _ = prefetch_plugin(new_pdesc)

if old_plugin.normalized_name != new_plugin.normalized_name:
deprecations[old_plugin.normalized_name] = {
"new": new_plugin.normalized_name,
"date": cur_date_iso,
}

# remove plugin from index file, so we won't add it to deprecations again
for i, plugin in enumerate(plugins):
if plugin.name == pdesc.name:
plugins.pop(i)
break
plugins.append(new_pdesc)

with open(deprecated, "w") as f:
json.dump(deprecations, f, indent=4, sort_keys=True)
f.write("\n")
@@ -771,7 +775,7 @@ def rewrite_input(
fieldnames = ["repo", "branch", "alias"]
writer = csv.DictWriter(f, fieldnames, dialect="unix", quoting=csv.QUOTE_NONE)
writer.writeheader()
for plugin in sorted(plugins):
for plugin in sorted(plugins, key=lambda x: x.name):
writer.writerow(asdict(plugin))


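For context on the "sort properly" half of the PR title: the hunks above move sorting into update() itself (keyed on normalized_name) and into the CSV writer (keyed on name, now that __lt__ is gone from PluginDesc). Below is a minimal, self-contained sketch of the idea; the class and plugin names are made up for illustration and are not the real Plugin from this script.

# Illustrative sketch only: sort generated entries by an explicit key
# instead of relying on a __lt__ method, as the diff above now does.
from dataclasses import dataclass


@dataclass
class DemoPlugin:  # stand-in for the real Plugin class, not nixpkgs code
    name: str
    normalized_name: str


plugins = [
    DemoPlugin("Zen-mode.nvim", "zen-mode-nvim"),
    DemoPlugin("ale", "ale"),
    DemoPlugin("BufOnly.vim", "bufonly-vim"),
]

# Keying on normalized_name orders entries the way the generated attribute
# names appear, independent of the upstream repository's casing.
for p in sorted(plugins, key=lambda v: v.normalized_name):
    print(p.normalized_name)  # ale, bufonly-vim, zen-mode-nvim
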
6 changes: 2 additions & 4 deletions pkgs/applications/editors/kakoune/plugins/update.py
@@ -50,7 +50,7 @@
lib.filterAttrs (n: v: v != null) checksums
)"""

HEADER = "# This file has been generated by ./pkgs/applications/editors/kakoune/plugins/update.py. Do not edit!"
HEADER = "# This file has been @generated by ./pkgs/applications/editors/kakoune/plugins/update.py. Do not edit!"
Contributor

Why this @?

Member Author

Just saw that poetry does the same:

python-poetry/poetry#2773

Contributor

Ok, fine by me.
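
For readers wondering what the marker buys: "@generated" near the top of a file is a convention that some code-review and diff tools use to recognize machine-generated files; presumably that is also why poetry adopted it in the linked PR, though the thread above does not say so, so treat this explanation as an assumption. A rough, hypothetical sketch of such a check, purely illustrative and not code from this PR or from nixpkgs:

# Hypothetical helper, for illustration only: report a file as
# machine-generated if an "@generated" marker shows up in its first
# few lines, which is the convention the new HEADER string follows.
def looks_generated(path: str, max_lines: int = 5) -> bool:
    with open(path, encoding="utf-8") as f:
        for lineno, line in enumerate(f):
            if lineno >= max_lines:
                return False
            if "@generated" in line:
                return True
    return False


# Usage (the path here is a placeholder, not a real file from this PR):
# looks_generated("path/to/generated.nix")
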



class KakouneEditor(pluginupdate.Editor):
@@ -59,8 +59,6 @@ def generate_nix(
plugins: List[Tuple[pluginupdate.PluginDesc, pluginupdate.Plugin]],
outfile: str,
):
sorted_plugins = sorted(plugins, key=lambda v: v[1].name.lower())

with open(outfile, "w+") as f:
f.write(HEADER)
f.write(
@@ -70,7 +68,7 @@
packages = ( self:
{"""
)
for pluginDesc, plugin in sorted_plugins:
for pluginDesc, plugin in plugins:
f.write(
f"""
{plugin.normalized_name} = buildKakounePluginFrom2Nix {{
8 changes: 1 addition & 7 deletions pkgs/applications/editors/vim/plugins/update.py
@@ -29,11 +29,6 @@

log = logging.getLogger("vim-updater")

sh = logging.StreamHandler()
formatter = logging.Formatter("%(name)s:%(levelname)s: %(message)s")
sh.setFormatter(formatter)
log.addHandler(sh)

# Import plugin update library from maintainers/scripts/pluginupdate.py
ROOT = Path(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))))
import importlib
@@ -58,7 +53,6 @@ def generate_nix(
self, plugins: List[Tuple[PluginDesc, pluginupdate.Plugin]], outfile: str
):
log.info("Generating nix code")
sorted_plugins = sorted(plugins, key=lambda v: v[0].name.lower())
log.debug("Loading nvim-treesitter revision from nix...")
nvim_treesitter_rev = pluginupdate.run_nix_expr(
"(import <localpkgs> { }).vimPlugins.nvim-treesitter.src.rev",
@@ -95,7 +89,7 @@ def _isNeovimPlugin(plug: pluginupdate.Plugin) -> bool:
"""
)
)
for pdesc, plugin in sorted_plugins:
for pdesc, plugin in plugins:
content = self.plugin2nix(pdesc, plugin, _isNeovimPlugin(plugin))
f.write(content)
if (
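
A closing note on the logging changes that span these files: run() in pluginupdate.py now calls logging.basicConfig() once, so named module loggers such as "vim-updater" no longer need the hand-rolled StreamHandler and Formatter deleted above; they simply propagate to the root handler. A minimal sketch of that pattern, reusing the old format string only for the example:

# Minimal sketch of the new logging setup: configure the root logger once
# and let named loggers propagate to it, instead of attaching a
# StreamHandler/Formatter to each module logger by hand.
import logging

log = logging.getLogger("vim-updater")


def main() -> None:
    logging.basicConfig(format="%(name)s:%(levelname)s: %(message)s")
    log.setLevel(logging.DEBUG)
    # Lazy %-style arguments, matching the corrected log.debug calls above.
    log.debug("using plugin_line %s", "<some plugin line>")


if __name__ == "__main__":
    main()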