diff --git a/src/idiap_devtools/click.py b/src/idiap_devtools/click.py
index 0bc3107338d0913705624fef68a04ea6bf59134b..96d1d0e741eb45a0134b7d77b75833c73aef7e90 100644
--- a/src/idiap_devtools/click.py
+++ b/src/idiap_devtools/click.py
@@ -73,9 +73,7 @@ def verbosity_option(
         }[value]
 
         logger.setLevel(log_level)
-        logger.debug(
-            f'Level of Logger("{logger.name}") was set to {log_level}'
-        )
+        logger.debug(f'Level of Logger("{logger.name}") was set to {log_level}')
         return value
 
     return click.option(
diff --git a/src/idiap_devtools/conda.py b/src/idiap_devtools/conda.py
index 5d0d8cb1b15dcd3c9cadf17f5f956d86c158d6ab..83c00e30f0ba6d6d9c5549320d01486e1ad6fa4c 100644
--- a/src/idiap_devtools/conda.py
+++ b/src/idiap_devtools/conda.py
@@ -111,9 +111,7 @@ def parse_dependencies(recipe_dir, config) -> tuple[str, list[str]]:
     recipe = get_parsed_recipe(metadata)
     requirements = []
     for section in ("build", "host"):
-        requirements += remove_pins(
-            recipe.get("requirements", {}).get(section, [])
-        )
+        requirements += remove_pins(recipe.get("requirements", {}).get(section, []))
     # we don't remove pins for the rest of the recipe
     requirements += recipe.get("requirements", {}).get("run", [])
     requirements += recipe.get("test", {}).get("requires", [])
diff --git a/src/idiap_devtools/gitlab/__init__.py b/src/idiap_devtools/gitlab/__init__.py
index cc14423f62c064fec13fd168a433b1e0f757bec3..7467531e412661036cf3c7dfc393f3c1456f465a 100644
--- a/src/idiap_devtools/gitlab/__init__.py
+++ b/src/idiap_devtools/gitlab/__init__.py
@@ -26,9 +26,7 @@ def get_gitlab_instance() -> gitlab.Gitlab:
         for k in ["~/.python-gitlab.cfg", "/etc/python-gitlab.cfg"]
     ]
     if any([k.exists() for k in cfgs]):
-        gl = gitlab.Gitlab.from_config(
-            "idiap", [str(k) for k in cfgs if k.exists()]
-        )
+        gl = gitlab.Gitlab.from_config("idiap", [str(k) for k in cfgs if k.exists()])
     else:  # ask the user for a token or use one from the current runner
         server = os.environ.get("CI_SERVER_URL", "https://gitlab.idiap.ch")
         token = os.environ.get("CI_JOB_TOKEN")
diff --git a/src/idiap_devtools/gitlab/changelog.py b/src/idiap_devtools/gitlab/changelog.py
index b476ebca0b549e9810c325bbf0d084c93970636e..8b2dae08a68993af69cc726d2d7fe0bdbaf438fd 100644
--- a/src/idiap_devtools/gitlab/changelog.py
+++ b/src/idiap_devtools/gitlab/changelog.py
@@ -55,9 +55,7 @@ def _sort_commits(
         The input list of ``commits``, sorted
     """
 
-    return sorted(
-        commits, key=lambda x: parse_date(x.committed_date), reverse=reverse
-    )
+    return sorted(commits, key=lambda x: parse_date(x.committed_date), reverse=reverse)
 
 
 def _sort_tags(
@@ -134,9 +132,7 @@ def get_last_tag_date(
 
     # according to the Gitlab API documentation, tags are sorted from the last
     # updated to the first, by default - no need to do further sorting!
-    tag_list = package.tags.list(
-        page=1, per_page=1
-    )  # Silence userWarning on list()
+    tag_list = package.tags.list(page=1, per_page=1)  # Silence userWarning on list()
 
     if tag_list:
         # there are tags, use these
@@ -162,15 +158,12 @@ def get_last_tag_date(
             package.id,
             first.committed_date,
         )
-        return parse_date(first.committed_date) - datetime.timedelta(
-            milliseconds=500
-        )
+        return parse_date(first.committed_date) - datetime.timedelta(milliseconds=500)
 
     # there are no commits nor tags - abort
     raise RuntimeError(
         "package %s (id=%d) does not have commits "
-        "or tags so I cannot devise a good starting date"
-        % (package.name, package.id)
+        "or tags so I cannot devise a good starting date" % (package.name, package.id)
     )
 
 
@@ -204,9 +197,7 @@ def _write_one_tag(
         if line.startswith("* ") or line.startswith("- "):
             line = line[2:]
 
-        line = line.replace("!", pkg_name + "!").replace(
-            pkg_name + pkg_name, pkg_name
-        )
+        line = line.replace("!", pkg_name + "!").replace(pkg_name + pkg_name, pkg_name)
         line = line.replace("#", pkg_name + "#")
         if not line:
             continue
@@ -364,10 +355,7 @@ def write_tags_with_commits(
     mrs, tags, commits = get_changes_since(gitpkg, since)
     newline = "\n"
 
-    f.write(
-        f"# {gitpkg.attributes['path_with_namespace']}:"
-        f" patch{newline}{newline}"
-    )
+    f.write(f"# {gitpkg.attributes['path_with_namespace']}: patch{newline}{newline}")
 
     # go through tags and writes each with its message and corresponding
     # commits
@@ -393,9 +381,7 @@ def write_tags_with_commits(
         # the attribute 'merged_at' is not available in GitLab API as of 27
         # June 2018
         mrs4tag = [
-            k
-            for k in mrs
-            if (start_date < parse_date(k.updated_at) <= end_date)
+            k for k in mrs if (start_date < parse_date(k.updated_at) <= end_date)
         ]
         _write_mergerequests_range(
             f, gitpkg.attributes["path_with_namespace"], mrs4tag
         )
@@ -408,9 +394,7 @@ def write_tags_with_commits(
         # write leftover merge requests
         # the attribute 'merged_at' is not available in GitLab API as of 27
         # June 2018
-        leftover_mrs = [
-            k for k in mrs if parse_date(k.updated_at) > start_date
-        ]
+        leftover_mrs = [k for k in mrs if parse_date(k.updated_at) > start_date]
         _write_mergerequests_range(
             f, gitpkg.attributes["path_with_namespace"], leftover_mrs
         )
@@ -447,10 +431,7 @@ def write_tags(
     tags = _sort_tags(tags, reverse=False)
     newline = "\n"
 
-    f.write(
-        f"# {gitpkg.attributes['path_with_namespace']}:"
-        f" patch{newline}{newline}"
-    )
+    f.write(f"# {gitpkg.attributes['path_with_namespace']}: patch{newline}{newline}")
 
     for tag in tags:
         _write_one_tag(f, gitpkg.attributes["path_with_namespace"], tag)
diff --git a/src/idiap_devtools/gitlab/release.py b/src/idiap_devtools/gitlab/release.py
index 987d4a63622a7791b2833269117e8f39058a92e3..7e777a7e75f795010055e17d11b548559956542e 100644
--- a/src/idiap_devtools/gitlab/release.py
+++ b/src/idiap_devtools/gitlab/release.py
@@ -56,9 +56,7 @@ def _update_readme(
     }
 
     # matches the graphical badge in the readme's text with the given version
-    doc_image_re = re.compile(
-        r"docs\-(" + "|".join(variants) + r")\-", re.VERBOSE
-    )
+    doc_image_re = re.compile(r"docs\-(" + "|".join(variants) + r")\-", re.VERBOSE)
 
     # matches all other occurrences we need to handle
     branch_re = re.compile(r"/(" + "|".join(variants) + r")", re.VERBOSE)
@@ -68,17 +66,13 @@ def _update_readme(
         if branch_re.search(line) is not None:
             if "gitlab" in line:  # gitlab links
                 replacement = (
-                    "/v%s" % version
-                    if version is not None
-                    else f"/{default_branch}"
+                    "/v%s" % version if version is not None else f"/{default_branch}"
                 )
                 line = branch_re.sub(replacement, line)
             if ("docs-latest" in line) or ("docs-stable" in line):
                 # our doc server
                 replacement = (
-                    "/v%s" % version
-                    if version is not None
-                    else f"/{default_branch}"
+                    "/v%s" % version if version is not None else f"/{default_branch}"
                 )
                 line = branch_re.sub(replacement, line)
         if doc_image_re.search(line) is not None:
@@ -229,14 +223,10 @@ def _pin_versions_of_packages_list(
                 )
             )
         else:
-            final_str = "".join(
-                (pkg_req.name, extras_str, specs_str, marker_str)
-            )
+            final_str = "".join((pkg_req.name, extras_str, specs_str, marker_str))
 
         # Replace the package specification with the pinned version
-        packages_list[pkg_id] = str(
-            packaging.requirements.Requirement(final_str)
-        )
+        packages_list[pkg_id] = str(packaging.requirements.Requirement(final_str))
         logger.debug("Package pinned: %s", packages_list[pkg_id])
 
     return packages_list
@@ -286,10 +276,7 @@ def _update_pyproject(
 
     data = tomlkit.loads(contents)
 
-    if (
-        re.match(packaging.version.VERSION_PATTERN, version, re.VERBOSE)
-        is not None
-    ):
+    if re.match(packaging.version.VERSION_PATTERN, version, re.VERBOSE) is not None:
         logger.info(
             "Updating pyproject.toml version from '%s' to '%s'",
             data.get("project", {}).get("version", "unknown version"),
@@ -342,9 +329,7 @@ def _update_pyproject(
     logger.debug("Fetching origin of dev-profile.")
     profile_repo.remotes.origin.fetch()
     logger.debug("Checking that the local commits are available on origin.")
-    commits_ahead = [
-        c for c in profile_repo.iter_commits("origin/main..HEAD")
-    ]
+    commits_ahead = [c for c in profile_repo.iter_commits("origin/main..HEAD")]
     if len(commits_ahead) != 0:
         raise RuntimeError(
             "Local commits of dev-profile were not pushed to origin!\n"
@@ -355,9 +340,7 @@ def _update_pyproject(
             "retrieved."
         )
     logger.debug("Checking we are up to date with origin.")
-    commits_behind = [
-        c for c in profile_repo.iter_commits("HEAD..origin/main")
-    ]
+    commits_behind = [c for c in profile_repo.iter_commits("HEAD..origin/main")]
     if len(commits_behind) != 0:
         logger.warning(
             "Your local dev-profile is not up to date with the origin "
@@ -386,12 +369,8 @@ def _update_pyproject(
     # sets the various URLs
     url = data["project"].get("urls", {}).get("documentation")
     if (url is not None) and (branch_re.search(url) is not None):
-        replacement = (
-            "/v%s" % version if version is not None else f"/{default_branch}"
-        )
-        data["project"]["urls"]["documentation"] = branch_re.sub(
-            replacement, url
-        )
+        replacement = "/v%s" % version if version is not None else f"/{default_branch}"
+        data["project"]["urls"]["documentation"] = branch_re.sub(replacement, url)
 
     return tomlkit.dumps(data)
 
@@ -431,9 +410,7 @@ def get_latest_tag_name(
     return tag_names[-1]
 
 
-def get_next_version(
-    gitpkg: gitlab.v4.objects.projects.Project, bump: str
-) -> str:
+def get_next_version(gitpkg: gitlab.v4.objects.projects.Project, bump: str) -> str:
     """Return the next version of this package to be tagged.
 
     Arguments:
@@ -724,9 +701,7 @@ def release_package(
    # version tag. Add and commit to gitlab
     version_number = tag_name[1:]  # remove 'v' in front

-    readme_file = gitpkg.files.get(
-        file_path="README.md", ref=gitpkg.default_branch
-    )
+    readme_file = gitpkg.files.get(file_path="README.md", ref=gitpkg.default_branch)
     readme_contents_orig = readme_file.decode().decode()

     readme_contents = _update_readme(
diff --git a/src/idiap_devtools/gitlab/runners.py b/src/idiap_devtools/gitlab/runners.py
index 700118c7f968dcbd4ea5f76ce44b102f4f3c3ea7..42019b6ebc172c5b89c55cc5e6e1bea0f0d27695 100644
--- a/src/idiap_devtools/gitlab/runners.py
+++ b/src/idiap_devtools/gitlab/runners.py
@@ -36,9 +36,7 @@ def get_runner_from_description(
 
     # search for the runner to affect
     runners = [
-        k
-        for k in gl.runners.list(all=True)
-        if k.attributes["description"] == descr
+        k for k in gl.runners.list(all=True) if k.attributes["description"] == descr
     ]
     if not runners:
         raise RuntimeError("Cannot find runner with description = %s", descr)
@@ -52,9 +50,7 @@ def get_runner_from_description(
     return the_runner
 
 
-def get_project(
-    gl: gitlab.Gitlab, name: str
-) -> gitlab.v4.objects.projects.Project:
+def get_project(gl: gitlab.Gitlab, name: str) -> gitlab.v4.objects.projects.Project:
     """Retrieve one single project."""
 
     retval = gl.projects.get(name)
@@ -108,9 +104,7 @@ def get_projects_from_runner(
     packages = []
     for k, proj in enumerate(the_runner.projects):
         packages.append(get_project(gl, proj["id"]))
-        logger.debug(
-            "Got data from project %d/%d", k + 1, len(the_runner.projects)
-        )
+        logger.debug("Got data from project %d/%d", k + 1, len(the_runner.projects))
 
     return packages
 
diff --git a/src/idiap_devtools/logging.py b/src/idiap_devtools/logging.py
index b5d100e74aa8cf1e4bf466364522071f1b5f18dc..b87762c8b744bf9ec698a17d84aeac3140d67807 100644
--- a/src/idiap_devtools/logging.py
+++ b/src/idiap_devtools/logging.py
@@ -76,8 +76,7 @@ def setup(
     # First check that logger with a matching name or stream is not already
     # there before attaching a new one.
     if (debug_logger_name not in handlers_installed) or (
-        getattr(handlers_installed[debug_logger_name], "stream")
-        != low_level_stream
+        getattr(handlers_installed[debug_logger_name], "stream") != low_level_stream
     ):
         debug_info = logging.StreamHandler(low_level_stream)
         debug_info.setLevel(logging.DEBUG)
@@ -91,8 +90,7 @@ def setup(
     # First check that logger with a matching name or stream is not already
     # there before attaching a new one.
     if (error_logger_name not in handlers_installed) or (
-        getattr(handlers_installed[error_logger_name], "stream")
-        != high_level_stream
+        getattr(handlers_installed[error_logger_name], "stream") != high_level_stream
     ):
         warn_err = logging.StreamHandler(high_level_stream)
         warn_err.setLevel(logging.WARNING)
diff --git a/src/idiap_devtools/profile.py b/src/idiap_devtools/profile.py
index acc9fd828a028d235055d302e3793a79d77c2261..b226971f5c104214510b1a7301359dcda4520950 100644
--- a/src/idiap_devtools/profile.py
+++ b/src/idiap_devtools/profile.py
@@ -65,9 +65,7 @@ def get_profile_path(name: str | pathlib.Path) -> pathlib.Path | None:
     # if you get to this point, then no local directory with that name exists
     # check the user configuration for a specific key
     if USER_CONFIGURATION.exists():
-        logger.debug(
-            f"Loading user-configuration from {str(USER_CONFIGURATION)}..."
-        )
+        logger.debug(f"Loading user-configuration from {str(USER_CONFIGURATION)}...")
         with USER_CONFIGURATION.open("rb") as f:
             usercfg = tomli.load(f)
     else:
@@ -145,9 +143,7 @@ class Profile:
 
         baserc = self.data.get("conda", {}).get("baserc")
         if baserc is None:
-            condarc_options: dict[str, typing.Any] = dict(
-                show_channel_urls=True
-            )
+            condarc_options: dict[str, typing.Any] = dict(show_channel_urls=True)
         else:
             f = io.BytesIO(self.data["conda"]["baserc"].encode())
             condarc_options = yaml.load(f, Loader=yaml.FullLoader)
@@ -171,9 +167,7 @@ class Profile:
         # detect append-file, if any
         copy_files = self.data.get("conda", {}).get("build-copy")
         if copy_files is not None:
-            append_file = [
-                k for k in copy_files if k.endswith("recipe_append.yaml")
-            ]
+            append_file = [k for k in copy_files if k.endswith("recipe_append.yaml")]
             if append_file:
                 condarc_options["append_sections_file"] = str(
                     self._basedir / append_file[0]
diff --git a/src/idiap_devtools/scripts/env.py b/src/idiap_devtools/scripts/env.py
index 66347648a56bf1d030df4517dece2e7a8d7628f4..77ea04c994cb08a847a85931599a645ad131d98a 100644
--- a/src/idiap_devtools/scripts/env.py
+++ b/src/idiap_devtools/scripts/env.py
@@ -58,12 +58,8 @@ def _load_conda_packages(
             # we can consume this from the input list
             recipe_dir = str(m.parent)
             logger.info(f"Parsing conda recipe at {recipe_dir}...")
-            pkg_name, pkg_deps = conda.parse_dependencies(
-                recipe_dir, conda_config
-            )
-            logger.info(
-                f"Added {len(pkg_deps)} packages from package '{pkg_name}'"
-            )
+            pkg_name, pkg_deps = conda.parse_dependencies(recipe_dir, conda_config)
+            logger.info(f"Added {len(pkg_deps)} packages from package '{pkg_name}'")
             parsed_packages.append(pkg_name)
             conda_packages += pkg_deps
             consumed.append(m)
@@ -73,12 +69,8 @@ def _load_conda_packages(
             # may need to parse it for python packages later on
             recipe_dir = str(m / "conda")
             logger.info(f"Parsing conda recipe at {recipe_dir}...")
-            pkg_name, pkg_deps = conda.parse_dependencies(
-                recipe_dir, conda_config
-            )
-            logger.info(
-                f"Added {len(pkg_deps)} packages from package '{pkg_name}'"
-            )
+            pkg_name, pkg_deps = conda.parse_dependencies(recipe_dir, conda_config)
+            logger.info(f"Added {len(pkg_deps)} packages from package '{pkg_name}'")
             parsed_packages.append(pkg_name)
             conda_packages += pkg_deps
 
@@ -409,9 +401,7 @@ def env(
     conda_packages = _simplify_conda_plan(conda_packages)
 
     # Adds missing pins
-    conda_packages = _add_missing_conda_pins(
-        the_profile, python, conda_packages
-    )
+    conda_packages = _add_missing_conda_pins(the_profile, python, conda_packages)
 
     # Write package installation plan, in YAML format
     data: dict[str, typing.Any] = dict(channels=conda_config.channels)
diff --git a/src/idiap_devtools/scripts/fullenv.py b/src/idiap_devtools/scripts/fullenv.py
index 936647873f69cede039afb383f7b1d34dac7be30..d60977255e7d9bad01eed519b0f923f44ad99956 100644
--- a/src/idiap_devtools/scripts/fullenv.py
+++ b/src/idiap_devtools/scripts/fullenv.py
@@ -117,9 +117,7 @@ def fullenv(
 
     # filter out all conda packages already in the list
     conda_to_python = the_profile.get(("conda", "to_python"), {})
-    python_to_conda = {
-        v: k for k, v in conda_to_python.items() if k != "__ignore__"
-    }
+    python_to_conda = {v: k for k, v in conda_to_python.items() if k != "__ignore__"}
     python_packages = [
         k
         for k in python_packages
@@ -133,9 +131,7 @@ def fullenv(
     )
 
     if python_packages:
-        data["dependencies"].append(
-            dict(pip=sorted([str(k) for k in python_packages]))
-        )
+        data["dependencies"].append(dict(pip=sorted([str(k) for k in python_packages])))
 
     # backup previous installation plan, if one exists
     if output.exists():
diff --git a/src/idiap_devtools/scripts/gitlab/badges.py b/src/idiap_devtools/scripts/gitlab/badges.py
index e0d01364dbc011f6f6388cda75adda6b91568a7a..bdd9ff1545b319c96e6e71c39d378f4211b25473 100644
--- a/src/idiap_devtools/scripts/gitlab/badges.py
+++ b/src/idiap_devtools/scripts/gitlab/badges.py
@@ -77,9 +77,7 @@ def _update_readme(content, info):
     new_badges_text = []
     for badge in README_BADGES:
         data = {k: v.format(**info) for (k, v) in badge.items()}
-        new_badges_text.append(
-            "[![{name}]({image_url})]({link_url})".format(**data)
-        )
+        new_badges_text.append("[![{name}]({image_url})]({link_url})".format(**data))
     new_badges_text = "\n" + "\n".join(new_badges_text) + "\n"
     # matches only 3 or more occurences of markdown badges
     expression = r"(\s?\[\!\[(?P<name>(\s|\w|-)+)\]\((?P<image_url>\S+)\)\]\((?P<link_url>\S+)\)){3,}"
@@ -134,9 +132,7 @@ def badges(package, update_readme, dry_run, server, **_) -> None:
 
     if dry_run:
         click.secho("!!!! DRY RUN MODE !!!!", fg="yellow", bold=True)
-        click.secho(
-            "No changes will be committed to GitLab.", fg="yellow", bold=True
-        )
+        click.secho("No changes will be committed to GitLab.", fg="yellow", bold=True)
 
     if "/" not in package:
         raise RuntimeError('PACKAGE should be specified as "group/name"')
diff --git a/src/idiap_devtools/scripts/gitlab/changelog.py b/src/idiap_devtools/scripts/gitlab/changelog.py
index 7da8e7ab111c6fb453860c03eaf90f7a748d9e3f..a464ad12c5ec2f7f622da15408a456f006aaac59 100644
--- a/src/idiap_devtools/scripts/gitlab/changelog.py
+++ b/src/idiap_devtools/scripts/gitlab/changelog.py
@@ -138,8 +138,7 @@ def changelog(target, output, mode, since, **_) -> None:
     for package in packages:
         if "/" not in package:
             raise RuntimeError(
-                f"Package names must contain group name"
-                f" (invalid: {package})"
+                f"Package names must contain group name (invalid: {package})"
             )
 
         # retrieves the gitlab package object
@@ -179,8 +178,6 @@ def changelog(target, output, mode, since, **_) -> None:
             continue
 
         # write_tags(f, use_package, last_release_date)
-        write_tags_with_commits(
-            output, use_package, last_release_date, mode
-        )
+        write_tags_with_commits(output, use_package, last_release_date, mode)
 
         output.flush()
diff --git a/src/idiap_devtools/scripts/gitlab/release.py b/src/idiap_devtools/scripts/gitlab/release.py
index cb198d6e8ae8cd999dafe530a29c7f2810edf42a..0c95f6ec9e15a44e2527951c708f867879248682 100644
--- a/src/idiap_devtools/scripts/gitlab/release.py
+++ b/src/idiap_devtools/scripts/gitlab/release.py
@@ -158,14 +158,10 @@ def release(
     # wait until done to proceed to the next package
     changelogs: list[str] = changelog.readlines()
 
-    header_re = re.compile(
-        r"^\s*#+\s*(?P<pkg>\S+(/\S+)+)\s*:\s*(?P<bump>\S+)\s*$"
-    )
+    header_re = re.compile(r"^\s*#+\s*(?P<pkg>\S+(/\S+)+)\s*:\s*(?P<bump>\S+)\s*$")
 
     # find the starts of each package's description at the changelog
-    pkgs = [
-        (line, k) for k, line in enumerate(changelogs) if header_re.match(line)
-    ]
+    pkgs = [(line, k) for k, line in enumerate(changelogs) if header_re.match(line)]
 
     if dry_run:
         click.secho(
@@ -220,9 +216,7 @@ def release(
 
         tag = bump.strip().lower()
         if tag in ("patch", "minor", "major"):
-            logger.info(
-                f"Processing package {pkg} to perform a {tag} release bump"
-            )
+            logger.info(f"Processing package {pkg} to perform a {tag} release bump")
 
             # gets the "next" tag for this package
             vtag = get_next_version(use_package, bump)
diff --git a/src/idiap_devtools/scripts/gitlab/settings.py b/src/idiap_devtools/scripts/gitlab/settings.py
index 9107f2776c5659dbec33ec08a9578863407d4e08..56fbd72f1befa3ae7b41e6123ea1f24b4751b302 100644
--- a/src/idiap_devtools/scripts/gitlab/settings.py
+++ b/src/idiap_devtools/scripts/gitlab/settings.py
@@ -13,9 +13,7 @@ from ...logging import setup
 logger = setup(__name__.split(".", 1)[0])
 
 
-def _change_settings(
-    project, info: dict[str, typing.Any], dry_run: bool
-) -> None:
+def _change_settings(project, info: dict[str, typing.Any], dry_run: bool) -> None:
     """Update the project settings using ``info``."""
 
     name = f"{project.namespace['full_path']}/{project.name}"
@@ -32,9 +30,7 @@ def _change_settings(
             project.unarchive()
 
     if info.get("description") is not None:
-        click.secho(
-            f" -> set description to '{info['description']}'", bold=True
-        )
+        click.secho(f" -> set description to '{info['description']}'", bold=True)
         if not dry_run:
             project.description = info["description"]
             project.save()
@@ -102,9 +98,7 @@ Examples:
     "printing to help you understand what will be done",
 )
 @verbosity_option(logger=logger)
-def settings(
-    projects, avatar, description, group, archive, dry_run, **_
-) -> None:
+def settings(projects, avatar, description, group, archive, dry_run, **_) -> None:
     """Update project settings."""
 
     from ...gitlab import get_gitlab_instance
@@ -117,9 +111,7 @@ def settings(
     # if we are in a dry-run mode, let's let it be known
     if dry_run:
         click.secho("!!!! DRY RUN MODE !!!!", fg="yellow", bold=True)
-        click.secho(
-            "No changes will be committed to GitLab.", fg="yellow", bold=True
-        )
+        click.secho("No changes will be committed to GitLab.", fg="yellow", bold=True)
 
     gl = get_gitlab_instance()
     gl_projects = []
diff --git a/src/idiap_devtools/scripts/pixi.py b/src/idiap_devtools/scripts/pixi.py
index 6229415a96d003cfa8719f08e102255e7ad9dcbf..5038231f019199a5f4a256ce83e5d93a603ca5bd 100644
--- a/src/idiap_devtools/scripts/pixi.py
+++ b/src/idiap_devtools/scripts/pixi.py
@@ -102,9 +102,7 @@ def pixi(
         for k in requirements:
             pr = packaging.requirements.Requirement(k)
             name = (
-                pr.name
-                if pr.name not in python_to_conda
-                else python_to_conda[pr.name]
+                pr.name if pr.name not in python_to_conda else python_to_conda[pr.name]
             )
             if pr.name in version:
                 retval[name] = version[name]
@@ -144,46 +142,36 @@ def pixi(
         documentation=pyproject["project"]["urls"]["documentation"],
     )
 
-    conda_config = the_profile.conda_config(
-        python=python, public=True, stable=True
-    )
+    conda_config = the_profile.conda_config(python=python, public=True, stable=True)
     config["project"]["channels"] = conda_config.channels
     config["project"]["platforms"] = ["linux-64", "osx-arm64"]
 
     config["dependencies"] = {"python": python + ".*"}
     config["dependencies"].update(
-        _make_requirement_dict(
-            pyproject.get("project", {}).get("dependencies", [])
-        )
+        _make_requirement_dict(pyproject.get("project", {}).get("dependencies", []))
     )
 
     cmds = []
 
     # setup standardized build procedure
     config.setdefault("feature", {}).setdefault("build", {})["dependencies"] = (
-        _make_requirement_dict(
-            pyproject.get("build-system", {}).get("requires", [])
-        )
+        _make_requirement_dict(pyproject.get("build-system", {}).get("requires", []))
     )
 
     # add pip so that the build works
-    config["feature"]["build"]["dependencies"].update(
-        _make_requirement_dict(["pip"])
-    )
+    config["feature"]["build"]["dependencies"].update(_make_requirement_dict(["pip"]))
     config["feature"]["build"]["tasks"] = dict(
         build="pip install --no-build-isolation --no-dependencies --editable ."
     )
-    config.setdefault("environments", {}).setdefault("default", []).insert(
-        0, "build"
-    )
+    config.setdefault("environments", {}).setdefault("default", []).insert(0, "build")
     cmds.append("To install, run: `pixi run build`")
 
     # adds optional features
     for feature, deps in (
         pyproject.get("project", {}).get("optional-dependencies", {}).items()
     ):
-        config.setdefault("feature", {}).setdefault(feature, {})[
-            "dependencies"
-        ] = _make_requirement_dict(deps)
+        config.setdefault("feature", {}).setdefault(feature, {})["dependencies"] = (
+            _make_requirement_dict(deps)
+        )
 
         if "pre-commit" in config["feature"][feature]["dependencies"]:
             config["feature"][feature]["tasks"] = {
                 "qa": "pre-commit run --all-files",
                 "qa-install": "pre-commit install",
             }
             # this feature can be separated from the rest
-            config.setdefault("environments", {}).setdefault(
-                "default", []
-            ).insert(0, feature)
-            cmds.append(
-                "To install pre-commit hook, run: `pixi run qa-install`"
+            config.setdefault("environments", {}).setdefault("default", []).insert(
+                0, feature
             )
+            cmds.append("To install pre-commit hook, run: `pixi run qa-install`")
             cmds.append("To run quality-assurance, run: `pixi run qa`")
 
         # if ruff is part of pre-commit configuration, then also add that
@@ -219,9 +205,9 @@ def pixi(
                 }
             }
             # this feature needs to have the package installed
-            config.setdefault("environments", {}).setdefault(
-                "default", []
-            ).insert(0, feature)
+            config.setdefault("environments", {}).setdefault("default", []).insert(
+                0, feature
+            )
             cmds.append("To do build docs, run: `pixi run doc`")
 
         if "pytest" in config["feature"][feature]["dependencies"]:
@@ -232,9 +218,9 @@ def pixi(
                 }
             }
             # this feature needs to have the package installed
-            config.setdefault("environments", {}).setdefault(
-                "default", []
-            ).insert(0, feature)
+            config.setdefault("environments", {}).setdefault("default", []).insert(
+                0, feature
+            )
             cmds.append("To do run test, run: `pixi run test`")
 
     # backup previous installation plan, if one exists
diff --git a/src/idiap_devtools/scripts/update_pins.py b/src/idiap_devtools/scripts/update_pins.py
index a8e02adfaf64ee0dddbc0832f3848beb35f8b7e0..962bc8ea4fac3fefa5935f0d621e7a2b8ca6273e 100644
--- a/src/idiap_devtools/scripts/update_pins.py
+++ b/src/idiap_devtools/scripts/update_pins.py
@@ -141,8 +141,7 @@ def update_pins(manual_pins, profile, python, only_pip, **_) -> None:
         + list(manual_pins)
     )
     click.secho(
-        f"Executing `{' '.join(cmd)}' to calculate a viable "
-        f"environment...",
+        f"Executing `{' '.join(cmd)}' to calculate a viable environment...",
         bold=True,
     )
     output = subprocess.run(cmd, capture_output=True, check=True)
@@ -214,21 +213,16 @@ package_names_map:
         return None
 
     with pip_constraints_path.open("w") as f:
-        python_packages = filter_python_packages(
-            resolved_packages, conda_to_python
-        )
+        python_packages = filter_python_packages(resolved_packages, conda_to_python)
         python_packages = sorted(
             [(p, v) for (p, v) in python_packages],
             key=lambda x: (x[0], len(x[0])),
         )
         click.secho(
-            f"Saving {len(python_packages)} entries to "
-            f"`{pip_constraints_path}'...",
+            f"Saving {len(python_packages)} entries to `{pip_constraints_path}'...",
             bold=True,
         )
-        constraints = [
-            f"{name}=={version}\n" for name, version in python_packages
-        ]
+        constraints = [f"{name}=={version}\n" for name, version in python_packages]
         f.writelines(constraints)
 
     return None
diff --git a/src/idiap_devtools/update_pins.py b/src/idiap_devtools/update_pins.py
index c078b264bd4b686e48c8d783626b32f65421db74..d545cdf73f4f7901f813c9fda7d37346089dbd31 100644
--- a/src/idiap_devtools/update_pins.py
+++ b/src/idiap_devtools/update_pins.py
@@ -51,9 +51,7 @@ def load_packages_from_conda_build_config(
     return packages, package_names_map
 
 
-def filter_python_packages(
-    resolved_packages, conda_to_python: dict[str, str | None]
-):
+def filter_python_packages(resolved_packages, conda_to_python: dict[str, str | None]):
     """Filter the list of packages to return only Python packages available
     on PyPI.
 
@@ -75,9 +73,7 @@ def filter_python_packages(
 
     keep_list = []
 
-    click.echo(
-        f"Filtering {len(resolved_packages)} packages for PyPI availability"
-    )
+    click.echo(f"Filtering {len(resolved_packages)} packages for PyPI availability")
 
     for p, v in resolved_packages:
         if p in conda_to_python["__ignore__"]:
@@ -119,10 +115,7 @@ def update_pip_constraints_only(
             key=lambda x: (x[0], len(x[0])),
         )
         click.echo(
-            f"Saving {len(python_packages)} entries to "
-            f"`{pip_constraints_path}'..."
+            f"Saving {len(python_packages)} entries to `{pip_constraints_path}'..."
         )
-        constraints = [
-            f"{name}=={version}\n" for name, version in python_packages
-        ]
+        constraints = [f"{name}=={version}\n" for name, version in python_packages]
         f.writelines(constraints)
diff --git a/src/idiap_devtools/utils.py b/src/idiap_devtools/utils.py
index 720acd3a32870d9e83bf7e3328fa27bbd56d53ec..11e3c46d7d7f867c31def6d1d9da6d0ff4f1cd47 100644
--- a/src/idiap_devtools/utils.py
+++ b/src/idiap_devtools/utils.py
@@ -130,8 +130,7 @@ def run_cmdline(
 
     if p.wait() != 0:
         raise RuntimeError(
-            "command `%s' exited with error state (%d)"
-            % (" ".join(cmd), p.returncode)
+            "command `%s' exited with error state (%d)" % (" ".join(cmd), p.returncode)
         )
 
     total = time.time() - start